@ebowwa/mcp-nm 1.1.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +861 -2
- package/package.json +14 -4
- package/src/index.ts +1078 -4
package/dist/index.js
CHANGED
|
@@ -13642,13 +13642,61 @@ import { exec } from "child_process";
|
|
|
13642
13642
|
import { promisify } from "util";
|
|
13643
13643
|
var execAsync = promisify(exec);
|
|
13644
13644
|
var server = new Server({
|
|
13645
|
-
name: "@ebowwa/nm
|
|
13646
|
-
version: "
|
|
13645
|
+
name: "@ebowwa/mcp-nm",
|
|
13646
|
+
version: "2.0.0"
|
|
13647
13647
|
}, {
|
|
13648
13648
|
capabilities: {
|
|
13649
13649
|
tools: {}
|
|
13650
13650
|
}
|
|
13651
13651
|
});
|
|
13652
|
+
// Classify a binary with the `file` utility and decide whether it carries a
// dynamic symbol table (i.e. whether `nm -D` / dynamic-symbol tools apply).
// Returns { hasDynamicSymbols, fileType, format } where `format` is the raw
// trimmed `file` output. Never throws: any failure yields the "unknown" shape.
async function analyzeFile(filePath) {
  try {
    const { stdout: fileInfo } = await execAsync(`file "${filePath}"`);
    const info = fileInfo.toLowerCase();
    let fileType = "unknown";
    let hasDynamicSymbols = false;
    if (info.includes("elf")) {
      // Order matters: "shared object" is tested before "statically linked".
      // NOTE(review): ELF PIE executables also report "shared object", so
      // they are classified as "elf-shared" here — confirm this is intended.
      if (info.includes("shared object") || info.includes("dynamically linked")) {
        fileType = info.includes("shared object") ? "elf-shared" : "elf-executable";
        hasDynamicSymbols = true;
      } else if (info.includes("statically linked")) {
        fileType = "elf-executable";
        hasDynamicSymbols = false;
      } else if (info.includes("relocatable")) {
        // Unlinked object file (.o): no dynamic symbol table exists.
        fileType = "object-file";
        hasDynamicSymbols = false;
      }
    } else if (info.includes("mach-o")) {
      if (info.includes("dynamically linked") || info.includes("shared library") || info.includes("dylib")) {
        fileType = "mach-o";
        hasDynamicSymbols = true;
      } else if (info.includes("bundle") || info.includes("statically linked")) {
        fileType = "mach-o";
        hasDynamicSymbols = false;
      } else {
        // Ambiguous Mach-O: treat plain executables as having dynamic symbols.
        fileType = "mach-o";
        hasDynamicSymbols = info.includes("executable");
      }
    } else if (info.includes("ar archive") || info.includes("current ar archive")) {
      // Static library (.a). NOTE(review): the second includes() is redundant —
      // any string containing "current ar archive" already contains "ar archive".
      fileType = "static-library";
      hasDynamicSymbols = false;
    } else if (info.includes("relocatable")) {
      fileType = "object-file";
      hasDynamicSymbols = false;
    }
    return {
      hasDynamicSymbols,
      fileType,
      format: fileInfo.trim()
    };
  } catch {
    // `file` missing or path unreadable — degrade to a safe default rather
    // than propagating the error to callers.
    return {
      hasDynamicSymbols: false,
      fileType: "unknown",
      format: "Unable to determine file type"
    };
  }
}
|
|
13652
13700
|
async function runNm(filePath, options = {}) {
|
|
13653
13701
|
const args = [];
|
|
13654
13702
|
if (options.externalOnly)
|
|
@@ -14066,6 +14114,42 @@ async function handleDefinedSymbols(args) {
|
|
|
14066
14114
|
return { content: [{ type: "text", text: summary }] };
|
|
14067
14115
|
}
|
|
14068
14116
|
async function handleDynamicSymbols(args) {
|
|
14117
|
+
const analysis = await analyzeFile(args.filePath);
|
|
14118
|
+
if (!analysis.hasDynamicSymbols) {
|
|
14119
|
+
const suggestions = [];
|
|
14120
|
+
switch (analysis.fileType) {
|
|
14121
|
+
case "static-library":
|
|
14122
|
+
suggestions.push("Static libraries (.a) contain object files without dynamic symbols.");
|
|
14123
|
+
suggestions.push("Use nm_list_symbols or nm_defined_symbols to see available symbols.");
|
|
14124
|
+
break;
|
|
14125
|
+
case "object-file":
|
|
14126
|
+
suggestions.push("Object files (.o) are not linked and have no dynamic symbol table.");
|
|
14127
|
+
suggestions.push("Use nm_list_symbols to see symbols in this object file.");
|
|
14128
|
+
break;
|
|
14129
|
+
case "elf-executable":
|
|
14130
|
+
suggestions.push("This appears to be a statically linked executable.");
|
|
14131
|
+
suggestions.push("Use nm_list_symbols or nm_defined_symbols instead.");
|
|
14132
|
+
break;
|
|
14133
|
+
default:
|
|
14134
|
+
suggestions.push("This file type does not have a dynamic symbol table.");
|
|
14135
|
+
suggestions.push("Try nm_list_symbols or nm_defined_symbols instead.");
|
|
14136
|
+
}
|
|
14137
|
+
return {
|
|
14138
|
+
content: [{
|
|
14139
|
+
type: "text",
|
|
14140
|
+
text: [
|
|
14141
|
+
`Error: File has no dynamic symbol table`,
|
|
14142
|
+
"",
|
|
14143
|
+
`File: ${args.filePath}`,
|
|
14144
|
+
`Type: ${analysis.format}`,
|
|
14145
|
+
"",
|
|
14146
|
+
...suggestions
|
|
14147
|
+
].join(`
|
|
14148
|
+
`)
|
|
14149
|
+
}],
|
|
14150
|
+
isError: true
|
|
14151
|
+
};
|
|
14152
|
+
}
|
|
14069
14153
|
const result = await runNm(args.filePath, {
|
|
14070
14154
|
dynamicSymbols: true,
|
|
14071
14155
|
demangle: args.demangle ?? true,
|
|
@@ -14073,6 +14157,7 @@ async function handleDynamicSymbols(args) {
|
|
|
14073
14157
|
});
|
|
14074
14158
|
const summary = [
|
|
14075
14159
|
`File: ${result.filePath}`,
|
|
14160
|
+
`File type: ${analysis.format}`,
|
|
14076
14161
|
`Dynamic symbols: ${result.totalCount}`,
|
|
14077
14162
|
"",
|
|
14078
14163
|
"Symbols:",
|
|
@@ -14193,6 +14278,531 @@ async function handleSummary(args) {
|
|
|
14193
14278
|
`);
|
|
14194
14279
|
return { content: [{ type: "text", text: summary }] };
|
|
14195
14280
|
}
|
|
14281
|
+
// Extract printable strings from a binary via the `strings` utility.
//
// args: { filePath, minLength? (default 4), encoding? "ascii"|"unicode"|"all" }
// Returns an MCP text result listing up to 200 strings plus a count.
//
// Fixes vs. the previous version:
//  - "all" previously passed "-a -e l", which makes GNU strings scan ONLY
//    16-bit little-endian characters, so plain ASCII strings were missed.
//    "all" now runs an ASCII pass and a UTF-16LE pass and merges the output.
//  - An unrecognized encoding value previously produced `strings undefined …`;
//    it now falls back to "all".
async function handleStrings(args) {
  const minLen = args.minLength ?? 4;
  const encoding = args.encoding ?? "all";
  // Each entry is a list of flag sets; one `strings` run per set.
  // "-a" = scan the whole file; "-e l" = 16-bit little-endian characters.
  const flagSets = {
    ascii: ["-a"],
    unicode: ["-e l"],
    all: ["-a", "-e l"]
  };
  const runs = flagSets[encoding] ?? flagSets.all;
  try {
    const strings = [];
    for (const encFlag of runs) {
      const { stdout } = await execAsync(`strings ${encFlag} -n ${minLen} "${args.filePath}"`, { maxBuffer: 50 * 1024 * 1024 });
      strings.push(...stdout.trim().split("\n").filter(Boolean));
    }
    const summary = [
      `Strings in: ${args.filePath}`,
      `Encoding: ${encoding}`,
      `Minimum length: ${minLen}`,
      `Total strings: ${strings.length}`,
      "",
      "Strings:",
      ...strings.slice(0, 200).map((s) => ` ${s}`),
      strings.length > 200 ? ` ... and ${strings.length - 200} more` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`strings command failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14309
|
+
// Report both the MIME type (`file --mime --brief`) and the human-readable
// description (`file`) for the given path as an MCP text result.
async function handleFileInfo(args) {
  try {
    const mime = await execAsync(`file --mime --brief "${args.filePath}"`);
    const detailed = await execAsync(`file "${args.filePath}"`);
    const report = [
      `File: ${args.filePath}`,
      "",
      "MIME Type:",
      ` ${mime.stdout.trim()}`,
      "",
      "Detailed Type:",
      ` ${detailed.stdout.trim()}`
    ];
    return { content: [{ type: "text", text: report.join("\n") }] };
  } catch (err) {
    throw new Error(`file command failed: ${err instanceof Error ? err.message : err}`);
  }
}
|
|
14328
|
+
// Print per-section sizes of a binary. Tries GNU `size -A -x` (SysV layout,
// hex numbers) first and falls back to the macOS/BSD `size -m` form; if both
// fail, the FIRST error is surfaced.
async function handleSectionSizes(args) {
  try {
    const { stdout } = await execAsync(`size -A -x "${args.filePath}"`);
    const body = stdout.trim().split("\n").map((line) => ` ${line}`);
    const text = [`Section Sizes: ${args.filePath}`, "", ...body].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (primaryError) {
    try {
      const { stdout } = await execAsync(`size -m "${args.filePath}"`);
      return { content: [{ type: "text", text: `Section Sizes:\n${stdout}` }] };
    } catch {
      // Report the GNU-form failure, which is usually the more informative one.
      throw new Error(`size command failed: ${primaryError instanceof Error ? primaryError.message : primaryError}`);
    }
  }
}
|
|
14350
|
+
// Dump section headers (name, size, VMA, flags) using `objdump -h`.
async function handleObjdumpSections(args) {
  try {
    const { stdout } = await execAsync(`objdump -h "${args.filePath}"`, {
      maxBuffer: 10 * 1024 * 1024
    });
    const text = [`Section Headers: ${args.filePath}`, "", stdout].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (err) {
    throw new Error(`objdump -h failed: ${err instanceof Error ? err.message : err}`);
  }
}
|
|
14366
|
+
// Show program headers / private headers (incl. imports on PE) via `objdump -p`.
async function handleObjdumpProgramHeaders(args) {
  try {
    const { stdout } = await execAsync(`objdump -p "${args.filePath}"`, {
      maxBuffer: 10 * 1024 * 1024
    });
    const text = [`Program Headers / Imports: ${args.filePath}`, "", stdout].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (err) {
    throw new Error(`objdump -p failed: ${err instanceof Error ? err.message : err}`);
  }
}
|
|
14382
|
+
// List linked dynamic libraries via `otool -L` (macOS only; fails elsewhere).
async function handleOtoolLibs(args) {
  try {
    const { stdout } = await execAsync(`otool -L "${args.filePath}"`);
    const deps = stdout.trim().split("\n").map((line) => ` ${line}`);
    const text = [`Dynamic Library Dependencies: ${args.filePath}`, "", ...deps].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (err) {
    throw new Error(`otool -L failed (macOS only): ${err instanceof Error ? err.message : err}`);
  }
}
|
|
14398
|
+
// Run `readelf` on an ELF file, selecting output by args.sections (any of
// "all", "headers", "sections", "segments", "symbols", "dynamic").
//
// Fix vs. the previous version: a sections list containing only unrecognized
// values produced an empty flag set, i.e. `readelf "<file>"` with no options,
// which readelf rejects. Such input now falls back to `-a`.
async function handleReadelf(args) {
  const sections = args.sections ?? ["all"];
  const flags = [];
  if (sections.includes("all")) {
    flags.push("-a");
  } else {
    if (sections.includes("headers"))
      flags.push("-h");
    if (sections.includes("sections"))
      flags.push("-S");
    if (sections.includes("segments"))
      flags.push("-l");
    if (sections.includes("symbols"))
      flags.push("-s");
    if (sections.includes("dynamic"))
      flags.push("-d");
  }
  if (flags.length === 0) {
    // Nothing recognized — default to a full dump rather than erroring out.
    flags.push("-a");
  }
  try {
    const { stdout } = await execAsync(`readelf ${flags.join(" ")} "${args.filePath}"`, {
      maxBuffer: 20 * 1024 * 1024
    });
    const summary = [
      `ELF Analysis: ${args.filePath}`,
      `Sections: ${sections.join(", ")}`,
      "",
      // Cap at 50k characters to keep the MCP payload bounded.
      stdout.slice(0, 50000),
      stdout.length > 50000 ? `\n... truncated (${stdout.length - 50000} more chars)` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`readelf failed (Linux ELF only): ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14433
|
+
// Resolve shared-object dependencies via `ldd` (Linux only).
async function handleLdd(args) {
  try {
    const { stdout } = await execAsync(`ldd "${args.filePath}"`);
    const deps = stdout.trim().split("\n").filter(Boolean).map((line) => ` ${line}`);
    const text = [`Shared Library Dependencies: ${args.filePath}`, "", ...deps].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (err) {
    throw new Error(`ldd failed (Linux only): ${err instanceof Error ? err.message : err}`);
  }
}
|
|
14449
|
+
// Disassemble a binary (optionally a single symbol) with `objdump -d`.
//
// args: { filePath, symbol?, startOffset?, length? }
// NOTE: startOffset/length select LINES of objdump's text output, not byte
// addresses within the binary.
async function handleDisassembly(args) {
  let cmd = "objdump -d";
  if (args.symbol) {
    // NOTE(review): --disassemble="NAME" requires a reasonably recent
    // binutils objdump; older versions ignore the =NAME form — confirm the
    // minimum supported toolchain.
    cmd += ` --disassemble="${args.symbol}"`;
  }
  cmd += ` "${args.filePath}"`;
  try {
    const { stdout } = await execAsync(cmd, {
      maxBuffer: 50 * 1024 * 1024
    });
    let output = stdout;
    if (args.startOffset !== undefined || args.length !== undefined) {
      // Window the text output by line numbers; defaults: start at line 0,
      // show 500 lines.
      const lines = stdout.split(`
`);
      const start = args.startOffset ?? 0;
      const len = args.length ?? 500;
      output = lines.slice(start, start + len).join(`
`);
    }
    const summary = [
      `Disassembly: ${args.filePath}`,
      args.symbol ? `Symbol: ${args.symbol}` : "",
      "",
      // Cap at 100k characters (1e5) to keep the response bounded.
      output.slice(0, 1e5),
      output.length > 1e5 ? `
... truncated (${output.length - 1e5} more chars)` : ""
      // NOTE(review): filter(Boolean) below also removes the "" separator
      // element, so the intended blank line never appears — confirm intended.
    ].filter(Boolean).join(`
`);
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`objdump -d failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14482
|
+
// Best-effort audit of binary hardening features (PIE, RELRO, stack canary,
// NX, FORTIFY). Each probe is independent: a failing command (e.g. grep
// exiting 1 on no match, or a tool being absent) is swallowed so the
// remaining checks still run. Never throws; failures appear as result rows.
async function handleSecurityAudit(args) {
  const results = [];
  try {
    const { stdout: fileInfo } = await execAsync(`file "${args.filePath}"`);
    const info = fileInfo.toLowerCase();
    // PIE heuristic from `file` output alone.
    if (info.includes("pie") || info.includes("position independent")) {
      results.push({ check: "PIE", status: "ENABLED", details: "Position Independent Executable" });
    } else if (info.includes("executable")) {
      results.push({ check: "PIE", status: "DISABLED", details: "Not a PIE binary" });
    }
    // RELRO: grep exits non-zero when GNU_RELRO is absent, which makes
    // execAsync reject — hence the empty catch (treated as "not found").
    try {
      const { stdout: relro } = await execAsync(`readelf -l "${args.filePath}" 2>/dev/null | grep -i gnu_relro`);
      if (relro.trim()) {
        results.push({ check: "RELRO", status: "ENABLED", details: "Read-Only relocations" });
      }
    } catch {}
    // Stack canary: presence of __stack_chk_fail in the symbol table.
    try {
      const { stdout: canary } = await execAsync(`nm "${args.filePath}" 2>/dev/null | grep -i "__stack_chk_fail"`);
      if (canary.trim()) {
        results.push({ check: "Stack Canary", status: "ENABLED", details: "Stack smashing detected symbol found" });
      } else {
        results.push({ check: "Stack Canary", status: "UNKNOWN", details: "No stack canary symbol found" });
      }
    } catch {
      results.push({ check: "Stack Canary", status: "UNKNOWN", details: "Could not check" });
    }
    // NX: GNU_STACK segment flags — "RWE" means an executable stack.
    try {
      const { stdout: nx } = await execAsync(`readelf -l "${args.filePath}" 2>/dev/null | grep -i "gnu_stack"`);
      if (nx.toLowerCase().includes("rwe")) {
        results.push({ check: "NX", status: "DISABLED", details: "Stack is executable (RWE)" });
      } else if (nx.trim()) {
        results.push({ check: "NX", status: "ENABLED", details: "Non-executable stack" });
      }
    } catch {}
    // FORTIFY_SOURCE: fortified glibc variants show up as *_chk@ symbols.
    try {
      const { stdout: fortify } = await execAsync(`nm "${args.filePath}" 2>/dev/null | grep -i "_chk@"`);
      if (fortify.trim()) {
        results.push({ check: "FORTIFY", status: "ENABLED", details: "Fortified functions detected" });
      }
    } catch {}
    // Mach-O PIE heuristic via load commands.
    // NOTE(review): this can add a second, contradictory "PIE" row when the
    // `file`-based check above already pushed PIE: DISABLED — confirm intended.
    if (info.includes("mach-o")) {
      try {
        const { stdout: loadCmds } = await execAsync(`otool -l "${args.filePath}"`);
        if (loadCmds.toLowerCase().includes("lc_main")) {
          results.push({ check: "PIE", status: "ENABLED", details: "Mach-O with LC_MAIN (likely PIE)" });
        }
      } catch {}
    }
  } catch (error2) {
    // Only the initial `file` call can land here; report it as a row.
    results.push({ check: "Error", status: "FAILED", details: error2 instanceof Error ? error2.message : String(error2) });
  }
  const summary = [
    `Security Audit: ${args.filePath}`,
    "",
    "Security Features:",
    ...results.map((r) => ` ${r.check}: ${r.status} - ${r.details}`),
    "",
    "Recommendations:",
    ...results.filter((r) => r.status === "DISABLED" || r.status === "UNKNOWN").map((r) => ` - Consider enabling ${r.check}`)
  ].join(`
`);
  return { content: [{ type: "text", text: summary }] };
}
|
|
14545
|
+
// Per-block Shannon entropy of a file, flagging blocks that look encrypted/
// compressed (entropy > 95% of the 8-bit maximum) or padded (< 30%).
//
// Fixes vs. the previous version:
//  - The xxd dump used exec's default ~1 MB maxBuffer; since hex doubles the
//    file size, any file over ~512 KB failed. The buffer is now 100 MB.
//  - An empty file produced a NaN average (0 / 0); it now reports 0.
async function handleEntropyAnalysis(args) {
  const blockSize = args.blockSize ?? 1024;
  try {
    const { stdout: hexData } = await execAsync(`xxd -p "${args.filePath}" | tr -d '\\n'`, { maxBuffer: 100 * 1024 * 1024 });
    const bytes = hexData.match(/.{2}/g)?.map((h) => parseInt(h, 16)) ?? [];
    const results = [];
    for (let i = 0; i < bytes.length; i += blockSize) {
      const block = bytes.slice(i, i + blockSize);
      if (block.length === 0)
        continue;
      // Byte-frequency histogram for this block.
      const freq = {};
      for (const byte of block) {
        freq[byte] = (freq[byte] ?? 0) + 1;
      }
      // Shannon entropy in bits (max 8 for byte data).
      let entropy = 0;
      for (const count of Object.values(freq)) {
        const p = count / block.length;
        entropy -= p * Math.log2(p);
      }
      const normalizedEntropy = entropy / 8;
      let status = "normal";
      if (normalizedEntropy > 0.95)
        status = "high (possibly encrypted/compressed)";
      else if (normalizedEntropy < 0.3)
        status = "low (possibly padding/zeros)";
      results.push({
        section: `Block ${Math.floor(i / blockSize)}`,
        offset: i,
        entropy: Math.round(entropy * 100) / 100,
        status
      });
    }
    const avgEntropy = results.length > 0 ? results.reduce((a, b) => a + b.entropy, 0) / results.length : 0;
    const unusual = results.filter((r) => r.status !== "normal");
    const summary = [
      `Entropy Analysis: ${args.filePath}`,
      `Block size: ${blockSize} bytes`,
      `Total blocks: ${results.length}`,
      `Average entropy: ${(avgEntropy / 8 * 100).toFixed(1)}% of max`,
      "",
      "Blocks with unusual entropy:",
      ...unusual.slice(0, 50).map((r) => ` ${r.section} (offset ${r.offset}): ${r.entropy}/8 bits - ${r.status}`),
      unusual.length > 50 ? ` ... and ${unusual.length - 50} more` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Entropy analysis failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14594
|
+
// List imported (undefined) and exported (defined, global) symbols using
// nm pipelines; platform-specific flags are chosen from analyzeFile's output.
// If the pipelines fail entirely, a plain `nm` dump is parsed as a fallback.
async function handleImportExport(args) {
  const analysis = await analyzeFile(args.filePath);
  const imports = [];
  const exports = [];
  try {
    if (analysis.format.toLowerCase().includes("mach-o")) {
      // macOS path. NOTE(review): assumes `nm -u` prints two columns with the
      // name in $2; some nm versions print the bare name only (one column),
      // which would yield empty strings here — verify on the target platform.
      const { stdout: importSyms } = await execAsync(`nm -u "${args.filePath}" 2>/dev/null | awk '{print $2}'`);
      imports.push(...importSyms.trim().split(`
`).filter(Boolean));
      // Globals minus undefined ("U ") lines; name assumed in column 3.
      const { stdout: exportSyms } = await execAsync(`nm -g "${args.filePath}" 2>/dev/null | grep -v "U " | awk '{print $3}'`);
      exports.push(...exportSyms.trim().split(`
`).filter(Boolean));
    } else {
      // ELF path: -D reads the dynamic symbol table.
      const { stdout: importSyms } = await execAsync(`nm -D -u "${args.filePath}" 2>/dev/null | awk '{print $2}'`);
      imports.push(...importSyms.trim().split(`
`).filter(Boolean));
      const { stdout: exportSyms } = await execAsync(`nm -D --defined-only "${args.filePath}" 2>/dev/null | awk '{print $3}'`);
      exports.push(...exportSyms.trim().split(`
`).filter(Boolean));
    }
  } catch {
    // Fallback: parse raw `nm` output ourselves.
    try {
      const { stdout: allSyms } = await execAsync(`nm "${args.filePath}" 2>/dev/null`);
      for (const line of allSyms.split(`
`)) {
        // NOTE(review): the class [UTDDBRC] contains a duplicate D (harmless),
        // and since it holds only uppercase letters the toUpperCase()
        // comparison below is always true — likely meant to also admit
        // lowercase (local) symbol codes; confirm intent.
        const match = line.match(/^\s*([0-9a-fA-F]+)?\s+([UTDDBRC])\s+(.+)/);
        if (match) {
          if (match[2] === "U")
            imports.push(match[3]);
          else if (match[1] && match[2] === match[2].toUpperCase())
            exports.push(match[3]);
        }
      }
    } catch {}
  }
  const summary = [
    `Import/Export Analysis: ${args.filePath}`,
    `File type: ${analysis.format}`,
    "",
    `Imports (${imports.length}):`,
    ...imports.slice(0, 100).map((s) => ` - ${s}`),
    imports.length > 100 ? ` ... and ${imports.length - 100} more` : "",
    "",
    `Exports (${exports.length}):`,
    ...exports.slice(0, 100).map((s) => ` + ${s}`),
    exports.length > 100 ? ` ... and ${exports.length - 100} more` : ""
  ].join(`
`);
  return { content: [{ type: "text", text: summary }] };
}
|
|
14644
|
+
// Byte-level diff of two files via xxd hex dumps. The shorter file is padded
// with "??" so trailing bytes count as differences.
//
// Fixes vs. the previous version:
//  - args.contextBytes was accepted but ignored (context hard-coded to ±4
//    bytes); it now sizes the context window (default 32 -> ±16 bytes).
//  - exec's default ~1 MB maxBuffer made the dumps fail for files > ~512 KB.
//  - Diffing two empty files produced a NaN percentage.
async function handleBinaryDiff(args) {
  const contextBytes = args.contextBytes ?? 32;
  const half = Math.max(1, Math.floor(contextBytes / 2));
  try {
    const bufOpts = { maxBuffer: 100 * 1024 * 1024 };
    const { stdout: hex1 } = await execAsync(`xxd -p "${args.file1}" | tr -d '\\n'`, bufOpts);
    const { stdout: hex2 } = await execAsync(`xxd -p "${args.file2}" | tr -d '\\n'`, bufOpts);
    const bytes1 = hex1.match(/.{2}/g) ?? [];
    const bytes2 = hex2.match(/.{2}/g) ?? [];
    const maxLen = Math.max(bytes1.length, bytes2.length);
    const diffs = [];
    for (let i = 0; i < maxLen; i++) {
      const b1 = bytes1[i] ?? "??";
      const b2 = bytes2[i] ?? "??";
      if (b1 !== b2) {
        diffs.push({ offset: i, file1: b1, file2: b2 });
      }
    }
    const diffPercent = maxLen === 0 ? "0.00" : (diffs.length / maxLen * 100).toFixed(2);
    const summary = [
      `Binary Diff: ${args.file1} vs ${args.file2}`,
      "",
      `File 1 size: ${bytes1.length} bytes`,
      `File 2 size: ${bytes2.length} bytes`,
      `Differences: ${diffs.length} bytes (${diffPercent}%)`,
      "",
      "Differences (first 200):",
      ...diffs.slice(0, 200).map((d) => {
        const ctx1 = bytes1.slice(Math.max(0, d.offset - half), d.offset + half + 1).join(" ");
        const ctx2 = bytes2.slice(Math.max(0, d.offset - half), d.offset + half + 1).join(" ");
        return ` Offset 0x${d.offset.toString(16).padStart(8, "0")}: ${d.file1} -> ${d.file2}\n Context: [${ctx1}] -> [${ctx2}]`;
      }),
      diffs.length > 200 ? ` ... and ${diffs.length - 200} more differences` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Binary diff failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14683
|
+
// List or extract the members of an `ar` archive (.a static library).
// With args.listOnly the contents are returned without touching the disk;
// otherwise members are extracted into outputDir (default: a /tmp directory).
//
// Fix vs. the previous version: extraction ran `cd "${outputDir}" && ar -x
// "${filePath}"`, so a RELATIVE filePath was resolved against outputDir and
// failed. The path is now made absolute before changing directory.
async function handleArchiveExtract(args) {
  const outputDir = args.outputDir ?? `/tmp/archive_${Date.now()}`;
  try {
    const { stdout: fileInfo } = await execAsync(`file "${args.filePath}"`);
    if (!fileInfo.toLowerCase().includes("ar archive") && !fileInfo.toLowerCase().includes("archive")) {
      return {
        content: [{ type: "text", text: `Error: ${args.filePath} is not an archive file.\n${fileInfo}` }],
        isError: true
      };
    }
    const { stdout: contents } = await execAsync(`ar -t "${args.filePath}"`);
    const files = contents.trim().split("\n").filter(Boolean);
    if (args.listOnly) {
      return {
        content: [{
          type: "text",
          text: [
            `Archive Contents: ${args.filePath}`,
            `Total files: ${files.length}`,
            "",
            "Files:",
            ...files.map((f) => ` - ${f}`)
          ].join("\n")
        }]
      };
    }
    // `ar -x` runs after cd, so resolve the archive path to absolute first.
    const absPath = args.filePath.startsWith("/") ? args.filePath : `${process.cwd()}/${args.filePath}`;
    await execAsync(`mkdir -p "${outputDir}"`);
    await execAsync(`cd "${outputDir}" && ar -x "${absPath}"`);
    const summary = [
      `Archive Extracted: ${args.filePath}`,
      `Output directory: ${outputDir}`,
      `Files extracted: ${files.length}`,
      "",
      "Extracted files:",
      ...files.map((f) => ` - ${f}`)
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Archive extraction failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14728
|
+
// Overwrite bytes at a given file offset with caller-supplied hex data,
// optionally creating a `.bak` copy first (default: yes).
//
// Fixes vs. the previous version:
//  - The write went through `printf '\xNN…' | dd`, but \x escapes are not
//    POSIX printf and fail under shells such as dash (/bin/sh on Debian).
//    The write now uses the fs API directly, which is also quoting-safe.
//  - The empty string passed hex validation and silently wrote nothing;
//    at least one byte is now required.
async function handlePatchBytes(args) {
  const backup = args.createBackup ?? true;
  try {
    const hex = args.hexData.replace(/\s/g, "");
    if (!/^[0-9a-fA-F]+$/.test(hex) || hex.length % 2 !== 0) {
      throw new Error("Invalid hex data. Must be even number of hex characters.");
    }
    const fs = await import("node:fs/promises");
    if (backup) {
      await fs.copyFile(args.filePath, `${args.filePath}.bak`);
    }
    const bytes = hex.match(/.{2}/g)?.map((h) => parseInt(h, 16)) ?? [];
    // "r+" keeps existing content and allows positioned writes (no truncate).
    const handle = await fs.open(args.filePath, "r+");
    try {
      await handle.write(Buffer.from(bytes), 0, bytes.length, args.offset);
    } finally {
      await handle.close();
    }
    const summary = [
      `Bytes Patched: ${args.filePath}`,
      `Offset: 0x${args.offset.toString(16)} (${args.offset})`,
      `Bytes written: ${bytes.length}`,
      `Hex data: ${hex}`,
      backup ? `Backup created: ${args.filePath}.bak` : "WARNING: No backup created"
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Patch failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14754
|
+
// Write `count` x86 NOP bytes (0x90) at the given offset by delegating to the
// generic byte patcher; backup behavior mirrors handlePatchBytes' default.
async function handleNopSled(args) {
  return handlePatchBytes({
    filePath: args.filePath,
    offset: args.offset,
    hexData: "90".repeat(args.count),
    createBackup: args.createBackup ?? true
  });
}
|
|
14764
|
+
// Read (and optionally overwrite) a byte range of a file. Without newHex the
// current bytes at [offset, offset+length) are shown as hex; with newHex the
// range is overwritten and a `.bak` copy is always created first.
//
// Fixes vs. the previous version:
//  - The write used `printf '\xNN…' | dd`; \x escapes are not POSIX printf
//    and fail under dash-style /bin/sh. Writes now go through the fs API.
//  - Odd-length hex passed validation and the trailing nibble was silently
//    dropped; it is now rejected (empty input too).
async function handleHexEditor(args) {
  try {
    const { stdout: currentHex } = await execAsync(`xxd -s ${args.offset} -l ${args.length} -p "${args.filePath}" | tr -d '\\n'`);
    if (args.newHex) {
      const hex = args.newHex.replace(/\s/g, "");
      if (!/^[0-9a-fA-F]+$/.test(hex) || hex.length % 2 !== 0) {
        throw new Error("Invalid hex data.");
      }
      const fs = await import("node:fs/promises");
      await fs.copyFile(args.filePath, `${args.filePath}.bak`);
      const bytes = hex.match(/.{2}/g)?.map((h) => parseInt(h, 16)) ?? [];
      // "r+" = positioned write without truncating the rest of the file.
      const handle = await fs.open(args.filePath, "r+");
      try {
        await handle.write(Buffer.from(bytes), 0, bytes.length, args.offset);
      } finally {
        await handle.close();
      }
      return {
        content: [{
          type: "text",
          text: [
            `Hex Editor: ${args.filePath}`,
            `Offset: 0x${args.offset.toString(16)}`,
            "",
            `Previous: ${currentHex}`,
            `New: ${hex}`,
            `Backup: ${args.filePath}.bak`
          ].join("\n")
        }]
      };
    }
    const summary = [
      `Hex Editor (read mode): ${args.filePath}`,
      `Offset: 0x${args.offset.toString(16)} (${args.offset})`,
      `Length: ${args.length} bytes`,
      "",
      `Hex: ${currentHex}`,
      "",
      "To modify, provide newHex parameter"
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Hex editor failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14196
14806
|
var TOOLS = [
|
|
14197
14807
|
{
|
|
14198
14808
|
name: "nm_list_symbols",
|
|
@@ -14413,6 +15023,221 @@ var TOOLS = [
|
|
|
14413
15023
|
},
|
|
14414
15024
|
required: ["filePath", "pattern"]
|
|
14415
15025
|
}
|
|
15026
|
+
},
|
|
15027
|
+
{
|
|
15028
|
+
name: "bin_strings",
|
|
15029
|
+
description: "Extract readable strings (ASCII, Unicode) from a binary file",
|
|
15030
|
+
inputSchema: {
|
|
15031
|
+
type: "object",
|
|
15032
|
+
properties: {
|
|
15033
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15034
|
+
minLength: { type: "number", description: "Minimum string length (default: 4)" },
|
|
15035
|
+
encoding: {
|
|
15036
|
+
type: "string",
|
|
15037
|
+
enum: ["ascii", "unicode", "all"],
|
|
15038
|
+
description: "String encoding to search (default: all)"
|
|
15039
|
+
}
|
|
15040
|
+
},
|
|
15041
|
+
required: ["filePath"]
|
|
15042
|
+
}
|
|
15043
|
+
},
|
|
15044
|
+
{
|
|
15045
|
+
name: "bin_file_info",
|
|
15046
|
+
description: "Get detailed file type information with MIME type",
|
|
15047
|
+
inputSchema: {
|
|
15048
|
+
type: "object",
|
|
15049
|
+
properties: {
|
|
15050
|
+
filePath: { type: "string", description: "Path to the file" }
|
|
15051
|
+
},
|
|
15052
|
+
required: ["filePath"]
|
|
15053
|
+
}
|
|
15054
|
+
},
|
|
15055
|
+
{
|
|
15056
|
+
name: "bin_section_sizes",
|
|
15057
|
+
description: "Get section sizes (text, data, bss) of a binary",
|
|
15058
|
+
inputSchema: {
|
|
15059
|
+
type: "object",
|
|
15060
|
+
properties: {
|
|
15061
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15062
|
+
},
|
|
15063
|
+
required: ["filePath"]
|
|
15064
|
+
}
|
|
15065
|
+
},
|
|
15066
|
+
{
|
|
15067
|
+
name: "bin_objdump_sections",
|
|
15068
|
+
description: "Get section headers with flags using objdump -h",
|
|
15069
|
+
inputSchema: {
|
|
15070
|
+
type: "object",
|
|
15071
|
+
properties: {
|
|
15072
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15073
|
+
},
|
|
15074
|
+
required: ["filePath"]
|
|
15075
|
+
}
|
|
15076
|
+
},
|
|
15077
|
+
{
|
|
15078
|
+
name: "bin_objdump_program_headers",
|
|
15079
|
+
description: "Get program headers and DLL imports using objdump -p",
|
|
15080
|
+
inputSchema: {
|
|
15081
|
+
type: "object",
|
|
15082
|
+
properties: {
|
|
15083
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15084
|
+
},
|
|
15085
|
+
required: ["filePath"]
|
|
15086
|
+
}
|
|
15087
|
+
},
|
|
15088
|
+
{
|
|
15089
|
+
name: "bin_otool_libs",
|
|
15090
|
+
description: "Get dynamic library dependencies for Mach-O files (macOS only)",
|
|
15091
|
+
inputSchema: {
|
|
15092
|
+
type: "object",
|
|
15093
|
+
properties: {
|
|
15094
|
+
filePath: { type: "string", description: "Path to the Mach-O file" }
|
|
15095
|
+
},
|
|
15096
|
+
required: ["filePath"]
|
|
15097
|
+
}
|
|
15098
|
+
},
|
|
15099
|
+
{
|
|
15100
|
+
name: "bin_readelf",
|
|
15101
|
+
description: "Comprehensive ELF structure analysis using readelf",
|
|
15102
|
+
inputSchema: {
|
|
15103
|
+
type: "object",
|
|
15104
|
+
properties: {
|
|
15105
|
+
filePath: { type: "string", description: "Path to the ELF file" },
|
|
15106
|
+
sections: {
|
|
15107
|
+
type: "array",
|
|
15108
|
+
items: { type: "string", enum: ["headers", "sections", "segments", "symbols", "dynamic", "all"] },
|
|
15109
|
+
description: "Sections to analyze (default: all)"
|
|
15110
|
+
}
|
|
15111
|
+
},
|
|
15112
|
+
required: ["filePath"]
|
|
15113
|
+
}
|
|
15114
|
+
},
|
|
15115
|
+
{
|
|
15116
|
+
name: "bin_ldd",
|
|
15117
|
+
description: "Get shared library dependencies using ldd (Linux only)",
|
|
15118
|
+
inputSchema: {
|
|
15119
|
+
type: "object",
|
|
15120
|
+
properties: {
|
|
15121
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15122
|
+
},
|
|
15123
|
+
required: ["filePath"]
|
|
15124
|
+
}
|
|
15125
|
+
},
|
|
15126
|
+
{
|
|
15127
|
+
name: "bin_disassembly",
|
|
15128
|
+
description: "Disassemble binary code using objdump -d",
|
|
15129
|
+
inputSchema: {
|
|
15130
|
+
type: "object",
|
|
15131
|
+
properties: {
|
|
15132
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15133
|
+
symbol: { type: "string", description: "Specific symbol to disassemble (optional)" },
|
|
15134
|
+
startOffset: { type: "number", description: "Line offset for output pagination" },
|
|
15135
|
+
length: { type: "number", description: "Number of lines to output" }
|
|
15136
|
+
},
|
|
15137
|
+
required: ["filePath"]
|
|
15138
|
+
}
|
|
15139
|
+
},
|
|
15140
|
+
{
|
|
15141
|
+
name: "bin_security_audit",
|
|
15142
|
+
description: "Check binary security features (ASLR, PIE, RELRO, stack canary, NX bit)",
|
|
15143
|
+
inputSchema: {
|
|
15144
|
+
type: "object",
|
|
15145
|
+
properties: {
|
|
15146
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15147
|
+
},
|
|
15148
|
+
required: ["filePath"]
|
|
15149
|
+
}
|
|
15150
|
+
},
|
|
15151
|
+
{
|
|
15152
|
+
name: "bin_entropy",
|
|
15153
|
+
description: "Analyze section entropy to detect packed/encrypted sections",
|
|
15154
|
+
inputSchema: {
|
|
15155
|
+
type: "object",
|
|
15156
|
+
properties: {
|
|
15157
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15158
|
+
blockSize: { type: "number", description: "Block size for entropy calculation (default: 1024)" }
|
|
15159
|
+
},
|
|
15160
|
+
required: ["filePath"]
|
|
15161
|
+
}
|
|
15162
|
+
},
|
|
15163
|
+
{
|
|
15164
|
+
name: "bin_import_export",
|
|
15165
|
+
description: "Get detailed import/export table analysis",
|
|
15166
|
+
inputSchema: {
|
|
15167
|
+
type: "object",
|
|
15168
|
+
properties: {
|
|
15169
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15170
|
+
},
|
|
15171
|
+
required: ["filePath"]
|
|
15172
|
+
}
|
|
15173
|
+
},
|
|
15174
|
+
{
|
|
15175
|
+
name: "bin_diff",
|
|
15176
|
+
description: "Byte-level comparison between two binary files",
|
|
15177
|
+
inputSchema: {
|
|
15178
|
+
type: "object",
|
|
15179
|
+
properties: {
|
|
15180
|
+
file1: { type: "string", description: "First binary file" },
|
|
15181
|
+
file2: { type: "string", description: "Second binary file" },
|
|
15182
|
+
contextBytes: { type: "number", description: "Bytes of context to show around differences (default: 32)" }
|
|
15183
|
+
},
|
|
15184
|
+
required: ["file1", "file2"]
|
|
15185
|
+
}
|
|
15186
|
+
},
|
|
15187
|
+
{
|
|
15188
|
+
name: "bin_archive_extract",
|
|
15189
|
+
description: "Extract or list contents of static library (.a) archives",
|
|
15190
|
+
inputSchema: {
|
|
15191
|
+
type: "object",
|
|
15192
|
+
properties: {
|
|
15193
|
+
filePath: { type: "string", description: "Path to the archive file" },
|
|
15194
|
+
outputDir: { type: "string", description: "Output directory for extraction (optional)" },
|
|
15195
|
+
listOnly: { type: "boolean", description: "Only list contents without extracting (default: false)" }
|
|
15196
|
+
},
|
|
15197
|
+
required: ["filePath"]
|
|
15198
|
+
}
|
|
15199
|
+
},
|
|
15200
|
+
{
|
|
15201
|
+
name: "bin_patch_bytes",
|
|
15202
|
+
description: "Patch bytes at a specific offset in a binary file (DESTRUCTIVE)",
|
|
15203
|
+
inputSchema: {
|
|
15204
|
+
type: "object",
|
|
15205
|
+
properties: {
|
|
15206
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15207
|
+
offset: { type: "number", description: "Byte offset to patch" },
|
|
15208
|
+
hexData: { type: "string", description: "Hex bytes to write (e.g., '90 90 90')" },
|
|
15209
|
+
createBackup: { type: "boolean", description: "Create .bak backup before patching (default: true)" }
|
|
15210
|
+
},
|
|
15211
|
+
required: ["filePath", "offset", "hexData"]
|
|
15212
|
+
}
|
|
15213
|
+
},
|
|
15214
|
+
{
|
|
15215
|
+
name: "bin_nop_sled",
|
|
15216
|
+
description: "Insert NOP instructions at a specific offset (DESTRUCTIVE)",
|
|
15217
|
+
inputSchema: {
|
|
15218
|
+
type: "object",
|
|
15219
|
+
properties: {
|
|
15220
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15221
|
+
offset: { type: "number", description: "Byte offset for NOP sled" },
|
|
15222
|
+
count: { type: "number", description: "Number of NOP instructions to insert" },
|
|
15223
|
+
createBackup: { type: "boolean", description: "Create .bak backup (default: true)" }
|
|
15224
|
+
},
|
|
15225
|
+
required: ["filePath", "offset", "count"]
|
|
15226
|
+
}
|
|
15227
|
+
},
|
|
15228
|
+
{
|
|
15229
|
+
name: "bin_hex_editor",
|
|
15230
|
+
description: "Read or modify bytes at a specific offset (hex editor mode)",
|
|
15231
|
+
inputSchema: {
|
|
15232
|
+
type: "object",
|
|
15233
|
+
properties: {
|
|
15234
|
+
filePath: { type: "string", description: "Path to the file" },
|
|
15235
|
+
offset: { type: "number", description: "Byte offset" },
|
|
15236
|
+
length: { type: "number", description: "Number of bytes to read/modify" },
|
|
15237
|
+
newHex: { type: "string", description: "New hex bytes to write (optional - read mode if omitted)" }
|
|
15238
|
+
},
|
|
15239
|
+
required: ["filePath", "offset", "length"]
|
|
15240
|
+
}
|
|
14416
15241
|
}
|
|
14417
15242
|
];
|
|
14418
15243
|
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
|
@@ -14454,6 +15279,40 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
14454
15279
|
return await handleXxdExtract(args);
|
|
14455
15280
|
case "xxd_find_pattern":
|
|
14456
15281
|
return await handleXxdFindPattern(args);
|
|
15282
|
+
case "bin_strings":
|
|
15283
|
+
return await handleStrings(args);
|
|
15284
|
+
case "bin_file_info":
|
|
15285
|
+
return await handleFileInfo(args);
|
|
15286
|
+
case "bin_section_sizes":
|
|
15287
|
+
return await handleSectionSizes(args);
|
|
15288
|
+
case "bin_objdump_sections":
|
|
15289
|
+
return await handleObjdumpSections(args);
|
|
15290
|
+
case "bin_objdump_program_headers":
|
|
15291
|
+
return await handleObjdumpProgramHeaders(args);
|
|
15292
|
+
case "bin_otool_libs":
|
|
15293
|
+
return await handleOtoolLibs(args);
|
|
15294
|
+
case "bin_readelf":
|
|
15295
|
+
return await handleReadelf(args);
|
|
15296
|
+
case "bin_ldd":
|
|
15297
|
+
return await handleLdd(args);
|
|
15298
|
+
case "bin_disassembly":
|
|
15299
|
+
return await handleDisassembly(args);
|
|
15300
|
+
case "bin_security_audit":
|
|
15301
|
+
return await handleSecurityAudit(args);
|
|
15302
|
+
case "bin_entropy":
|
|
15303
|
+
return await handleEntropyAnalysis(args);
|
|
15304
|
+
case "bin_import_export":
|
|
15305
|
+
return await handleImportExport(args);
|
|
15306
|
+
case "bin_diff":
|
|
15307
|
+
return await handleBinaryDiff(args);
|
|
15308
|
+
case "bin_archive_extract":
|
|
15309
|
+
return await handleArchiveExtract(args);
|
|
15310
|
+
case "bin_patch_bytes":
|
|
15311
|
+
return await handlePatchBytes(args);
|
|
15312
|
+
case "bin_nop_sled":
|
|
15313
|
+
return await handleNopSled(args);
|
|
15314
|
+
case "bin_hex_editor":
|
|
15315
|
+
return await handleHexEditor(args);
|
|
14457
15316
|
default:
|
|
14458
15317
|
throw new Error(`Unknown tool: ${name}`);
|
|
14459
15318
|
}
|