iflow-mcp_developermode-korea_reversecore-mcp 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/METADATA +543 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/RECORD +79 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/WHEEL +5 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/entry_points.txt +2 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/licenses/LICENSE +21 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/top_level.txt +1 -0
- reversecore_mcp/__init__.py +9 -0
- reversecore_mcp/core/__init__.py +78 -0
- reversecore_mcp/core/audit.py +101 -0
- reversecore_mcp/core/binary_cache.py +138 -0
- reversecore_mcp/core/command_spec.py +357 -0
- reversecore_mcp/core/config.py +432 -0
- reversecore_mcp/core/container.py +288 -0
- reversecore_mcp/core/decorators.py +152 -0
- reversecore_mcp/core/error_formatting.py +93 -0
- reversecore_mcp/core/error_handling.py +142 -0
- reversecore_mcp/core/evidence.py +229 -0
- reversecore_mcp/core/exceptions.py +296 -0
- reversecore_mcp/core/execution.py +240 -0
- reversecore_mcp/core/ghidra.py +642 -0
- reversecore_mcp/core/ghidra_helper.py +481 -0
- reversecore_mcp/core/ghidra_manager.py +234 -0
- reversecore_mcp/core/json_utils.py +131 -0
- reversecore_mcp/core/loader.py +73 -0
- reversecore_mcp/core/logging_config.py +206 -0
- reversecore_mcp/core/memory.py +721 -0
- reversecore_mcp/core/metrics.py +198 -0
- reversecore_mcp/core/mitre_mapper.py +365 -0
- reversecore_mcp/core/plugin.py +45 -0
- reversecore_mcp/core/r2_helpers.py +404 -0
- reversecore_mcp/core/r2_pool.py +403 -0
- reversecore_mcp/core/report_generator.py +268 -0
- reversecore_mcp/core/resilience.py +252 -0
- reversecore_mcp/core/resource_manager.py +169 -0
- reversecore_mcp/core/result.py +132 -0
- reversecore_mcp/core/security.py +213 -0
- reversecore_mcp/core/validators.py +238 -0
- reversecore_mcp/dashboard/__init__.py +221 -0
- reversecore_mcp/prompts/__init__.py +56 -0
- reversecore_mcp/prompts/common.py +24 -0
- reversecore_mcp/prompts/game.py +280 -0
- reversecore_mcp/prompts/malware.py +1219 -0
- reversecore_mcp/prompts/report.py +150 -0
- reversecore_mcp/prompts/security.py +136 -0
- reversecore_mcp/resources.py +329 -0
- reversecore_mcp/server.py +727 -0
- reversecore_mcp/tools/__init__.py +49 -0
- reversecore_mcp/tools/analysis/__init__.py +74 -0
- reversecore_mcp/tools/analysis/capa_tools.py +215 -0
- reversecore_mcp/tools/analysis/die_tools.py +180 -0
- reversecore_mcp/tools/analysis/diff_tools.py +643 -0
- reversecore_mcp/tools/analysis/lief_tools.py +272 -0
- reversecore_mcp/tools/analysis/signature_tools.py +591 -0
- reversecore_mcp/tools/analysis/static_analysis.py +479 -0
- reversecore_mcp/tools/common/__init__.py +58 -0
- reversecore_mcp/tools/common/file_operations.py +352 -0
- reversecore_mcp/tools/common/memory_tools.py +516 -0
- reversecore_mcp/tools/common/patch_explainer.py +230 -0
- reversecore_mcp/tools/common/server_tools.py +115 -0
- reversecore_mcp/tools/ghidra/__init__.py +19 -0
- reversecore_mcp/tools/ghidra/decompilation.py +975 -0
- reversecore_mcp/tools/ghidra/ghidra_tools.py +1052 -0
- reversecore_mcp/tools/malware/__init__.py +61 -0
- reversecore_mcp/tools/malware/adaptive_vaccine.py +579 -0
- reversecore_mcp/tools/malware/dormant_detector.py +756 -0
- reversecore_mcp/tools/malware/ioc_tools.py +228 -0
- reversecore_mcp/tools/malware/vulnerability_hunter.py +519 -0
- reversecore_mcp/tools/malware/yara_tools.py +214 -0
- reversecore_mcp/tools/patch_explainer.py +19 -0
- reversecore_mcp/tools/radare2/__init__.py +13 -0
- reversecore_mcp/tools/radare2/r2_analysis.py +972 -0
- reversecore_mcp/tools/radare2/r2_session.py +376 -0
- reversecore_mcp/tools/radare2/radare2_mcp_tools.py +1183 -0
- reversecore_mcp/tools/report/__init__.py +4 -0
- reversecore_mcp/tools/report/email.py +82 -0
- reversecore_mcp/tools/report/report_mcp_tools.py +344 -0
- reversecore_mcp/tools/report/report_tools.py +1076 -0
- reversecore_mcp/tools/report/session.py +194 -0
- reversecore_mcp/tools/report_tools.py +11 -0
|
@@ -0,0 +1,1052 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Ghidra MCP Tools - Advanced binary analysis tools using Ghidra.
|
|
3
|
+
|
|
4
|
+
This module provides MCP tools for interacting with Ghidra's analysis capabilities
|
|
5
|
+
through the centralized GhidraService. It enables AI assistants to perform advanced
|
|
6
|
+
binary analysis, reverse engineering, and code annotation tasks.
|
|
7
|
+
|
|
8
|
+
Features:
|
|
9
|
+
- Structure/Enum/Data Type management
|
|
10
|
+
- Bookmark management
|
|
11
|
+
- Memory reading and patching
|
|
12
|
+
- Call graph analysis
|
|
13
|
+
- Function analysis triggers
|
|
14
|
+
|
|
15
|
+
Performance:
|
|
16
|
+
- Uses singleton GhidraService with project caching
|
|
17
|
+
- JVM is started once and reused across calls
|
|
18
|
+
- Projects are cached with LRU eviction
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
import asyncio
from typing import Any, Optional

from fastmcp import Context

from reversecore_mcp.core import json_utils as json  # Optimized JSON (3-5x faster)
from reversecore_mcp.core.config import get_config
from reversecore_mcp.core.decorators import log_execution
from reversecore_mcp.core.error_handling import handle_tool_errors
from reversecore_mcp.core.ghidra import ghidra_service
from reversecore_mcp.core.logging_config import get_logger
from reversecore_mcp.core.metrics import track_metrics
from reversecore_mcp.core.plugin import Plugin
from reversecore_mcp.core.result import ToolResult, failure, success
from reversecore_mcp.core.security import validate_file_path
|
|
35
|
+
|
|
36
|
+
logger = get_logger(__name__)
|
|
37
|
+
|
|
38
|
+
DEFAULT_TIMEOUT = get_config().default_tool_timeout
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# =============================================================================
|
|
42
|
+
# Helper Functions
|
|
43
|
+
# =============================================================================
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _get_ghidra_program(file_path: str):
    """
    Resolve a binary to its cached Ghidra (program, flat_api) pair.

    The JVM is started lazily on first use; subsequent calls reuse the
    already-running JVM and the LRU-cached project for this path.

    Returns:
        Tuple of (program, flat_api) from cached project

    Raises:
        ImportError: If PyGhidra is not available
    """
    if not ghidra_service.is_available():
        raise ImportError(
            "PyGhidra is not installed. Install with: pip install pyghidra"
        )

    ghidra_service._ensure_jvm_started()
    prog, api, _project = ghidra_service._get_project(file_path)
    return prog, api
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# =============================================================================
|
|
65
|
+
# Structure Tools
|
|
66
|
+
# =============================================================================
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_list_structures(
    file_path: str,
    offset: int = 0,
    limit: int = 100,
    ctx: Context = None,
) -> ToolResult:
    """
    List all defined structures in the program.

    The underlying Ghidra project is cached: the first call for a binary
    pays the load cost, later calls reuse the cached session.

    Args:
        file_path: Path to the binary file
        offset: Pagination offset (default: 0)
        limit: Maximum number of structures to return (default: 100)

    Returns:
        List of structure names with their sizes
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, _flat_api = _get_ghidra_program(str(validated_path))
            dtm = program.getDataTypeManager()

            collected = []
            seen = 0  # count of structures walked (skipped + collected)
            for struct_dt in dtm.getAllStructures():
                # Skip entries before the requested page.
                if seen < offset:
                    seen += 1
                    continue
                # Page is full; stop walking.
                if len(collected) >= limit:
                    break

                has_components = hasattr(struct_dt, "getNumComponents")
                collected.append({
                    "name": struct_dt.getName(),
                    "size": struct_dt.getLength(),
                    "category": str(struct_dt.getCategoryPath()),
                    "num_fields": struct_dt.getNumComponents() if has_components else 0,
                })
                seen += 1

            return success(
                {"structures": collected, "total": seen, "offset": offset, "limit": limit},
                description=f"Found {len(collected)} structures (cached project)",
            )

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("STRUCTURE_LIST_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_get_structure(
    file_path: str,
    name: str,
    ctx: Context = None,
) -> ToolResult:
    """
    Get detailed information about a specific structure.

    Args:
        file_path: Path to the binary file
        name: Name of the structure to retrieve

    Returns:
        Structure definition with all fields, offsets, and types
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, _flat_api = _get_ghidra_program(str(validated_path))
            dtm = program.getDataTypeManager()

            # First structure whose name matches exactly, or None.
            target = next(
                (dt for dt in dtm.getAllStructures() if dt.getName() == name),
                None,
            )
            if target is None:
                return failure("STRUCTURE_NOT_FOUND", f"Structure '{name}' not found")

            # Collect per-component field info when the type exposes components.
            members = []
            if hasattr(target, "getNumComponents"):
                for comp_idx in range(target.getNumComponents()):
                    comp = target.getComponent(comp_idx)
                    members.append({
                        "offset": f"0x{comp.getOffset():x}",
                        "name": comp.getFieldName() or f"field_{comp.getOffset():x}",
                        "type": comp.getDataType().getName(),
                        "size": comp.getLength(),
                        "comment": comp.getComment() or "",
                    })

            # Render a C-style definition for human consumption.
            member_lines = [
                f"    {f['type']} {f['name']}; // offset {f['offset']}, size {f['size']}"
                for f in members
            ]
            c_definition = f"struct {name} {{\n" + "\n".join(member_lines) + "\n};"

            return success({
                "name": name,
                "size": target.getLength(),
                "category": str(target.getCategoryPath()),
                "fields": members,
                "c_definition": c_definition,
            })

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("STRUCTURE_GET_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_create_structure(
    file_path: str,
    name: str,
    fields: str,
    size: int = 0,
    ctx: Context = None,
) -> ToolResult:
    """
    Create a new structure definition in the program's data type manager.

    Args:
        file_path: Path to the binary file
        name: Name for the new structure
        fields: JSON string of fields, e.g.,
            '[{"name": "id", "type": "int", "offset": 0},
              {"name": "data", "type": "char[32]", "offset": 4}]'
        size: Optional total size (0 = auto-calculate)

    Returns:
        Success message with created structure info
    """
    validated_path = validate_file_path(file_path)

    # Parse the fields payload up front so malformed JSON fails fast,
    # before any Ghidra work happens.
    # NOTE(review): `json` here is reversecore_mcp.core.json_utils — assumes it
    # re-exports JSONDecodeError like the stdlib module; verify.
    try:
        field_list = json.loads(fields)
    except json.JSONDecodeError as e:
        return failure("INVALID_FIELDS_JSON", f"Invalid fields JSON: {e}")

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))

            # Ghidra Java classes are only importable after the JVM is up,
            # hence the function-local import.
            from ghidra.program.model.data import StructureDataType, CategoryPath

            data_type_manager = program.getDataTypeManager()

            # Create structure (size 0 lets Ghidra grow it as fields are added)
            struct = StructureDataType(CategoryPath.ROOT, name, size)

            # Add fields
            for field in field_list:
                # Missing keys fall back to defaults rather than erroring.
                field_name = field.get("name", "unknown")
                field_type_str = field.get("type", "byte")
                field_offset = field.get("offset", 0)

                # Get or create data type (lookup by root-category path)
                field_type = data_type_manager.getDataType(f"/{field_type_str}")
                if field_type is None:
                    # Use default byte type when the named type is unknown
                    from ghidra.program.model.data import ByteDataType
                    field_type = ByteDataType.dataType

                struct.insertAtOffset(field_offset, field_type, field_type.getLength(), field_name, None)

            # Add to program inside a transaction; commit on success,
            # roll back and re-raise on any failure.
            transaction = program.startTransaction("Create Structure")
            try:
                data_type_manager.addDataType(struct, None)
                program.endTransaction(transaction, True)
            except Exception:
                program.endTransaction(transaction, False)
                raise

            return success({
                "name": name,
                "size": struct.getLength(),
                "fields_count": len(field_list),
            }, description=f"Created structure '{name}' with {len(field_list)} fields")

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # Drop the cached project: it may be in a bad state after a failure.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("STRUCTURE_CREATE_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
# =============================================================================
|
|
289
|
+
# Enum Tools
|
|
290
|
+
# =============================================================================
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_list_enums(
    file_path: str,
    offset: int = 0,
    limit: int = 100,
    ctx: Context = None,
) -> ToolResult:
    """
    List all defined enums in the program.

    Args:
        file_path: Path to the binary file
        offset: Pagination offset (default: 0)
        limit: Maximum number of enums to return (default: 100)

    Returns:
        List of enum names with their values
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, _flat_api = _get_ghidra_program(str(validated_path))
            dtm = program.getDataTypeManager()

            collected = []
            seen = 0  # enums walked (skipped + collected)
            for enum_dt in dtm.getAllDataTypes():
                # Duck-typed enum detection: only enum-like types expose getCount().
                if not hasattr(enum_dt, "getCount"):
                    continue

                if seen < offset:
                    seen += 1
                    continue
                if len(collected) >= limit:
                    break

                # Gather members; keep whatever was read if iteration fails mid-way.
                members = []
                try:
                    for member_idx in range(enum_dt.getCount()):
                        members.append({
                            "name": enum_dt.getName(member_idx),
                            "value": enum_dt.getValue(member_idx),
                        })
                except Exception:
                    pass

                collected.append({
                    "name": enum_dt.getName(),
                    "size": enum_dt.getLength(),
                    "count": enum_dt.getCount(),  # guaranteed by the hasattr gate above
                    "values": members[:10],  # Limit values shown
                })
                seen += 1

            return success(
                {"enums": collected, "total": seen, "offset": offset, "limit": limit},
                description=f"Found {len(collected)} enums",
            )

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("ENUM_LIST_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
# =============================================================================
|
|
370
|
+
# Data Type Tools
|
|
371
|
+
# =============================================================================
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_list_data_types(
    file_path: str,
    category: str = None,
    offset: int = 0,
    limit: int = 100,
    ctx: Context = None,
) -> ToolResult:
    """
    List all data types in the program's data type manager.

    Args:
        file_path: Path to the binary file
        category: Optional category filter (e.g., "BuiltIn", "Structure", "Enum")
        offset: Pagination offset (default: 0)
        limit: Maximum number of types to return (default: 100)

    Returns:
        List of data type names with their categories and sizes
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, _flat_api = _get_ghidra_program(str(validated_path))
            dtm = program.getDataTypeManager()

            collected = []
            seen = 0  # matching types walked (skipped + collected)
            for dtype in dtm.getAllDataTypes():
                cat_path = str(dtype.getCategoryPath())

                # Case-insensitive substring filter, applied before pagination.
                if category and category.lower() not in cat_path.lower():
                    continue

                if seen < offset:
                    seen += 1
                    continue
                if len(collected) >= limit:
                    break

                collected.append({
                    "name": dtype.getName(),
                    "category": cat_path,
                    "size": dtype.getLength(),
                    "description": dtype.getDescription() or "",
                })
                seen += 1

            return success(
                {"data_types": collected, "total": seen, "offset": offset, "limit": limit},
                description=f"Found {len(collected)} data types",
            )

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("DATA_TYPE_LIST_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
# =============================================================================
|
|
444
|
+
# Bookmark Tools
|
|
445
|
+
# =============================================================================
|
|
446
|
+
|
|
447
|
+
|
|
448
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_list_bookmarks(
    file_path: str,
    bookmark_type: str = None,
    offset: int = 0,
    limit: int = 100,
    ctx: Context = None,
) -> ToolResult:
    """
    List all bookmarks in the program.

    Args:
        file_path: Path to the binary file
        bookmark_type: Optional type filter ("Note", "Warning", "Error", "Info")
        offset: Pagination offset (default: 0)
        limit: Maximum number of bookmarks to return (default: 100)

    Returns:
        List of bookmarks with addresses, types, and comments
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, _flat_api = _get_ghidra_program(str(validated_path))
            bm_manager = program.getBookmarkManager()

            collected = []
            seen = 0  # matching bookmarks walked (skipped + collected)
            for bm in bm_manager.getBookmarksIterator():
                type_str = bm.getTypeString()

                # Case-insensitive type filter, applied before pagination.
                if bookmark_type and type_str.lower() != bookmark_type.lower():
                    continue

                if seen < offset:
                    seen += 1
                    continue
                if len(collected) >= limit:
                    break

                collected.append({
                    "address": str(bm.getAddress()),
                    "type": type_str,
                    "category": bm.getCategory(),
                    "comment": bm.getComment(),
                })
                seen += 1

            return success(
                {"bookmarks": collected, "total": seen, "offset": offset, "limit": limit},
                description=f"Found {len(collected)} bookmarks",
            )

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("BOOKMARK_LIST_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
515
|
+
|
|
516
|
+
|
|
517
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_add_bookmark(
    file_path: str,
    address: str,
    category: str,
    comment: str,
    bookmark_type: str = "Note",
    ctx: Context = None,
) -> ToolResult:
    """
    Add a bookmark at the specified address.

    Args:
        file_path: Path to the binary file
        address: Address to bookmark (e.g., "0x1400010a0")
        category: Category for the bookmark (e.g., "Analysis", "TODO")
        comment: Comment/description for the bookmark
        bookmark_type: Type of bookmark ("Note", "Warning", "Error", "Info")

    Returns:
        Success message
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))
            bm_manager = program.getBookmarkManager()

            # Resolve the textual address to a Ghidra Address object.
            addr = flat_api.toAddr(address)
            if addr is None:
                return failure("INVALID_ADDRESS", f"Could not parse address: {address}")

            # Write the bookmark inside a transaction: commit only if the
            # setBookmark call succeeded, otherwise roll back and re-raise.
            committed = False
            transaction = program.startTransaction("Add Bookmark")
            try:
                bm_manager.setBookmark(addr, bookmark_type, category, comment)
                committed = True
            finally:
                program.endTransaction(transaction, committed)

            return success({
                "address": address,
                "type": bookmark_type,
                "category": category,
                "comment": comment,
            }, description=f"Added bookmark at {address}")

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("BOOKMARK_ADD_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
# =============================================================================
|
|
580
|
+
# Memory Tools
|
|
581
|
+
# =============================================================================
|
|
582
|
+
|
|
583
|
+
|
|
584
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_read_memory(
    file_path: str,
    address: str,
    length: int = 256,
    ctx: Context = None,
) -> ToolResult:
    """
    Read raw bytes from memory at the specified address.

    Args:
        file_path: Path to the binary file
        address: Starting address (e.g., "0x1400010a0")
        length: Number of bytes to read (default: 256, max: 4096)

    Returns:
        Hex dump of memory contents
    """
    validated_path = validate_file_path(file_path)
    length = min(length, 4096)  # Cap at 4KB

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))

            # Resolve the textual address to a Ghidra Address object.
            addr = flat_api.toAddr(address)
            if addr is None:
                return failure("INVALID_ADDRESS", f"Could not parse address: {address}")

            # Read bytes (Java signed bytes — mask with 0xFF when formatting).
            raw = flat_api.getBytes(addr, length)
            if raw is None:
                return failure("MEMORY_READ_ERROR", f"Could not read memory at {address}")

            # Flat hex string of the whole read.
            hex_bytes = " ".join(f"{b & 0xFF:02X}" for b in raw)

            # Classic 16-bytes-per-row hex dump with an ASCII gutter.
            dump_rows = []
            for row_start in range(0, len(raw), 16):
                row = raw[row_start:row_start + 16]
                hex_part = " ".join(f"{b & 0xFF:02X}" for b in row)
                ascii_part = "".join(
                    chr(b & 0xFF) if 32 <= (b & 0xFF) <= 126 else "."
                    for b in row
                )
                line_addr = addr.add(row_start)
                dump_rows.append(f"{line_addr}: {hex_part:<48} |{ascii_part}|")

            return success({
                "address": address,
                "length": len(raw),
                "hex_bytes": hex_bytes,
                "hex_dump": "\n".join(dump_rows),
            })

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("MEMORY_READ_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
652
|
+
|
|
653
|
+
|
|
654
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_get_bytes(
    file_path: str,
    address: str,
    length: int = 64,
    ctx: Context = None,
) -> ToolResult:
    """
    Get bytes at the specified address as a hex string.

    Args:
        file_path: Path to the binary file
        address: Starting address (e.g., "0x1400010a0")
        length: Number of bytes to retrieve (default: 64, max: 1024)

    Returns:
        Hex string of bytes
    """
    validated_path = validate_file_path(file_path)
    length = min(length, 1024)

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))

            # Resolve the textual address to a Ghidra Address object.
            addr = flat_api.toAddr(address)
            if addr is None:
                return failure("INVALID_ADDRESS", f"Could not parse address: {address}")

            # Read bytes (Java signed bytes — mask with 0xFF when formatting).
            raw = flat_api.getBytes(addr, length)
            if raw is None:
                return failure("MEMORY_READ_ERROR", f"Could not read bytes at {address}")

            return success({
                "address": address,
                "length": len(raw),
                "bytes": " ".join(f"{b & 0xFF:02X}" for b in raw),
            })

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("GET_BYTES_ERROR", str(e))

    return await asyncio.to_thread(_impl)
|
|
708
|
+
|
|
709
|
+
|
|
710
|
+
# =============================================================================
|
|
711
|
+
# Patching Tools
|
|
712
|
+
# =============================================================================
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_simulate_patch(
    file_path: str,
    address: str,
    hex_bytes: str,
    ctx: Context = None,
) -> ToolResult:
    """
    Simulate patching bytes at the specified address in the cached project.

    WARNING: This tool ONLY modifies the binary in Ghidra's cached project database.
    It DOES NOT modify the actual file on disk. Changes persist in the cache
    until the project is evicted or server restarts. To apply patches to the
    file, you must export the binary (feature coming soon).

    Fixes vs. previous revision:
    - The decorator stack was applied twice (metrics/logging/error handling
      each ran double); now applied once, matching every other tool here.
    - Byte values > 0x7F are converted to Java's signed byte range before
      Memory.setByte, so patches like "90 90 90" (NOPs) no longer overflow.

    Args:
        file_path: Path to the binary file
        address: Starting address to patch (e.g., "0x1400010a0")
        hex_bytes: Hex string of bytes to write (e.g., "90 90 90" for NOPs)

    Returns:
        Success message with number of bytes patched
    """
    validated_path = validate_file_path(file_path)

    # Parse hex bytes (fast, can stay in main thread)
    try:
        hex_bytes_clean = hex_bytes.replace(" ", "").replace(",", "")
        if len(hex_bytes_clean) % 2 != 0:
            return failure("INVALID_HEX", "Hex string must have even length")

        byte_values = bytes.fromhex(hex_bytes_clean)
    except ValueError as e:
        return failure("INVALID_HEX", f"Invalid hex string: {e}")

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))
            memory = program.getMemory()

            # Parse address
            addr = flat_api.toAddr(address)
            if addr is None:
                return failure("INVALID_ADDRESS", f"Could not parse address: {address}")

            # Patch bytes inside a transaction; roll back on any failure.
            transaction = program.startTransaction("Patch Bytes")
            try:
                for i, byte_val in enumerate(byte_values):
                    # Java byte is signed (-128..127); map 0x80..0xFF into the
                    # negative range so the JPype conversion doesn't overflow.
                    signed_val = byte_val if byte_val < 0x80 else byte_val - 0x100
                    memory.setByte(addr.add(i), signed_val)
                program.endTransaction(transaction, True)
            except Exception:
                program.endTransaction(transaction, False)
                raise

            return success({
                "address": address,
                "bytes_patched": len(byte_values),
                "new_bytes": hex_bytes,
            }, description=f"Simulated patch of {len(byte_values)} bytes at {address} (Ghidra cache only)")

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be corrupt after a failure; drop it.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("PATCH_ERROR", str(e))

    # Run blocking Ghidra operations in a separate thread
    return await asyncio.to_thread(_impl)
|
|
789
|
+
|
|
790
|
+
|
|
791
|
+
# =============================================================================
|
|
792
|
+
# Analysis Tools
|
|
793
|
+
# =============================================================================
|
|
794
|
+
|
|
795
|
+
|
|
796
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_analyze_function(
    file_path: str,
    address: str,
    ctx: Context = None,
) -> ToolResult:
    """
    Trigger Ghidra's analysis on a specific function.

    If no function is defined at the address, a CreateFunctionCmd is issued
    to create one first. Useful after making changes or for functions that
    weren't fully analyzed.

    Args:
        file_path: Path to the binary file
        address: Address of the function to analyze (e.g., "0x1400010a0")

    Returns:
        Analysis result summary with function details
    """
    validated_path = validate_file_path(file_path)

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))

            # Imported lazily so a missing Ghidra runtime surfaces as a
            # GHIDRA_NOT_AVAILABLE failure instead of an import-time crash.
            from ghidra.app.cmd.function import CreateFunctionCmd

            fn_manager = program.getFunctionManager()

            # Resolve the textual address against the program's address space.
            target = flat_api.toAddr(address)
            if target is None:
                return failure("INVALID_ADDRESS", f"Could not parse address: {address}")

            fn = fn_manager.getFunctionAt(target)

            # Any mutation (function creation) must run inside a transaction.
            txn = program.startTransaction("Analyze Function")
            try:
                if fn is None:
                    # Nothing defined here yet -- ask Ghidra to create one.
                    CreateFunctionCmd(target).applyTo(program)
                    fn = fn_manager.getFunctionAt(target)

                if fn is None:
                    program.endTransaction(txn, False)
                    return failure("FUNCTION_NOT_FOUND", f"Could not find or create function at {address}")

                # Summarize the (possibly freshly created) function.
                info = {
                    "name": fn.getName(),
                    "address": str(fn.getEntryPoint()),
                    "signature": str(fn.getSignature()),
                    "body_size": fn.getBody().getNumAddresses(),
                    "parameter_count": fn.getParameterCount(),
                    "local_variable_count": len(list(fn.getLocalVariables())),
                    "calling_convention": str(fn.getCallingConvention()),
                }

                program.endTransaction(txn, True)

                return success(info, description=f"Analyzed function '{fn.getName()}'")

            except Exception:
                # Roll back, then let the outer handler classify the error.
                program.endTransaction(txn, False)
                raise

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # The cached project may be in a bad state -- drop it so the
            # next call reopens cleanly.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("ANALYZE_ERROR", str(e))

    # Ghidra calls block; keep them off the event loop.
    return await asyncio.to_thread(_impl)
|
|
873
|
+
|
|
874
|
+
|
|
875
|
+
@handle_tool_errors
@log_execution
@track_metrics
async def Ghidra_get_call_graph(
    file_path: str,
    address: str,
    depth: int = 3,
    direction: str = "both",
    ctx: Context = None,
) -> ToolResult:
    """
    Get the direct callers and callees of a function.

    Collects functions that call the target (via call-type references to its
    entry point) and functions the target calls (via call-type references from
    its instructions), plus an ASCII rendering of the relationships.

    Args:
        file_path: Path to the binary file
        address: Address of the function (e.g., "0x1400010a0")
        depth: Clamped to at most 10, but NOTE(review): currently unused --
            only one level (direct callers/callees) is traversed. Confirm
            whether multi-level traversal is intended before relying on it.
        direction: "callers" (who calls this), "callees" (what this calls), or "both"

    Returns:
        Call graph showing direct function relationships (callers, callees,
        counts, and a text "graph" rendering)
    """
    validated_path = validate_file_path(file_path)
    # Upper-bound only; depth is not otherwise consulted below.
    depth = min(depth, 10)

    def _impl():
        try:
            program, flat_api = _get_ghidra_program(str(validated_path))

            function_manager = program.getFunctionManager()
            reference_manager = program.getReferenceManager()

            # Parse address
            addr = flat_api.toAddr(address)
            if addr is None:
                return failure("INVALID_ADDRESS", f"Could not parse address: {address}")

            # Get function: exact entry-point match first, then any function
            # whose body contains the address.
            func = function_manager.getFunctionAt(addr)
            if func is None:
                func = function_manager.getFunctionContaining(addr)

            if func is None:
                return failure("FUNCTION_NOT_FOUND", f"No function at {address}")

            root_name = func.getName()
            root_addr = str(func.getEntryPoint())

            callers = []
            callees = []

            # Get callers (functions that call this function)
            if direction in ("callers", "both"):
                refs_to = reference_manager.getReferencesTo(func.getEntryPoint())
                # Deduplicate by function name (one entry per caller).
                seen_callers = set()

                for ref in refs_to:
                    if ref.getReferenceType().isCall():
                        caller_func = function_manager.getFunctionContaining(ref.getFromAddress())
                        if caller_func and caller_func.getName() not in seen_callers:
                            seen_callers.add(caller_func.getName())
                            callers.append({
                                "name": caller_func.getName(),
                                "address": str(caller_func.getEntryPoint()),
                            })

            # Get callees (functions called by this function)
            if direction in ("callees", "both"):
                seen_callees = set()
                body = func.getBody()
                # Walk every instruction in the function body, forward order.
                instr_iter = program.getListing().getInstructions(body, True)

                for instr in instr_iter:
                    refs = instr.getReferencesFrom()
                    for ref in refs:
                        if ref.getReferenceType().isCall():
                            callee_func = function_manager.getFunctionAt(ref.getToAddress())
                            if callee_func and callee_func.getName() not in seen_callees:
                                seen_callees.add(callee_func.getName())
                                callees.append({
                                    "name": callee_func.getName(),
                                    "address": str(callee_func.getEntryPoint()),
                                })

            # Build graph representation (simplified for output)
            graph_lines = [f"Call Graph for {root_name} ({root_addr})", "=" * 50]

            if callers:
                graph_lines.append(f"\n📥 Callers ({len(callers)}):")
                for c in callers[:20]:  # Limit output
                    graph_lines.append(f"  ← {c['name']} ({c['address']})")

            graph_lines.append(f"\n🎯 {root_name} ({root_addr})")

            if callees:
                graph_lines.append(f"\n📤 Callees ({len(callees)}):")
                for c in callees[:20]:
                    graph_lines.append(f"  → {c['name']} ({c['address']})")

            return success({
                "function": root_name,
                "address": root_addr,
                "callers": callers,
                "callees": callees,
                "caller_count": len(callers),
                "callee_count": len(callees),
                "graph": "\n".join(graph_lines),
            }, description=f"Generated call graph for '{root_name}' (cached project)")

        except ImportError as e:
            return failure("GHIDRA_NOT_AVAILABLE", str(e))
        except Exception as e:
            # Evict the possibly-corrupt cached project before reporting.
            ghidra_service._invalidate_project(str(validated_path))
            return failure("CALL_GRAPH_ERROR", str(e))

    # Blocking Ghidra work runs off the event loop.
    return await asyncio.to_thread(_impl)
|
|
991
|
+
|
|
992
|
+
|
|
993
|
+
# =============================================================================
|
|
994
|
+
# Plugin Registration
|
|
995
|
+
# =============================================================================
|
|
996
|
+
|
|
997
|
+
|
|
998
|
+
class GhidraToolsPlugin(Plugin):
    """Plugin for advanced Ghidra analysis tools."""

    @property
    def name(self) -> str:
        return "ghidra_tools"

    @property
    def description(self) -> str:
        return "Advanced Ghidra analysis tools using cached GhidraService for structures, enums, memory, and call graphs."

    def register(self, mcp_server: Any) -> None:
        """Register Ghidra tools.

        Registers all tools from a single list and logs the actual count,
        fixing the previous hard-coded "17 tools" message (16 were registered).
        """
        # Deferred import to avoid a circular dependency at module load time
        # (matches the original in-method import placement).
        from reversecore_mcp.tools.ghidra.decompilation import (
            emulate_machine_code,
            get_pseudo_code,
            smart_decompile,
            recover_structures,
        )

        tools = [
            # Structure tools
            Ghidra_list_structures,
            Ghidra_get_structure,
            Ghidra_create_structure,
            # Enum tools
            Ghidra_list_enums,
            # Data type tools
            Ghidra_list_data_types,
            # Bookmark tools
            Ghidra_list_bookmarks,
            Ghidra_add_bookmark,
            # Memory tools
            Ghidra_read_memory,
            Ghidra_get_bytes,
            # Patching tools
            Ghidra_simulate_patch,
            # Analysis tools
            Ghidra_analyze_function,
            Ghidra_get_call_graph,
            # Decompilation tools (from decompilation module)
            emulate_machine_code,
            get_pseudo_code,
            smart_decompile,
            recover_structures,
        ]
        for tool in tools:
            mcp_server.tool(tool)

        # Derive the count from the list so the log can never drift again.
        logger.info(f"Registered {self.name} plugin with {len(tools)} Ghidra tools (unified)")
|
|
1052
|
+
|