iflow-mcp_developermode-korea_reversecore-mcp 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/METADATA +543 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/RECORD +79 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/WHEEL +5 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/entry_points.txt +2 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/licenses/LICENSE +21 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/top_level.txt +1 -0
- reversecore_mcp/__init__.py +9 -0
- reversecore_mcp/core/__init__.py +78 -0
- reversecore_mcp/core/audit.py +101 -0
- reversecore_mcp/core/binary_cache.py +138 -0
- reversecore_mcp/core/command_spec.py +357 -0
- reversecore_mcp/core/config.py +432 -0
- reversecore_mcp/core/container.py +288 -0
- reversecore_mcp/core/decorators.py +152 -0
- reversecore_mcp/core/error_formatting.py +93 -0
- reversecore_mcp/core/error_handling.py +142 -0
- reversecore_mcp/core/evidence.py +229 -0
- reversecore_mcp/core/exceptions.py +296 -0
- reversecore_mcp/core/execution.py +240 -0
- reversecore_mcp/core/ghidra.py +642 -0
- reversecore_mcp/core/ghidra_helper.py +481 -0
- reversecore_mcp/core/ghidra_manager.py +234 -0
- reversecore_mcp/core/json_utils.py +131 -0
- reversecore_mcp/core/loader.py +73 -0
- reversecore_mcp/core/logging_config.py +206 -0
- reversecore_mcp/core/memory.py +721 -0
- reversecore_mcp/core/metrics.py +198 -0
- reversecore_mcp/core/mitre_mapper.py +365 -0
- reversecore_mcp/core/plugin.py +45 -0
- reversecore_mcp/core/r2_helpers.py +404 -0
- reversecore_mcp/core/r2_pool.py +403 -0
- reversecore_mcp/core/report_generator.py +268 -0
- reversecore_mcp/core/resilience.py +252 -0
- reversecore_mcp/core/resource_manager.py +169 -0
- reversecore_mcp/core/result.py +132 -0
- reversecore_mcp/core/security.py +213 -0
- reversecore_mcp/core/validators.py +238 -0
- reversecore_mcp/dashboard/__init__.py +221 -0
- reversecore_mcp/prompts/__init__.py +56 -0
- reversecore_mcp/prompts/common.py +24 -0
- reversecore_mcp/prompts/game.py +280 -0
- reversecore_mcp/prompts/malware.py +1219 -0
- reversecore_mcp/prompts/report.py +150 -0
- reversecore_mcp/prompts/security.py +136 -0
- reversecore_mcp/resources.py +329 -0
- reversecore_mcp/server.py +727 -0
- reversecore_mcp/tools/__init__.py +49 -0
- reversecore_mcp/tools/analysis/__init__.py +74 -0
- reversecore_mcp/tools/analysis/capa_tools.py +215 -0
- reversecore_mcp/tools/analysis/die_tools.py +180 -0
- reversecore_mcp/tools/analysis/diff_tools.py +643 -0
- reversecore_mcp/tools/analysis/lief_tools.py +272 -0
- reversecore_mcp/tools/analysis/signature_tools.py +591 -0
- reversecore_mcp/tools/analysis/static_analysis.py +479 -0
- reversecore_mcp/tools/common/__init__.py +58 -0
- reversecore_mcp/tools/common/file_operations.py +352 -0
- reversecore_mcp/tools/common/memory_tools.py +516 -0
- reversecore_mcp/tools/common/patch_explainer.py +230 -0
- reversecore_mcp/tools/common/server_tools.py +115 -0
- reversecore_mcp/tools/ghidra/__init__.py +19 -0
- reversecore_mcp/tools/ghidra/decompilation.py +975 -0
- reversecore_mcp/tools/ghidra/ghidra_tools.py +1052 -0
- reversecore_mcp/tools/malware/__init__.py +61 -0
- reversecore_mcp/tools/malware/adaptive_vaccine.py +579 -0
- reversecore_mcp/tools/malware/dormant_detector.py +756 -0
- reversecore_mcp/tools/malware/ioc_tools.py +228 -0
- reversecore_mcp/tools/malware/vulnerability_hunter.py +519 -0
- reversecore_mcp/tools/malware/yara_tools.py +214 -0
- reversecore_mcp/tools/patch_explainer.py +19 -0
- reversecore_mcp/tools/radare2/__init__.py +13 -0
- reversecore_mcp/tools/radare2/r2_analysis.py +972 -0
- reversecore_mcp/tools/radare2/r2_session.py +376 -0
- reversecore_mcp/tools/radare2/radare2_mcp_tools.py +1183 -0
- reversecore_mcp/tools/report/__init__.py +4 -0
- reversecore_mcp/tools/report/email.py +82 -0
- reversecore_mcp/tools/report/report_mcp_tools.py +344 -0
- reversecore_mcp/tools/report/report_tools.py +1076 -0
- reversecore_mcp/tools/report/session.py +194 -0
- reversecore_mcp/tools/report_tools.py +11 -0
@@ -0,0 +1,352 @@
"""File operation tools for managing workspace and file handling."""

import shutil
from pathlib import Path

from reversecore_mcp.core.config import get_config
from reversecore_mcp.core.decorators import log_execution
from reversecore_mcp.core.error_handling import handle_tool_errors
from reversecore_mcp.core.exceptions import ValidationError
from reversecore_mcp.core.execution import execute_subprocess_async
from reversecore_mcp.core.metrics import track_metrics
from reversecore_mcp.core.result import ToolResult, success
from reversecore_mcp.core.security import validate_file_path

# Load default timeout from configuration
DEFAULT_TIMEOUT = get_config().default_tool_timeout


@log_execution(tool_name="run_file")
@track_metrics("run_file")
@handle_tool_errors
async def run_file(file_path: str, timeout: int = DEFAULT_TIMEOUT) -> ToolResult:
    """Identify file metadata using the ``file`` CLI utility."""
    validated_path = validate_file_path(file_path)
    cmd = ["file", str(validated_path)]
    output, bytes_read = await execute_subprocess_async(
        cmd,
        max_output_size=1_000_000,
        timeout=timeout,
    )
    output = output.strip()

    # Try to infer the MIME type from the output (simple heuristic)
    mime_type = "application/octet-stream"
    if "text" in output.lower():
        mime_type = "text/plain"
    elif "executable" in output.lower():
        mime_type = "application/x-executable"
    elif "image" in output.lower():
        mime_type = "image/" + output.split()[0].lower()

    return success(
        {
            "file_type": output,
            "file_path": str(validated_path),
            "file_name": validated_path.name,
            "mime_type": mime_type,
        },
        bytes_read=bytes_read,
        raw_output=output,
    )
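

# Illustrative usage sketch (assumption: not part of the packaged module).
# Assumes a POSIX `file` binary on PATH and a path that passes
# validate_file_path; "/tmp/sample.bin" is a hypothetical sample.
if __name__ == "__main__":
    import asyncio

    async def _demo_run_file() -> None:
        result = await run_file("/tmp/sample.bin")
        if result.status == "success":
            # result.data carries file_type, file_path, file_name and mime_type
            print(result.data["file_type"])
            print(result.data["mime_type"])  # e.g. "application/x-executable"

    asyncio.run(_demo_run_file())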


@log_execution(tool_name="copy_to_workspace")
@track_metrics("copy_to_workspace")
@handle_tool_errors
def copy_to_workspace(
    source_path: str,
    destination_name: str = None,
) -> ToolResult:
    """
    Copy any accessible file to the workspace directory.

    This tool allows copying files from any location (including AI agent
    upload directories) to the workspace where other reverse engineering
    tools can access them.

    Supports files from:
    - Claude Desktop uploads (/mnt/user-data/uploads)
    - Cursor uploads
    - Windsurf uploads
    - Local file paths
    - Any other accessible location

    Args:
        source_path: Absolute or relative path to the source file
        destination_name: Optional custom filename in workspace (defaults to original name)

    Returns:
        ToolResult with the new file path in workspace
    """
    # Convert to Path and resolve (but don't require strict=True for external files)
    try:
        source = Path(source_path).expanduser().resolve()
    except Exception as e:
        raise ValidationError(
            f"Invalid source path: {source_path}",
            details={"source_path": source_path, "error": str(e)},
        )

    # Validate that the source exists and is a regular file
    if not source.exists():
        raise ValidationError(
            f"Source file does not exist: {source}",
            details={"source_path": str(source)},
        )

    if not source.is_file():
        raise ValidationError(
            f"Source path is not a file: {source}", details={"source_path": str(source)}
        )

    # Check the file size (prevent copying extremely large files)
    max_file_size = 5 * 1024 * 1024 * 1024  # 5 GB
    file_size = source.stat().st_size
    if file_size > max_file_size:
        raise ValidationError(
            f"File too large to copy: {file_size} bytes (max: {max_file_size} bytes)",
            details={"file_size": file_size, "max_size": max_file_size},
        )

    # Determine the destination filename
    if destination_name:
        # Sanitize the destination name (strip path separators and dangerous chars)
        dest_name = Path(destination_name).name
        # If sanitization changed the name, the input contained path components
        if dest_name != destination_name or not dest_name:
            raise ValidationError(
                f"Invalid destination name: {destination_name}",
                details={"destination_name": destination_name},
            )
    else:
        dest_name = source.name

    # Build the destination path in the workspace
    config = get_config()
    destination = config.workspace / dest_name

    # Copy the file to the workspace using atomic exclusive creation.
    # This prevents a TOCTOU race condition where another process could create
    # the file between an exists() check and a copy2() call.
    try:
        # Mode 'xb' = exclusive create + binary; fails if the file already exists
        with open(destination, "xb") as dest_file:
            with open(source, "rb") as src_file:
                shutil.copyfileobj(src_file, dest_file)

        # Preserve metadata (like shutil.copy2)
        shutil.copystat(source, destination)
        copied_size = destination.stat().st_size

        return success(
            str(destination),
            source_path=str(source),
            destination_path=str(destination),
            file_size=copied_size,
            message=f"File copied successfully to workspace: {dest_name}",
        )
    except FileExistsError:
        raise ValidationError(
            f"File already exists in workspace: {dest_name}",
            details={
                "destination": str(destination),
                "hint": "Use a different destination_name or remove the existing file first",
            },
        )
    except PermissionError as e:
        raise ValidationError(
            f"Permission denied when copying file: {e}",
            details={"source": str(source), "destination": str(destination)},
        )
    except Exception as e:
        raise ValidationError(
            f"Failed to copy file: {e}",
            details={
                "source": str(source),
                "destination": str(destination),
                "error": str(e),
            },
        )
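

# Illustrative sketch (assumption: not part of the packaged module): the
# TOCTOU-safe copy pattern above, in isolation. open(..., "xb") creates the
# destination atomically, so there is no window between an existence check
# and the write. `_atomic_copy` is a hypothetical helper; it reuses the
# shutil and Path imports at the top of this file.
def _atomic_copy(src: Path, dst: Path) -> None:
    # "xb" = exclusive create + binary: raises FileExistsError if dst exists
    with open(dst, "xb") as dest_file:
        with open(src, "rb") as src_file:
            shutil.copyfileobj(src_file, dest_file)
    shutil.copystat(src, dst)  # preserve timestamps and permissions, like copy2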


@log_execution(tool_name="list_workspace")
@track_metrics("list_workspace")
@handle_tool_errors
def list_workspace() -> ToolResult:
    """
    List all files in the workspace directory.

    Returns:
        ToolResult with list of files in workspace
    """
    config = get_config()
    workspace = config.workspace

    if not workspace.exists():
        return success(
            {"files": [], "message": "Workspace is empty"},
            file_count=0,
            workspace_path=str(workspace),
        )

    files = []
    for item in workspace.iterdir():
        if item.is_file():
            files.append({"name": item.name, "size": item.stat().st_size, "path": str(item)})

    return success({"files": files}, file_count=len(files), workspace_path=str(workspace))


@log_execution(tool_name="scan_workspace")
@track_metrics("scan_workspace")
@handle_tool_errors
async def scan_workspace(
    file_patterns: list[str] = None,
    timeout: int = 600,
    ctx=None,
) -> ToolResult:
    """
    Batch scan all files in the workspace using multiple tools in parallel.

    This tool performs a comprehensive scan of the workspace to identify files,
    analyze binaries, and detect threats. It runs 'run_file', 'parse_binary_with_lief',
    and 'run_yara' (if rules exist) on all matching files concurrently.

    **Workflow:**
    1. Identify files matching patterns (default: all files)
    2. Run 'file' command on all files
    3. Run 'LIEF' analysis on executable files
    4. Run 'YARA' scan if rules are available
    5. Aggregate results into a single report

    Args:
        file_patterns: List of glob patterns to include (e.g., ["*.exe", "*.dll"]).
            Default is ["*"] (all files).
        timeout: Global timeout for the batch operation in seconds.
        ctx: FastMCP Context for progress reporting (auto-injected)

    Returns:
        ToolResult with aggregated scan results for all files.
    """
    import asyncio

    from reversecore_mcp.core import json_utils as json
    from reversecore_mcp.tools.analysis.lief_tools import parse_binary_with_lief

    config = get_config()
    workspace = config.workspace

    if not file_patterns:
        file_patterns = ["*"]

    # 1. Collect files.
    # OPTIMIZATION: use a set to avoid duplicates during collection instead of after.
    files_to_scan_set = set()
    for pattern in file_patterns:
        for f in workspace.glob(pattern):
            if f.is_file():
                files_to_scan_set.add(f)

    files_to_scan = list(files_to_scan_set)

    if not files_to_scan:
        return success({"files": [], "summary": "No files found matching patterns"}, file_count=0)

    total_files = len(files_to_scan)

    # 2. Define scan tasks.
    results = {}
    completed_count = 0

    async def scan_single_file(file_path: Path, index: int):
        nonlocal completed_count
        path_str = str(file_path)
        file_name = file_path.name
        file_result = {"name": file_name, "path": path_str}

        # Task 1: run_file (async). The tool function is called directly and awaited.
        try:
            file_cmd_result = await run_file(path_str)
            file_result["file_type"] = (
                file_cmd_result.data if file_cmd_result.status == "success" else "unknown"
            )
        except Exception as e:
            file_result["file_type_error"] = str(e)

        # Task 2: LIEF (sync, run in a thread). Only for likely binaries.
        if "executable" in str(
            file_result.get("file_type", "")
        ).lower() or file_path.suffix.lower() in [
            ".exe",
            ".dll",
            ".so",
            ".dylib",
            ".bin",
            ".elf",
        ]:
            try:
                # Run the sync function in a thread pool
                lief_result = await asyncio.to_thread(parse_binary_with_lief, path_str)
                if lief_result.status == "success":
                    # Parse JSON content if available
                    content = lief_result.data
                    try:
                        file_result["lief_metadata"] = (
                            json.loads(content) if isinstance(content, str) else content
                        )
                    except (json.JSONDecodeError, ValueError, TypeError):
                        file_result["lief_metadata"] = content
            except Exception as e:
                file_result["lief_error"] = str(e)

        # Task 3: YARA (sync, run in a thread).
        # Batch mode does not yet accept a user-supplied rule file, and there is
        # no default rule path in the config, so YARA is skipped in this initial
        # implementation.

        # Report progress
        completed_count += 1
        if ctx:
            await ctx.report_progress(completed_count, total_files)

        return file_name, file_result

    # 3. Run scans in parallel.
    # Limit concurrency to avoid overwhelming the system.
    semaphore = asyncio.Semaphore(5)  # Process 5 files at a time

    async def sem_scan(file_path, index):
        async with semaphore:
            return await scan_single_file(file_path, index)

    tasks = [sem_scan(f, i) for i, f in enumerate(files_to_scan)]

    # Wait for all tasks with a global timeout
    try:
        scan_results = await asyncio.wait_for(asyncio.gather(*tasks), timeout=timeout)
        for name, res in scan_results:
            results[name] = res
    except asyncio.TimeoutError:
        return success(
            {"partial_results": results, "error": "Scan timed out"},
            file_count=len(files_to_scan),
            scanned_count=len(results),
            status="timeout",
        )

    return success(
        {"files": results},
        file_count=len(files_to_scan),
        status="completed",
        description=f"Batch scan completed for {len(files_to_scan)} files",
    )
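

# Illustrative sketch (assumption: not part of the packaged module): the
# bounded-concurrency pattern used above, in isolation. A Semaphore caps how
# many coroutines run at once while asyncio.gather preserves result order.
# `_bounded_gather` is a hypothetical helper; `jobs` are zero-argument
# coroutine functions.
async def _bounded_gather(jobs, limit: int = 5):
    import asyncio

    semaphore = asyncio.Semaphore(limit)

    async def _guarded(job):
        async with semaphore:
            return await job()

    return await asyncio.gather(*(_guarded(job) for job in jobs))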


# Note: FileOperationsPlugin has been removed.
# The file operation tools are now registered via CommonToolsPlugin in common/__init__.py.