iflow-mcp_developermode-korea_reversecore-mcp 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/METADATA +543 -0
  2. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/RECORD +79 -0
  3. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/WHEEL +5 -0
  4. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/entry_points.txt +2 -0
  5. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/licenses/LICENSE +21 -0
  6. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/top_level.txt +1 -0
  7. reversecore_mcp/__init__.py +9 -0
  8. reversecore_mcp/core/__init__.py +78 -0
  9. reversecore_mcp/core/audit.py +101 -0
  10. reversecore_mcp/core/binary_cache.py +138 -0
  11. reversecore_mcp/core/command_spec.py +357 -0
  12. reversecore_mcp/core/config.py +432 -0
  13. reversecore_mcp/core/container.py +288 -0
  14. reversecore_mcp/core/decorators.py +152 -0
  15. reversecore_mcp/core/error_formatting.py +93 -0
  16. reversecore_mcp/core/error_handling.py +142 -0
  17. reversecore_mcp/core/evidence.py +229 -0
  18. reversecore_mcp/core/exceptions.py +296 -0
  19. reversecore_mcp/core/execution.py +240 -0
  20. reversecore_mcp/core/ghidra.py +642 -0
  21. reversecore_mcp/core/ghidra_helper.py +481 -0
  22. reversecore_mcp/core/ghidra_manager.py +234 -0
  23. reversecore_mcp/core/json_utils.py +131 -0
  24. reversecore_mcp/core/loader.py +73 -0
  25. reversecore_mcp/core/logging_config.py +206 -0
  26. reversecore_mcp/core/memory.py +721 -0
  27. reversecore_mcp/core/metrics.py +198 -0
  28. reversecore_mcp/core/mitre_mapper.py +365 -0
  29. reversecore_mcp/core/plugin.py +45 -0
  30. reversecore_mcp/core/r2_helpers.py +404 -0
  31. reversecore_mcp/core/r2_pool.py +403 -0
  32. reversecore_mcp/core/report_generator.py +268 -0
  33. reversecore_mcp/core/resilience.py +252 -0
  34. reversecore_mcp/core/resource_manager.py +169 -0
  35. reversecore_mcp/core/result.py +132 -0
  36. reversecore_mcp/core/security.py +213 -0
  37. reversecore_mcp/core/validators.py +238 -0
  38. reversecore_mcp/dashboard/__init__.py +221 -0
  39. reversecore_mcp/prompts/__init__.py +56 -0
  40. reversecore_mcp/prompts/common.py +24 -0
  41. reversecore_mcp/prompts/game.py +280 -0
  42. reversecore_mcp/prompts/malware.py +1219 -0
  43. reversecore_mcp/prompts/report.py +150 -0
  44. reversecore_mcp/prompts/security.py +136 -0
  45. reversecore_mcp/resources.py +329 -0
  46. reversecore_mcp/server.py +727 -0
  47. reversecore_mcp/tools/__init__.py +49 -0
  48. reversecore_mcp/tools/analysis/__init__.py +74 -0
  49. reversecore_mcp/tools/analysis/capa_tools.py +215 -0
  50. reversecore_mcp/tools/analysis/die_tools.py +180 -0
  51. reversecore_mcp/tools/analysis/diff_tools.py +643 -0
  52. reversecore_mcp/tools/analysis/lief_tools.py +272 -0
  53. reversecore_mcp/tools/analysis/signature_tools.py +591 -0
  54. reversecore_mcp/tools/analysis/static_analysis.py +479 -0
  55. reversecore_mcp/tools/common/__init__.py +58 -0
  56. reversecore_mcp/tools/common/file_operations.py +352 -0
  57. reversecore_mcp/tools/common/memory_tools.py +516 -0
  58. reversecore_mcp/tools/common/patch_explainer.py +230 -0
  59. reversecore_mcp/tools/common/server_tools.py +115 -0
  60. reversecore_mcp/tools/ghidra/__init__.py +19 -0
  61. reversecore_mcp/tools/ghidra/decompilation.py +975 -0
  62. reversecore_mcp/tools/ghidra/ghidra_tools.py +1052 -0
  63. reversecore_mcp/tools/malware/__init__.py +61 -0
  64. reversecore_mcp/tools/malware/adaptive_vaccine.py +579 -0
  65. reversecore_mcp/tools/malware/dormant_detector.py +756 -0
  66. reversecore_mcp/tools/malware/ioc_tools.py +228 -0
  67. reversecore_mcp/tools/malware/vulnerability_hunter.py +519 -0
  68. reversecore_mcp/tools/malware/yara_tools.py +214 -0
  69. reversecore_mcp/tools/patch_explainer.py +19 -0
  70. reversecore_mcp/tools/radare2/__init__.py +13 -0
  71. reversecore_mcp/tools/radare2/r2_analysis.py +972 -0
  72. reversecore_mcp/tools/radare2/r2_session.py +376 -0
  73. reversecore_mcp/tools/radare2/radare2_mcp_tools.py +1183 -0
  74. reversecore_mcp/tools/report/__init__.py +4 -0
  75. reversecore_mcp/tools/report/email.py +82 -0
  76. reversecore_mcp/tools/report/report_mcp_tools.py +344 -0
  77. reversecore_mcp/tools/report/report_tools.py +1076 -0
  78. reversecore_mcp/tools/report/session.py +194 -0
  79. reversecore_mcp/tools/report_tools.py +11 -0
@@ -0,0 +1,272 @@
1
+ """LIEF (Library to Instrument Executable Formats) parsing tools for binary analysis."""
2
+
3
+ from itertools import islice
4
+ from typing import Any
5
+
6
+ # Use high-performance JSON implementation (3-5x faster)
7
+ from reversecore_mcp.core import json_utils as json
8
+ from reversecore_mcp.core.config import get_config
9
+ from reversecore_mcp.core.decorators import log_execution
10
+ from reversecore_mcp.core.error_handling import handle_tool_errors
11
+ from reversecore_mcp.core.metrics import track_metrics
12
+ from reversecore_mcp.core.result import ToolResult, failure, success
13
+ from reversecore_mcp.core.security import validate_file_path
14
+
15
+
16
+ def _extract_sections(binary: Any) -> list[dict[str, Any]]:
17
+ """Extract section information from binary."""
18
+ if not hasattr(binary, "sections") or not binary.sections:
19
+ return []
20
+ return [
21
+ {
22
+ "name": section.name,
23
+ "virtual_address": hex(section.virtual_address),
24
+ "size": section.size,
25
+ "entropy": (round(section.entropy, 2) if hasattr(section, "entropy") else None),
26
+ }
27
+ for section in binary.sections
28
+ ]
29
+
30
+
31
+ def _extract_symbols(binary: Any, max_imports: int = 100, max_exports: int = 100) -> dict[str, Any]:
32
+ """Extract symbol information (imports/exports) from binary.
33
+
34
+ Args:
35
+ binary: LIEF binary object
36
+ max_imports: Maximum number of imports to extract (P2 memory protection)
37
+ max_exports: Maximum number of exports to extract (P2 memory protection)
38
+ """
39
+ symbols: dict[str, Any] = {}
40
+
41
+ if hasattr(binary, "imported_functions") and binary.imported_functions:
42
+ # Use islice with configurable limit
43
+ symbols["imported_functions"] = [
44
+ str(func) for func in islice(binary.imported_functions, max_imports)
45
+ ]
46
+
47
+ if hasattr(binary, "exported_functions") and binary.exported_functions:
48
+ # Use islice with configurable limit
49
+ symbols["exported_functions"] = [
50
+ str(func) for func in islice(binary.exported_functions, max_exports)
51
+ ]
52
+
53
+ # PE-specific imports/exports
54
+ if hasattr(binary, "imports") and binary.imports:
55
+ # Use islice to avoid creating intermediate list
56
+ # OPTIMIZATION: Extract function list creation outside the dict to avoid
57
+ # nested comprehension inside loop
58
+ formatted_imports: list[dict[str, Any]] = []
59
+ for imp in islice(binary.imports, min(20, max_imports // 5)):
60
+ entries = getattr(imp, "entries", [])
61
+ # Process entries directly without intermediate list conversion
62
+ # Build function list separately for better performance
63
+ func_list = []
64
+ if entries:
65
+ for f in islice(entries, 20):
66
+ func_list.append(str(f))
67
+
68
+ formatted_imports.append(
69
+ {
70
+ "name": getattr(imp, "name", "unknown"),
71
+ "functions": func_list,
72
+ }
73
+ )
74
+ if formatted_imports:
75
+ symbols["imports"] = formatted_imports
76
+
77
+ if hasattr(binary, "exports") and binary.exports:
78
+ # Use islice to avoid creating intermediate list
79
+ formatted_exports: list[dict[str, Any]] = []
80
+ for exp in islice(binary.exports, 100):
81
+ formatted_exports.append(
82
+ {
83
+ "name": getattr(exp, "name", "unknown"),
84
+ "address": hex(exp.address) if hasattr(exp, "address") else None,
85
+ }
86
+ )
87
+ if formatted_exports:
88
+ symbols["exports"] = formatted_exports
89
+
90
+ return symbols
91
+
92
+
93
+ def _format_lief_output(result: dict[str, Any], format: str) -> str:
94
+ """Format LIEF parsing result as JSON or text."""
95
+ if format.lower() == "json":
96
+ return json.dumps(result, indent=2)
97
+
98
+ # Text format - optimize by using list comprehension and avoiding repeated slicing
99
+ lines = [f"Format: {result.get('format', 'Unknown')}"]
100
+ if result.get("entry_point"):
101
+ lines.append(f"Entry Point: {result['entry_point']}")
102
+
103
+ sections = result.get("sections")
104
+ if sections:
105
+ section_count = len(sections)
106
+ lines.append(f"\nSections ({section_count}):")
107
+ # Iterate directly with limit instead of slicing
108
+ for i, section in enumerate(sections):
109
+ if i >= 20:
110
+ break
111
+ lines.append(
112
+ f" - {section['name']}: VA={section['virtual_address']}, Size={section['size']}"
113
+ )
114
+
115
+ imported_funcs = result.get("imported_functions")
116
+ if imported_funcs:
117
+ func_count = len(imported_funcs)
118
+ lines.append(f"\nImported Functions ({func_count}):")
119
+ for i, func in enumerate(imported_funcs):
120
+ if i >= 20:
121
+ break
122
+ lines.append(f" - {func}")
123
+
124
+ exported_funcs = result.get("exported_functions")
125
+ if exported_funcs:
126
+ func_count = len(exported_funcs)
127
+ lines.append(f"\nExported Functions ({func_count}):")
128
+ for i, func in enumerate(exported_funcs):
129
+ if i >= 20:
130
+ break
131
+ lines.append(f" - {func}")
132
+
133
+ return "\n".join(lines)
134
+
135
+
136
# P2: Memory protection thresholds for LIEF parsing (sizes in megabytes).
LIEF_WARN_SIZE_MB = 100 # Above this, warn the user and reduce extraction limits
LIEF_LIMIT_SIZE_MB = 500 # Above this, refuse lief.parse() entirely to prevent OOM
139
+
140
+
141
@log_execution(tool_name="parse_binary_with_lief")
@track_metrics("parse_binary_with_lief")
@handle_tool_errors
def parse_binary_with_lief(file_path: str, format: str = "json") -> ToolResult:
    """Parse binary metadata using LIEF and return structured results.

    Memory-safe implementation with progressive limits:
    - Under 100MB: Full parsing with all details
    - 100-500MB: Warning + reduced extraction limits
    - Over 500MB: Minimal parsing (headers only)
    - Over config limit: Rejected

    Args:
        file_path: Path to the binary to analyze (validated before use).
        format: "json" for a structured payload, anything else for text summary.

    Returns:
        ToolResult: success with parsed metadata, or a failure record
        (FILE_TOO_LARGE, FILE_TOO_LARGE_FOR_LIEF, TIMEOUT, CRASH_DETECTED,
        LIEF_ERROR, EXECUTION_ERROR).
    """

    validated_path = validate_file_path(file_path)

    max_file_size = get_config().lief_max_file_size
    file_size = validated_path.stat().st_size
    file_size_mb = file_size / (1024 * 1024)

    if file_size > max_file_size:
        return failure(
            "FILE_TOO_LARGE",
            f"File size ({file_size} bytes) exceeds maximum allowed size ({max_file_size} bytes)",
            hint="Set LIEF_MAX_FILE_SIZE environment variable to increase limit",
        )

    # CRITICAL: Reject files over limit BEFORE lief.parse() to prevent OOM.
    # lief.parse() loads the entire binary structure into memory, which can
    # cost several GB of RAM for large binaries due to Python object overhead.
    if file_size_mb > LIEF_LIMIT_SIZE_MB:
        return failure(
            "FILE_TOO_LARGE_FOR_LIEF",
            f"File size ({file_size_mb:.0f}MB) exceeds LIEF parsing limit ({LIEF_LIMIT_SIZE_MB}MB)",
            hint="Use radare2 or other lightweight tools for analysis of very large binaries",
        )

    # P2: Determine extraction limits based on file size.
    extraction_warning = None
    if file_size_mb > LIEF_WARN_SIZE_MB:
        # Large file: reduced extraction.
        max_imports = 50
        max_exports = 50
        max_sections = 20
        extraction_warning = f"Large file ({file_size_mb:.0f}MB): some details may be truncated"
    else:
        # Normal file: full extraction.
        max_imports = 100
        max_exports = 100
        max_sections = None  # No limit

    # Isolate potentially dangerous LIEF parsing in a separate process.
    # This protects the main server from C++ level crashes (segfaults) in LIEF.
    import concurrent.futures

    try:
        with concurrent.futures.ProcessPoolExecutor(max_workers=1) as executor:
            future = executor.submit(
                _run_lief_in_process,
                str(validated_path),
                max_imports,
                max_exports,
                max_sections,
            )

            try:
                result_data = future.result(timeout=60)  # 60s timeout for LIEF
            except concurrent.futures.TimeoutError:
                # Best effort: abandon the worker; shutdown cannot kill a hung
                # C++ call, but cancel_futures prevents new work.
                executor.shutdown(wait=False, cancel_futures=True)
                return failure(
                    "TIMEOUT",
                    "LIEF parsing timed out (possible hang in C++ library)",
                )
            # FIX: the correct exception is BrokenProcessPool — there is no
            # "concurrent.futures.ProcessBrokenExecutor". The old name raised
            # AttributeError at the exact moment a worker segfault occurred,
            # so the CRASH_DETECTED path was unreachable.
            except concurrent.futures.process.BrokenProcessPool:
                return failure(
                    "CRASH_DETECTED",
                    "LIEF parser crashed (segmentation fault detected). Analysis aborted safely.",
                    hint="The file may be malformed intentionally to crash analysis tools."
                )
            except Exception as e:
                return failure("LIEF_ERROR", f"LIEF failed to parse binary: {e}")

    except Exception as e:
        return failure("EXECUTION_ERROR", f"Failed to run LIEF isolation: {e}")

    # P2: Add warning if extraction was limited.
    if extraction_warning:
        result_data["_warning"] = extraction_warning

    if format.lower() == "json":
        return success(result_data)

    formatted_text = _format_lief_output(result_data, format)
    return success(formatted_text)
239
+
240
+
241
def _run_lief_in_process(
    file_path: str, max_imports: int, max_exports: int, max_sections: int | None
) -> dict[str, Any]:
    """Worker that runs LIEF parsing inside a separate process.

    Must be a module-level function (not a closure) so ProcessPoolExecutor
    can pickle it.

    Args:
        file_path: Path of the binary to parse.
        max_imports: Import extraction cap (P2 memory protection).
        max_exports: Export extraction cap (P2 memory protection).
        max_sections: Section cap, or None for no limit.

    Returns:
        Dict with format, entry point, sections and symbol tables.

    Raises:
        RuntimeError: if lief.parse raised an exception.
        ValueError: if LIEF returned None (unsupported binary format).
    """
    import lief

    try:
        binary = lief.parse(file_path)
    except Exception as exc:
        # FIX: chain with `from exc` so the original LIEF error is preserved
        # in the traceback transported back to the parent process.
        raise RuntimeError(f"LIEF parse failed: {exc}") from exc

    if binary is None:
        raise ValueError("Unsupported binary format")

    result_data: dict[str, Any] = {
        "format": str(binary.format).split(".")[-1].lower(),
        "entry_point": (hex(binary.entrypoint) if hasattr(binary, "entrypoint") else None),
    }

    # Extract sections, truncated when a cap was requested.
    sections = _extract_sections(binary)
    if sections:
        if max_sections is not None:
            sections = sections[:max_sections]
        result_data["sections"] = sections

    # Extract symbols (imports/exports) under the P2 caps.
    symbols = _extract_symbols(binary, max_imports=max_imports, max_exports=max_exports)
    result_data.update(symbols)

    return result_data