iflow-mcp_developermode-korea_reversecore-mcp 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/METADATA +543 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/RECORD +79 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/WHEEL +5 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/entry_points.txt +2 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/licenses/LICENSE +21 -0
- iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/top_level.txt +1 -0
- reversecore_mcp/__init__.py +9 -0
- reversecore_mcp/core/__init__.py +78 -0
- reversecore_mcp/core/audit.py +101 -0
- reversecore_mcp/core/binary_cache.py +138 -0
- reversecore_mcp/core/command_spec.py +357 -0
- reversecore_mcp/core/config.py +432 -0
- reversecore_mcp/core/container.py +288 -0
- reversecore_mcp/core/decorators.py +152 -0
- reversecore_mcp/core/error_formatting.py +93 -0
- reversecore_mcp/core/error_handling.py +142 -0
- reversecore_mcp/core/evidence.py +229 -0
- reversecore_mcp/core/exceptions.py +296 -0
- reversecore_mcp/core/execution.py +240 -0
- reversecore_mcp/core/ghidra.py +642 -0
- reversecore_mcp/core/ghidra_helper.py +481 -0
- reversecore_mcp/core/ghidra_manager.py +234 -0
- reversecore_mcp/core/json_utils.py +131 -0
- reversecore_mcp/core/loader.py +73 -0
- reversecore_mcp/core/logging_config.py +206 -0
- reversecore_mcp/core/memory.py +721 -0
- reversecore_mcp/core/metrics.py +198 -0
- reversecore_mcp/core/mitre_mapper.py +365 -0
- reversecore_mcp/core/plugin.py +45 -0
- reversecore_mcp/core/r2_helpers.py +404 -0
- reversecore_mcp/core/r2_pool.py +403 -0
- reversecore_mcp/core/report_generator.py +268 -0
- reversecore_mcp/core/resilience.py +252 -0
- reversecore_mcp/core/resource_manager.py +169 -0
- reversecore_mcp/core/result.py +132 -0
- reversecore_mcp/core/security.py +213 -0
- reversecore_mcp/core/validators.py +238 -0
- reversecore_mcp/dashboard/__init__.py +221 -0
- reversecore_mcp/prompts/__init__.py +56 -0
- reversecore_mcp/prompts/common.py +24 -0
- reversecore_mcp/prompts/game.py +280 -0
- reversecore_mcp/prompts/malware.py +1219 -0
- reversecore_mcp/prompts/report.py +150 -0
- reversecore_mcp/prompts/security.py +136 -0
- reversecore_mcp/resources.py +329 -0
- reversecore_mcp/server.py +727 -0
- reversecore_mcp/tools/__init__.py +49 -0
- reversecore_mcp/tools/analysis/__init__.py +74 -0
- reversecore_mcp/tools/analysis/capa_tools.py +215 -0
- reversecore_mcp/tools/analysis/die_tools.py +180 -0
- reversecore_mcp/tools/analysis/diff_tools.py +643 -0
- reversecore_mcp/tools/analysis/lief_tools.py +272 -0
- reversecore_mcp/tools/analysis/signature_tools.py +591 -0
- reversecore_mcp/tools/analysis/static_analysis.py +479 -0
- reversecore_mcp/tools/common/__init__.py +58 -0
- reversecore_mcp/tools/common/file_operations.py +352 -0
- reversecore_mcp/tools/common/memory_tools.py +516 -0
- reversecore_mcp/tools/common/patch_explainer.py +230 -0
- reversecore_mcp/tools/common/server_tools.py +115 -0
- reversecore_mcp/tools/ghidra/__init__.py +19 -0
- reversecore_mcp/tools/ghidra/decompilation.py +975 -0
- reversecore_mcp/tools/ghidra/ghidra_tools.py +1052 -0
- reversecore_mcp/tools/malware/__init__.py +61 -0
- reversecore_mcp/tools/malware/adaptive_vaccine.py +579 -0
- reversecore_mcp/tools/malware/dormant_detector.py +756 -0
- reversecore_mcp/tools/malware/ioc_tools.py +228 -0
- reversecore_mcp/tools/malware/vulnerability_hunter.py +519 -0
- reversecore_mcp/tools/malware/yara_tools.py +214 -0
- reversecore_mcp/tools/patch_explainer.py +19 -0
- reversecore_mcp/tools/radare2/__init__.py +13 -0
- reversecore_mcp/tools/radare2/r2_analysis.py +972 -0
- reversecore_mcp/tools/radare2/r2_session.py +376 -0
- reversecore_mcp/tools/radare2/radare2_mcp_tools.py +1183 -0
- reversecore_mcp/tools/report/__init__.py +4 -0
- reversecore_mcp/tools/report/email.py +82 -0
- reversecore_mcp/tools/report/report_mcp_tools.py +344 -0
- reversecore_mcp/tools/report/report_tools.py +1076 -0
- reversecore_mcp/tools/report/session.py +194 -0
- reversecore_mcp/tools/report_tools.py +11 -0
reversecore_mcp/core/r2_helpers.py
@@ -0,0 +1,404 @@
"""
Shared helper functions for Radare2 operations.

This module provides common utilities used across multiple tool modules
to avoid circular dependencies between tools.

Functions:
- strip_address_prefixes: Remove common address prefixes
- escape_mermaid_chars: Escape special characters for Mermaid diagrams
- get_r2_project_name: Generate unique project names
- calculate_dynamic_timeout: Calculate timeout based on file size
- get_adaptive_analysis_level: Choose an analysis level based on file size
- build_r2_cmd: Build radare2 command list
- execute_r2_command: Execute radare2 commands asynchronously
- extract_first_json: Extract JSON from noisy output
- parse_json_output: Parse JSON from command output
- remove_analysis_commands: Strip explicit analysis commands from a command string

Underscore-prefixed aliases (_strip_address_prefixes, etc.) are kept at the
bottom of the module for backward compatibility.
"""

import hashlib
import os
import re
from functools import lru_cache
from pathlib import Path

from reversecore_mcp.core import json_utils as json
from reversecore_mcp.core.execution import execute_subprocess_async
from reversecore_mcp.core.logging_config import get_logger

logger = get_logger(__name__)

# File size thresholds for adaptive analysis
SMALL_FILE_MB = 10    # Files under this use 'aaa' (full analysis)
MEDIUM_FILE_MB = 50   # Files under this use 'aa' (basic analysis)
LARGE_FILE_MB = 200   # Files under this use 'aab' (minimal analysis)
# Files over LARGE_FILE_MB use '-n' (no analysis)

# OPTIMIZATION: Pre-compile patterns for better performance
_ADDRESS_PREFIX_PATTERN = re.compile(r"(0x|sym\.|fcn\.)")
_MERMAID_ESCAPE_CHARS = str.maketrans({'"': "'", "(": "[", ")": "]"})
_R2_ANALYSIS_PATTERN = re.compile(r"\b(aaa|aa)\b")

def strip_address_prefixes(address: str) -> str:
    """
    Efficiently strip common address prefixes using regex.

    This is faster than chained .replace() calls for multiple patterns.

    Args:
        address: Address string with potential prefixes

    Returns:
        Address string with prefixes removed
    """
    return _ADDRESS_PREFIX_PATTERN.sub("", address)


def escape_mermaid_chars(text: str) -> str:
    """
    Efficiently escape Mermaid special characters using str.translate().

    This is faster than chained .replace() calls for multiple characters.

    Args:
        text: Text to escape

    Returns:
        Escaped text safe for Mermaid diagrams
    """
    return text.translate(_MERMAID_ESCAPE_CHARS)
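Taken together, the two string helpers behave like this; a minimal sketch with illustrative inputs (the sample strings are not from the package):

```python
from reversecore_mcp.core.r2_helpers import escape_mermaid_chars, strip_address_prefixes

# The regex removes every "0x", "sym." and "fcn." occurrence.
assert strip_address_prefixes("sym.imp.printf") == "imp.printf"
assert strip_address_prefixes("0x00401000") == "00401000"
# Mermaid-hostile characters are mapped: " -> ', ( -> [, ) -> ]
assert escape_mermaid_chars('call("main")') == "call['main']"
```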
@lru_cache(maxsize=128)
def get_r2_project_name(file_path: str) -> str:
    """
    Generate a unique project name based on file path hash.

    Cached to avoid repeated MD5 computation for the same file path.

    Args:
        file_path: Path to the binary file

    Returns:
        MD5 hash of the absolute file path
    """
    abs_path = str(Path(file_path).resolve())
    return hashlib.md5(abs_path.encode()).hexdigest()
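Because the path is resolved before hashing, different spellings of the same file yield the same project name; a small sketch with hypothetical paths:

```python
from reversecore_mcp.core.r2_helpers import get_r2_project_name

# Hypothetical paths: both resolve to the same file, so they share one name
# (though each distinct argument string gets its own lru_cache entry).
a = get_r2_project_name("./samples/mal.bin")
b = get_r2_project_name("/home/user/samples/mal.bin")
print(a == b)   # True when both resolve to the same absolute path
print(len(a))   # 32 hex characters (MD5 digest)
```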
@lru_cache(maxsize=128)
def calculate_dynamic_timeout(file_path: str, base_timeout: int = 300) -> int:
    """
    Calculate timeout based on file size.

    Strategy: Base timeout + 2 seconds per MB of file size.
    Cached to avoid repeated file stat calls for the same file.

    Args:
        file_path: Path to the binary file
        base_timeout: Base timeout in seconds

    Returns:
        Calculated timeout in seconds
    """
    try:
        size_mb = os.path.getsize(file_path) / (1024 * 1024)
        # Cap the dynamic addition to avoid extremely long timeouts (max +10 mins)
        additional_time = min(size_mb * 2, 600)
        return int(base_timeout + additional_time)
    except Exception:
        return base_timeout
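Worked through with the default 300-second base (file sizes are hypothetical): a 50 MB file gets 300 + min(100, 600) = 400 s, a 300 MB file gets 300 + min(600, 600) = 900 s, and the +2 s/MB surcharge stays capped at 600 s no matter how large the file grows.

```python
from reversecore_mcp.core.r2_helpers import calculate_dynamic_timeout

#   50 MB -> 300 + min(100, 600) = 400 s
#  300 MB -> 300 + min(600, 600) = 900 s
#    2 GB -> 300 + 600 (capped)  = 900 s
print(calculate_dynamic_timeout("/samples/50mb.bin"))  # 400, if the file is 50 MB
```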
@lru_cache(maxsize=256)
def get_adaptive_analysis_level(file_path: str, requested_level: str = "aaa") -> str:
    """
    Determine optimal analysis level based on file size.

    This prevents timeout/OOM issues on large binaries while maintaining
    quality for smaller files.

    Args:
        file_path: Path to the binary file
        requested_level: Requested analysis level (may be overridden)

    Returns:
        Optimal analysis level for the file size
    """
    # If user explicitly requested no analysis, respect that
    if requested_level == "-n":
        return "-n"

    try:
        file_size_mb = os.path.getsize(file_path) / (1024 * 1024)
    except OSError:
        return requested_level  # Can't determine size, use requested

    # Adaptive analysis based on file size
    if file_size_mb < SMALL_FILE_MB:
        # Small files: full analysis is fast
        return "aaa"
    elif file_size_mb < MEDIUM_FILE_MB:
        # Medium files: basic analysis only
        logger.debug(f"File {file_size_mb:.1f}MB > {SMALL_FILE_MB}MB, using 'aa' instead of 'aaa'")
        return "aa"
    elif file_size_mb < LARGE_FILE_MB:
        # Large files: minimal analysis
        logger.debug(f"File {file_size_mb:.1f}MB > {MEDIUM_FILE_MB}MB, using 'aab' (minimal analysis)")
        return "aab"
    else:
        # Very large files: no analysis to prevent timeout
        logger.warning(f"File {file_size_mb:.1f}MB > {LARGE_FILE_MB}MB, skipping analysis")
        return "-n"
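Spelled out, the mapping is: under 10 MB → "aaa", under 50 MB → "aa", under 200 MB → "aab", otherwise "-n". Note that any request other than "-n" is overridden by the size heuristic, so a small file gets "aaa" even if "aa" was asked for. A minimal sketch with hypothetical paths:

```python
from reversecore_mcp.core.r2_helpers import get_adaptive_analysis_level

# An explicit "-n" always wins, regardless of size:
print(get_adaptive_analysis_level("/samples/huge.bin", requested_level="-n"))  # "-n"
# Otherwise the on-disk size decides, e.g. a 5 MB file:
print(get_adaptive_analysis_level("/samples/small.bin"))  # "aaa"
```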
def build_r2_cmd(file_path: str, r2_commands: list[str], analysis_level: str = "aaa") -> list[str]:
    """
    Build radare2 command with adaptive analysis.

    Automatically adjusts analysis level based on file size to prevent
    timeouts on large binaries.

    Args:
        file_path: Path to the binary file
        r2_commands: List of radare2 commands to execute
        analysis_level: Requested analysis level (may be adjusted)

    Returns:
        Complete command list for subprocess execution
    """
    base_cmd = ["r2", "-q"]

    # Apply adaptive analysis based on file size
    effective_level = get_adaptive_analysis_level(file_path, analysis_level)

    # If no analysis requested/determined
    if effective_level == "-n":
        return base_cmd + ["-n"] + ["-c", ";".join(r2_commands), str(file_path)]

    # Run with analysis
    # We use 'e scr.color=0' to ensure no color codes in output
    combined_cmds = ["e scr.color=0", effective_level] + r2_commands
    return base_cmd + ["-c", ";".join(combined_cmds), str(file_path)]
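For a small binary the resulting argv looks like this (hypothetical path; the exact level depends on the file's size on disk):

```python
from reversecore_mcp.core.r2_helpers import build_r2_cmd

# Under 10 MB the effective level is "aaa", so analysis is prepended:
print(build_r2_cmd("/samples/small.bin", ["ij", "aflj"]))
# ['r2', '-q', '-c', 'e scr.color=0;aaa;ij;aflj', '/samples/small.bin']
```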
# Global reference to r2_pool (lazy-loaded to avoid circular imports)
_r2_pool = None


def _get_r2_pool():
    """Get r2_pool singleton, lazy-loading to avoid circular imports."""
    global _r2_pool
    if _r2_pool is None:
        try:
            from reversecore_mcp.core.r2_pool import r2_pool
            _r2_pool = r2_pool
        except ImportError:
            pass
    return _r2_pool

async def execute_r2_command(
    file_path: Path,
    r2_commands: list[str],
    analysis_level: str = "aaa",
    max_output_size: int = 10_000_000,
    base_timeout: int = 300,
    skip_cache: bool = False,
) -> tuple[str, int]:
    """
    Execute radare2 commands in a one-shot subprocess.

    This helper consolidates the repeated pattern of:
    1. Determine the analysis level adapted to the file size
    2. Calculate a dynamic timeout
    3. Build the r2 command with the adaptive analysis level
    4. Execute the subprocess asynchronously

    Args:
        file_path: Path to the binary file (already validated)
        r2_commands: List of radare2 commands to execute
        analysis_level: Requested analysis level (may be adapted)
        max_output_size: Maximum output size in bytes
        base_timeout: Base timeout in seconds
        skip_cache: Retained for backward compatibility; currently unused
            (see NOTE in the function body)

    Returns:
        Tuple of (output, bytes_read)
    """
    file_path_str = str(file_path)

    # NOTE: Previously had is_analyzed() cache that set "-n" flag, but this was buggy.
    # Subprocess is independent from r2_pool sessions - analysis state is NOT shared.
    # Each subprocess must perform its own analysis.

    # Apply adaptive analysis based on file size (P1 optimization)
    effective_level = get_adaptive_analysis_level(file_path_str, analysis_level)

    effective_timeout = calculate_dynamic_timeout(file_path_str, base_timeout)
    cmd = build_r2_cmd(file_path_str, r2_commands, effective_level)

    output, bytes_read = await execute_subprocess_async(
        cmd,
        max_output_size=max_output_size,
        timeout=effective_timeout,
    )

    return output, bytes_read
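A minimal end-to-end sketch, assuming radare2 is installed on PATH and a readable sample exists at the hypothetical path below; the key layout of `ij` output depends on the r2 version:

```python
import asyncio
from pathlib import Path

from reversecore_mcp.core.r2_helpers import execute_r2_command, parse_json_output

async def main() -> None:
    # Hypothetical sample path; timeout grows with file size via
    # calculate_dynamic_timeout, starting from the base given here.
    output, bytes_read = await execute_r2_command(
        Path("/samples/small.bin"),
        ["ij"],  # binary info as JSON
        base_timeout=120,
    )
    info = parse_json_output(output)
    print(bytes_read, info.get("core", {}).get("format"))

asyncio.run(main())
```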
def extract_first_json(text: str) -> str | None:
    """
    Extract the first valid JSON object or array from a string.

    Handles nested structures and ignores surrounding garbage.
    Optimized to O(n) by minimizing redundant scanning.

    Args:
        text: Input text potentially containing JSON

    Returns:
        The extracted JSON string, or None if no valid JSON found
    """
    text = text.strip()
    if not text:
        return None

    # Quick optimization: Try parsing the whole string first
    if text[0] in ("{", "["):
        try:
            json.loads(text)
            return text
        except json.JSONDecodeError:
            pass

    # Need to extract JSON from noisy output
    i = 0
    text_len = len(text)

    while i < text_len:
        char = text[i]

        # Skip non-JSON start characters
        if char not in ("{", "["):
            i += 1
            continue

        # Found potential JSON start - check for false positives
        if i + 1 < text_len and text[i + 1] in (" ", "\t"):
            next_idx = i + 2
            while next_idx < text_len and text[next_idx] in (" ", "\t", "\n", "\r"):
                next_idx += 1
            if next_idx < text_len and text[next_idx] == char:
                i += 1
                continue

        # Try to extract JSON starting from this position
        stack = []
        start_idx = i
        in_string = False
        escape_next = False
        j = i

        while j < text_len:
            c = text[j]

            if escape_next:
                escape_next = False
                j += 1
                continue

            if c == "\\" and in_string:
                escape_next = True
                j += 1
                continue

            if c == '"':
                in_string = not in_string
                j += 1
                continue

            if not in_string:
                if c in ("{", "["):
                    stack.append(c)
                elif c in ("}", "]"):
                    if not stack:
                        break

                    last = stack[-1]
                    if (c == "}" and last == "{") or (c == "]" and last == "["):
                        stack.pop()
                        if not stack:
                            candidate = text[start_idx : j + 1]
                            try:
                                json.loads(candidate)
                                return candidate
                            except json.JSONDecodeError:
                                i = j + 1
                                break
                    else:
                        break

            j += 1

        if i == start_idx:
            i += 1

    return None
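A quick sketch of the noisy-output case; the warning text below is illustrative, not actual r2 output:

```python
from reversecore_mcp.core.r2_helpers import extract_first_json

# radare2 often prints warnings before the JSON payload:
noisy = 'WARN: run aaa for better results\n{"blocks": [{"addr": 4096}]}'
print(extract_first_json(noisy))           # '{"blocks": [{"addr": 4096}]}'
print(extract_first_json("no json here"))  # None
```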
def parse_json_output(output: str):
    """
    Safely parse JSON from command output.

    Tries to extract JSON from output that may contain non-JSON text
    (like warnings, debug messages, etc.) and parse it. If no embedded
    JSON can be extracted, falls back to parsing the whole string.

    Args:
        output: Raw command output that may contain JSON

    Returns:
        Parsed JSON object (dict/list)

    Raises:
        json.JSONDecodeError: If no valid JSON can be parsed from the output
    """
    json_str = extract_first_json(output)

    if json_str is not None:
        try:
            return json.loads(json_str)
        except json.JSONDecodeError:
            pass

    return json.loads(output)
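Usage follows directly from extract_first_json; the input below is illustrative:

```python
from reversecore_mcp.core.r2_helpers import parse_json_output

data = parse_json_output('INFO: analysis done\n[{"name": "main", "offset": 4096}]')
print(data[0]["name"])  # main
```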
def remove_analysis_commands(command: str) -> str:
    """
    Remove explicit radare2 analysis commands (aaa, aa) from a command string.

    Args:
        command: Radare2 command string

    Returns:
        Command string with analysis commands removed
    """
    return _R2_ANALYSIS_PATTERN.sub("", command).strip(" ;")
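The word boundaries in _R2_ANALYSIS_PATTERN mean only standalone "aa"/"aaa" tokens are stripped; longer commands such as "aab" survive. A short sketch:

```python
from reversecore_mcp.core.r2_helpers import remove_analysis_commands

print(remove_analysis_commands("aaa; pdf @ main"))  # "pdf @ main"
print(remove_analysis_commands("aa;izj"))           # "izj"
print(remove_analysis_commands("aab;izj"))          # "aab;izj" (untouched)
```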
# Legacy aliases for backward compatibility (deprecated)
# These will be removed in a future version
_strip_address_prefixes = strip_address_prefixes
_escape_mermaid_chars = escape_mermaid_chars
_get_r2_project_name = get_r2_project_name
_calculate_dynamic_timeout = calculate_dynamic_timeout
_build_r2_cmd = build_r2_cmd
_execute_r2_command = execute_r2_command
_extract_first_json = extract_first_json
_parse_json_output = parse_json_output