iflow-mcp_developermode-korea_reversecore-mcp 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/METADATA +543 -0
  2. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/RECORD +79 -0
  3. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/WHEEL +5 -0
  4. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/entry_points.txt +2 -0
  5. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/licenses/LICENSE +21 -0
  6. iflow_mcp_developermode_korea_reversecore_mcp-1.0.0.dist-info/top_level.txt +1 -0
  7. reversecore_mcp/__init__.py +9 -0
  8. reversecore_mcp/core/__init__.py +78 -0
  9. reversecore_mcp/core/audit.py +101 -0
  10. reversecore_mcp/core/binary_cache.py +138 -0
  11. reversecore_mcp/core/command_spec.py +357 -0
  12. reversecore_mcp/core/config.py +432 -0
  13. reversecore_mcp/core/container.py +288 -0
  14. reversecore_mcp/core/decorators.py +152 -0
  15. reversecore_mcp/core/error_formatting.py +93 -0
  16. reversecore_mcp/core/error_handling.py +142 -0
  17. reversecore_mcp/core/evidence.py +229 -0
  18. reversecore_mcp/core/exceptions.py +296 -0
  19. reversecore_mcp/core/execution.py +240 -0
  20. reversecore_mcp/core/ghidra.py +642 -0
  21. reversecore_mcp/core/ghidra_helper.py +481 -0
  22. reversecore_mcp/core/ghidra_manager.py +234 -0
  23. reversecore_mcp/core/json_utils.py +131 -0
  24. reversecore_mcp/core/loader.py +73 -0
  25. reversecore_mcp/core/logging_config.py +206 -0
  26. reversecore_mcp/core/memory.py +721 -0
  27. reversecore_mcp/core/metrics.py +198 -0
  28. reversecore_mcp/core/mitre_mapper.py +365 -0
  29. reversecore_mcp/core/plugin.py +45 -0
  30. reversecore_mcp/core/r2_helpers.py +404 -0
  31. reversecore_mcp/core/r2_pool.py +403 -0
  32. reversecore_mcp/core/report_generator.py +268 -0
  33. reversecore_mcp/core/resilience.py +252 -0
  34. reversecore_mcp/core/resource_manager.py +169 -0
  35. reversecore_mcp/core/result.py +132 -0
  36. reversecore_mcp/core/security.py +213 -0
  37. reversecore_mcp/core/validators.py +238 -0
  38. reversecore_mcp/dashboard/__init__.py +221 -0
  39. reversecore_mcp/prompts/__init__.py +56 -0
  40. reversecore_mcp/prompts/common.py +24 -0
  41. reversecore_mcp/prompts/game.py +280 -0
  42. reversecore_mcp/prompts/malware.py +1219 -0
  43. reversecore_mcp/prompts/report.py +150 -0
  44. reversecore_mcp/prompts/security.py +136 -0
  45. reversecore_mcp/resources.py +329 -0
  46. reversecore_mcp/server.py +727 -0
  47. reversecore_mcp/tools/__init__.py +49 -0
  48. reversecore_mcp/tools/analysis/__init__.py +74 -0
  49. reversecore_mcp/tools/analysis/capa_tools.py +215 -0
  50. reversecore_mcp/tools/analysis/die_tools.py +180 -0
  51. reversecore_mcp/tools/analysis/diff_tools.py +643 -0
  52. reversecore_mcp/tools/analysis/lief_tools.py +272 -0
  53. reversecore_mcp/tools/analysis/signature_tools.py +591 -0
  54. reversecore_mcp/tools/analysis/static_analysis.py +479 -0
  55. reversecore_mcp/tools/common/__init__.py +58 -0
  56. reversecore_mcp/tools/common/file_operations.py +352 -0
  57. reversecore_mcp/tools/common/memory_tools.py +516 -0
  58. reversecore_mcp/tools/common/patch_explainer.py +230 -0
  59. reversecore_mcp/tools/common/server_tools.py +115 -0
  60. reversecore_mcp/tools/ghidra/__init__.py +19 -0
  61. reversecore_mcp/tools/ghidra/decompilation.py +975 -0
  62. reversecore_mcp/tools/ghidra/ghidra_tools.py +1052 -0
  63. reversecore_mcp/tools/malware/__init__.py +61 -0
  64. reversecore_mcp/tools/malware/adaptive_vaccine.py +579 -0
  65. reversecore_mcp/tools/malware/dormant_detector.py +756 -0
  66. reversecore_mcp/tools/malware/ioc_tools.py +228 -0
  67. reversecore_mcp/tools/malware/vulnerability_hunter.py +519 -0
  68. reversecore_mcp/tools/malware/yara_tools.py +214 -0
  69. reversecore_mcp/tools/patch_explainer.py +19 -0
  70. reversecore_mcp/tools/radare2/__init__.py +13 -0
  71. reversecore_mcp/tools/radare2/r2_analysis.py +972 -0
  72. reversecore_mcp/tools/radare2/r2_session.py +376 -0
  73. reversecore_mcp/tools/radare2/radare2_mcp_tools.py +1183 -0
  74. reversecore_mcp/tools/report/__init__.py +4 -0
  75. reversecore_mcp/tools/report/email.py +82 -0
  76. reversecore_mcp/tools/report/report_mcp_tools.py +344 -0
  77. reversecore_mcp/tools/report/report_tools.py +1076 -0
  78. reversecore_mcp/tools/report/session.py +194 -0
  79. reversecore_mcp/tools/report_tools.py +11 -0
@@ -0,0 +1,727 @@
1
+ """
2
+ Reversecore_MCP Server
3
+
4
+ This module initializes the FastMCP server and registers all available tools.
5
+ It includes health and metrics endpoints for monitoring in HTTP mode.
6
+ """
7
+
8
+ import asyncio
9
+ import re
10
+ import shutil
11
+ import time
12
+ import uuid
13
+ from collections.abc import AsyncGenerator
14
+ from contextlib import asynccontextmanager
15
+
16
+ import aiofiles
17
+
18
+ try:
19
+ import magic
20
+ except ImportError:
21
+ magic = None
22
+
23
+ from fastmcp import FastMCP
24
+
25
+ from reversecore_mcp.core.audit import AuditAction, audit_logger
26
+ from reversecore_mcp.core.config import get_config
27
+ from reversecore_mcp.core.logging_config import get_logger, setup_logging
28
+ from reversecore_mcp.core.resource_manager import resource_manager
29
+
30
# Configure structured logging once at import time so every module loaded
# below (plugins, prompts, resources) shares the same handlers/format.
setup_logging()
logger = get_logger(__name__)  # module-level logger for this entry-point module
33
+
34
+
35
@asynccontextmanager
async def server_lifespan(server: FastMCP) -> AsyncGenerator[None, None]:
    """
    Manage server lifecycle events.

    Startup (before ``yield``):
      1. Ensure the configured workspace directory exists (fatal on failure).
      2. Probe optional external tools (radare2, Java, graphviz) and log
         warnings for anything missing - the server still starts.
      3. Start the resource manager and AI memory store.
      4. Launch the background workspace-cleanup task.

    Shutdown (after ``yield``):
      Stops the resource manager, closes the memory store, cancels the
      cleanup task, closes all Ghidra sessions, and sweeps temporary files.

    Args:
        server: The FastMCP instance this lifespan is attached to
            (unused here; required by the lifespan protocol).
    """
    # Startup
    logger.info("🚀 Reversecore MCP Server starting...")
    settings = get_config()

    # 1. Ensure workspace exists - fatal if we cannot create it, since
    # every analysis tool depends on a writable workspace.
    try:
        settings.workspace.mkdir(parents=True, exist_ok=True)
        logger.info(f"✅ Workspace ready: {settings.workspace}")
    except Exception as e:
        logger.error(f"❌ Failed to create workspace: {e}")
        raise

    # 2. Check critical dependencies. Missing tools only degrade
    # functionality; they never abort startup.
    dependencies_ok = True

    # Check radare2
    if not shutil.which("radare2"):
        logger.warning("⚠️ radare2 not found in PATH")
        dependencies_ok = False
    else:
        logger.info("✅ radare2 found")

    # Check Java (for Ghidra)
    if not shutil.which("java"):
        logger.warning("⚠️ Java not found - Ghidra decompilation unavailable")
    else:
        logger.info("✅ Java found")

    # Check graphviz (for PNG CFG generation)
    if not shutil.which("dot"):
        logger.warning("⚠️ graphviz not found - PNG CFG generation unavailable")
    else:
        logger.info("✅ graphviz found")

    if not dependencies_ok:
        logger.warning("⚠️ Some dependencies missing, functionality may be limited")

    logger.info("✅ Server startup complete")

    # 3. Start Resource Manager
    await resource_manager.start()

    # 4. Initialize AI Memory Store (imported lazily so module import does
    # not pay for the memory backend)
    from reversecore_mcp.core.memory import initialize_memory_store

    try:
        await initialize_memory_store()
        logger.info("✅ AI Memory store initialized")
    except Exception as e:
        # Memory store is optional - log and continue without it.
        logger.warning(f"⚠️ Memory store initialization failed: {e}")

    # Note: Async resources are initialized lazily when first accessed
    logger.info("Async resources ready")

    # Start cleanup task (cancelled in the shutdown phase below)
    cleanup_task = asyncio.create_task(_cleanup_old_files())

    # ============================================================================
    # SERVER RUNNING (yield control)
    # ============================================================================
    yield

    # ============================================================================
    # SHUTDOWN
    # ============================================================================
    logger.info("🛑 Reversecore MCP Server shutting down...")

    # Stop Resource Manager
    await resource_manager.stop()

    # Close AI Memory Store
    from reversecore_mcp.core.memory import get_memory_store

    try:
        memory_store = get_memory_store()
        await memory_store.close()
        logger.info("💾 AI Memory store closed")
    except Exception as e:
        # Close failures are non-fatal during shutdown; debug-level only.
        logger.debug(f"Memory store close: {e}")

    # Cancel cleanup task and wait for it to acknowledge cancellation
    cleanup_task.cancel()
    try:
        await cleanup_task
    except asyncio.CancelledError:
        pass

    try:
        # Perform cleanup
        from reversecore_mcp.core.ghidra_manager import ghidra_manager

        ghidra_manager.close_all()

        # Cleanup temp directory if it exists
        temp_dir = settings.workspace / "tmp"
        if temp_dir.exists():
            shutil.rmtree(temp_dir, ignore_errors=True)
            logger.info("Cleaned up temporary directory")

    except Exception as e:
        logger.error(f"Error during shutdown cleanup: {e}")

    # Cleanup temporary files (original logic, kept for now)
    try:
        temp_files = list(settings.workspace.glob("*.tmp"))
        temp_files.extend(settings.workspace.glob(".r2_*"))  # radare2 temp files

        for temp_file in temp_files:
            try:
                temp_file.unlink()
                logger.debug(f"Cleaned up: {temp_file.name}")
            except (OSError, FileNotFoundError) as e:
                logger.debug(f"Could not remove temp file {temp_file.name}: {e}")

        if temp_files:
            logger.info(f"🧹 Cleaned up {len(temp_files)} temporary files")
    except Exception as e:
        logger.error(f"Error during cleanup: {e}")

    logger.info("👋 Server shutdown complete")
163
+
164
+
165
async def _cleanup_old_files():
    """Background task that periodically deletes stale temporary files.

    Loops forever (until cancelled): once per hour it scans the workspace's
    ``tmp/`` and ``uploads/`` sub-directories for files older than the
    configured retention period and removes only files that are clearly
    temporary:

    * UUID-prefixed uploads (eight hex chars followed by ``_``), as
      produced by the ``/upload`` endpoint, or
    * scratch files (``*.tmp`` suffix or ``.r2_*`` prefix from radare2).

    Files in the workspace root are never touched, so user-provided
    analysis binaries are safe. Exits cleanly on task cancellation.
    """
    settings = get_config()
    retention_seconds = settings.file_retention_minutes * 60
    logger.info(f"Started workspace cleaner (Retention: {settings.file_retention_minutes} mins)")

    # UUID prefix added by the upload endpoint; compiled once outside the loop.
    uuid_prefix = re.compile(r"^[0-9a-f]{8}_")

    while True:
        try:
            # One sweep per hour keeps overhead negligible.
            await asyncio.sleep(3600)

            workspace = settings.workspace
            if not workspace.exists():
                continue

            now = time.time()
            count = 0

            # Only scan tmp/ and uploads/ - never the workspace root, to
            # avoid deleting a user's important analysis files.
            for target_dir in (workspace / "tmp", workspace / "uploads"):
                if not target_dir.exists():
                    continue

                for p in target_dir.rglob("*"):
                    # Guard each entry individually: a file can vanish (or
                    # become unreadable) between rglob() and stat(), and a
                    # single race must not abort the whole sweep.
                    try:
                        if not p.is_file():
                            continue
                        if now - p.stat().st_mtime <= retention_seconds:
                            continue

                        # Delete only clearly-temporary artifacts.
                        is_uuid_upload = bool(uuid_prefix.match(p.name))
                        is_temp = p.suffix == ".tmp" or p.name.startswith(".r2_")
                        if is_uuid_upload or is_temp:
                            p.unlink()
                            count += 1
                    except OSError:
                        # Stat/unlink race or permission problem; skip this file.
                        continue

            if count > 0:
                logger.info(f"Cleaner: Removed {count} old files")

        except asyncio.CancelledError:
            break  # graceful shutdown via cleanup_task.cancel()
        except Exception as e:
            logger.error(f"Cleaner task error: {e}")
            await asyncio.sleep(300)  # Retry sooner on error
223
+
224
+
225
# Extensions treated as "safe" (plain documents / images). A file whose
# content is executable but whose name carries one of these extensions is
# assumed to be maliciously renamed. Single source of truth - previously
# this list was duplicated in two places inside the function.
_SAFE_EXTENSIONS = frozenset(
    ["txt", "pdf", "json", "yml", "yaml", "md", "csv", "log", "png", "jpg", "jpeg", "gif"]
)

# Magic headers of common executable formats, used for the manual fallback
# when python-magic is unavailable: (header prefix, human-readable name).
_EXECUTABLE_HEADERS = [
    (b"MZ", "DOS/PE executable"),
    (b"\x7fELF", "ELF executable"),
    (b"\xca\xfe\xba\xbe", "Mach-O universal"),
    (b"\xcf\xfa\xed\xfe", "Mach-O 64-bit"),
    (b"\xce\xfa\xed\xfe", "Mach-O 32-bit"),
    (b"\xfe\xed\xfa\xce", "Mach-O 32-bit (BE)"),
    (b"\xfe\xed\xfa\xcf", "Mach-O 64-bit (BE)"),
]

# MIME types that libmagic reports for executable content.
_EXECUTABLE_MIMES = frozenset(
    [
        "application/x-dosexec",
        "application/x-executable",
        "application/x-elf",
        "application/x-mach-binary",
    ]
)


async def _validate_file_magic(file_path: str, filename: str):
    """
    Validate file content matches extension using libmagic.

    Prevents malicious renaming (e.g. malware.exe -> report.pdf): if the
    file's *content* is executable but its *extension* claims a safe
    document type, the file is quarantined by renaming it to
    ``<path>.dangerous`` and a ``ValueError`` is raised so the caller can
    reject the upload.

    Args:
        file_path: Path of the file on disk to inspect.
        filename: Sanitized filename whose extension is checked.

    Raises:
        ValueError: If executable content hides behind a safe extension.
        Exception: When python-magic is installed but inspection fails we
            fail closed and re-raise; without python-magic, I/O errors in
            the fallback path are logged and the file is allowed through.
    """
    ext = filename.lower().split(".")[-1] if "." in filename else ""
    is_safe_ext = ext in _SAFE_EXTENSIONS

    if not magic:
        # Fallback: Check magic headers manually when python-magic is unavailable
        logger.warning("python-magic not installed. Using fallback header validation.")
        try:
            async with aiofiles.open(file_path, "rb") as f:
                header = await f.read(8)

            for magic_bytes, desc in _EXECUTABLE_HEADERS:
                if header.startswith(magic_bytes):
                    if is_safe_ext:
                        import os

                        # Quarantine rather than delete so the sample can
                        # still be analyzed deliberately.
                        new_path = file_path + ".dangerous"
                        os.rename(file_path, new_path)
                        raise ValueError(
                            f"Security Alert: File (unknown) contains {desc} code but has safe extension. Renamed to .dangerous"
                        )
                    return  # Executable with executable extension is OK
            return  # No executable header found
        except ValueError:
            raise
        except Exception as e:
            # Best-effort only: without libmagic we fail open on I/O errors.
            logger.warning(f"Fallback magic validation failed: {e}")
            return

    try:
        # Get MIME type from content
        mime = magic.from_file(file_path, mime=True)

        # Suspicious mismatch: executable content behind a safe extension.
        if mime in _EXECUTABLE_MIMES and is_safe_ext:
            logger.warning(f"SECURITY: Executable content detected in (unknown) (MIME: {mime})")
            # In high security mode, we might delete it.
            # For now, rename it to .dangerous to quarantine it.
            import os

            new_path = file_path + ".dangerous"
            os.rename(file_path, new_path)
            raise ValueError(
                f"Security Alert: File (unknown) contains executable code but has safe extension. Renamed to .dangerous"
            )

    except Exception as e:
        # Let our own security ValueError propagate unchanged.
        if "Security Alert" in str(e):
            raise
        logger.warning(f"Magic validation failed for (unknown): {e}")
        # Fail closed: with libmagic installed, refuse files we cannot inspect.
        raise
330
+
331
+
332
# Initialize the FastMCP server with lifespan management
mcp = FastMCP(name="Reversecore_MCP", lifespan=server_lifespan)

# Register plugins dynamically
import os  # noqa: E402

from reversecore_mcp.core.loader import PluginLoader  # noqa: E402

# Initialize plugin loader
loader = PluginLoader()

# Discover and load plugins from the tools directory.
# This module lives inside the 'reversecore_mcp' package, so the tools
# package is a sibling directory of this file.
# BUGFIX: the previous path joined "reversecore_mcp/tools" onto the package
# directory itself (yielding reversecore_mcp/reversecore_mcp/tools), which
# never exists and silently forced the CWD-relative fallback below - plugin
# discovery then only worked when the server was launched from the repo root.
tools_dir = os.path.join(os.path.dirname(__file__), "tools")
if not os.path.exists(tools_dir):
    # Fallback for development environments with a different layout where
    # the server is run from the repository root.
    tools_dir = os.path.join(os.getcwd(), "reversecore_mcp", "tools")

plugins = loader.discover_plugins(tools_dir, "reversecore_mcp.tools")

# Register each plugin with the MCP server. One broken plugin must not take
# down the whole server, so failures are logged and skipped.
for plugin in plugins:
    try:
        plugin.register(mcp)
        logger.info(f"Registered plugin: {plugin.name}")
    except Exception as e:
        logger.error(f"Failed to register plugin {plugin.name}: {e}")

# Register prompts
from reversecore_mcp import prompts  # noqa: E402

prompts.register_prompts(mcp)

# Register resources (reversecore:// URIs)
from reversecore_mcp import resources  # noqa: E402

resources.register_resources(mcp)

# Register report tools for malware analysis reporting
from reversecore_mcp.tools.report.report_mcp_tools import register_report_tools  # noqa: E402

report_tools = register_report_tools(mcp)
logger.info("Registered report tools")
377
+
378
+ # ============================================================================
379
+ # Security Middleware
380
+ # ============================================================================
381
+ from starlette.middleware.base import BaseHTTPMiddleware # noqa: E402
382
+ from starlette.requests import Request # noqa: E402
383
+
384
+
385
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that stamps standard security headers on every response."""

    # Header name -> value applied to each outgoing response.
    _SECURITY_HEADERS = {
        "Strict-Transport-Security": "max-age=31536000; includeSubDomains",
        "X-Content-Type-Options": "nosniff",
        "X-Frame-Options": "DENY",
        "Content-Security-Policy": "default-src 'self'",
    }

    async def dispatch(self, request: Request, call_next):
        # Let the downstream app build the response, then decorate it.
        response = await call_next(request)
        for header_name, header_value in self._SECURITY_HEADERS.items():
            response.headers[header_name] = header_value
        return response
393
+
394
+
395
# Access underlying FastAPI app to add middleware
# Note: FastMCP 2.13.1 exposes _fastapi_app or we can use mcp.fastapi_app if available
# NOTE(review): if the installed FastMCP exposes neither attribute, the
# security-header middleware is silently skipped - confirm against the
# pinned FastMCP version (the HTTP path in main() mounts mcp.http_app()
# on its own host app, where this attach may not apply at all).
if hasattr(mcp, "_fastapi_app"):
    mcp._fastapi_app.add_middleware(SecurityHeadersMiddleware)
elif hasattr(mcp, "fastapi_app"):
    mcp.fastapi_app.add_middleware(SecurityHeadersMiddleware)
402
+
403
+ # ============================================================================
404
+ # Server Composition (Mounting Sub-servers)
405
+ # ============================================================================
406
+ # If you have specialized sub-servers (e.g., Ghidra-only, Dynamic-analysis-only),
407
+ # you can mount them here to create a unified platform:
408
+ #
409
+ # Example:
410
+ # from ghidra_server import ghidra_mcp
411
+ # mcp.mount("ghidra", ghidra_mcp)
412
+ #
413
+ # Now clients can access ghidra tools with prefix: ghidra.tool_name
414
+ # This allows microservice-style architecture for large deployments.
415
+ # ============================================================================
416
+
417
+
418
+ # ============================================================================
419
+ # Authentication (HTTP mode only)
420
+ # ============================================================================
421
def setup_authentication():
    """
    Setup API Key authentication for HTTP transport mode.

    To enable authentication, set environment variable:
        MCP_API_KEY=your-secret-key

    All HTTP requests must include header:
        X-API-Key: your-secret-key

    Returns:
        A FastAPI ``Depends(...)`` object that enforces the key on every
        route, or ``None`` when MCP_API_KEY is unset (auth disabled).
    """
    import os
    import secrets

    from fastapi import Depends, HTTPException, Request, status
    from fastapi.security import APIKeyHeader

    api_key = os.getenv("MCP_API_KEY")

    if not api_key:
        logger.info("🔓 API Key authentication disabled (MCP_API_KEY not set)")
        return None

    logger.info("🔐 API Key authentication enabled")

    # auto_error=False so a missing header reaches our handler (key=None)
    # instead of FastAPI short-circuiting with its own error response.
    api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)

    async def verify_api_key(request: Request, key: str = Depends(api_key_header)):
        # Allow health endpoint without authentication
        if request.url.path == "/health":
            return

        # secrets.compare_digest provides a constant-time comparison,
        # closing the timing side channel of a plain `!=`; the `not key`
        # guard handles an absent header (key is None).
        if not key or not secrets.compare_digest(key, api_key):
            # request.client can be None (e.g. some test clients / unix sockets).
            client_host = request.client.host if request.client else "unknown"
            logger.warning(f"⚠️ Unauthorized access attempt from {client_host}")
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Invalid or missing API key",
            )
        return key

    return Depends(verify_api_key)
460
+
461
+
462
def main():
    """Run the MCP server.

    Reads the transport mode from configuration. In ``http`` mode a host
    FastAPI app is built with the FastMCP app mounted under ``/mcp`` plus
    health/metrics/upload endpoints, optional API-key auth, optional
    dashboard, and optional slowapi rate limiting, then served by uvicorn.
    Any other mode falls through to stdio transport (the default for local
    AI clients).

    Raises:
        ValueError: Propagated when configured paths fail validation.
    """
    # Get transport mode from settings (default: stdio)
    settings = get_config()

    # Validate paths at startup - fail fast on a misconfigured workspace.
    try:
        settings.validate_paths()
        logger.info("Path validation successful")
    except ValueError as e:
        logger.error(f"Path validation failed: {e}")
        raise

    transport = settings.mcp_transport.lower()

    if transport == "http":
        # HTTP transport mode for network-based AI agents
        import uvicorn
        from fastapi import FastAPI, File, UploadFile
        from fastapi.responses import JSONResponse

        from reversecore_mcp.core.metrics import metrics_collector

        # Setup authentication (if MCP_API_KEY is set)
        auth_dependency = setup_authentication()

        # Build a host FastAPI app with docs enabled and mount FastMCP under /mcp
        # Apply authentication to all endpoints if enabled
        dependencies = [auth_dependency] if auth_dependency else []

        mcp_app = mcp.http_app()

        # Fix: Wrap initialization in FastAPI lifespan.
        # NOTE(review): only server_lifespan is run here; mcp_app's own
        # lifespan is not entered - confirm the mounted FastMCP app does not
        # require it (FastMCP docs suggest passing its lifespan to the host).
        @asynccontextmanager
        async def app_lifespan(app: FastAPI):
            # Run server startup logic
            async with server_lifespan(mcp):
                yield

        app = FastAPI(
            title="Reversecore_MCP",
            docs_url="/docs",
            redoc_url="/redoc",
            openapi_url="/openapi.json",
            dependencies=dependencies,  # Apply authentication globally
            lifespan=app_lifespan,  # Register lifespan
        )
        app.mount("/mcp", mcp_app)

        # Mount dashboard (optional component; absence is not an error)
        try:
            from reversecore_mcp.dashboard import get_router, get_static_files

            app.include_router(get_router())
            app.mount("/dashboard/static", get_static_files(), name="dashboard_static")
            logger.info("📊 Dashboard available at /dashboard/")
        except ImportError as e:
            logger.warning(f"Dashboard not available: {e}")

        # Add health endpoint
        @app.get("/health")
        async def health():
            """Health check endpoint with dependency status.

            Reports "degraded" when radare2 (the only hard tool dependency)
            is missing; other tools are informational only.
            """
            import platform
            import sys
            import time  # local import shadows the module-level `time`; same stdlib module

            health_status = {
                "status": "healthy",
                "service": "Reversecore_MCP",
                "transport": "http",
                "version": "1.0.0",
                "timestamp": time.time(),
                "python_version": sys.version,
                "platform": platform.system(),
                "workspace": str(settings.workspace),
                "workspace_exists": settings.workspace.exists(),
                "dependencies": {},
            }

            # Check dependencies
            deps = health_status["dependencies"]

            # radare2
            if shutil.which("radare2"):
                deps["radare2"] = {"status": "available", "path": shutil.which("radare2")}
            else:
                deps["radare2"] = {"status": "unavailable"}
                health_status["status"] = "degraded"

            # Java (for Ghidra)
            if shutil.which("java"):
                deps["java"] = {"status": "available", "path": shutil.which("java")}
            else:
                deps["java"] = {"status": "unavailable"}

            # Graphviz
            if shutil.which("dot"):
                deps["graphviz"] = {"status": "available", "path": shutil.which("dot")}
            else:
                deps["graphviz"] = {"status": "unavailable"}

            # YARA
            if shutil.which("yara"):
                deps["yara"] = {"status": "available", "path": shutil.which("yara")}
            else:
                deps["yara"] = {"status": "unavailable"}

            # binwalk
            if shutil.which("binwalk"):
                deps["binwalk"] = {"status": "available", "path": shutil.which("binwalk")}
            else:
                deps["binwalk"] = {"status": "unavailable"}

            return JSONResponse(content=health_status)

        # Lightweight liveness probe
        @app.get("/health/live")
        async def liveness():
            """Kubernetes liveness probe endpoint."""
            return JSONResponse(content={"status": "alive"})

        # Readiness probe
        @app.get("/health/ready")
        async def readiness():
            """Kubernetes readiness probe endpoint (503 until workspace + radare2 exist)."""
            is_ready = settings.workspace.exists() and shutil.which("radare2") is not None
            if is_ready:
                return JSONResponse(content={"status": "ready"})
            return JSONResponse(
                status_code=503,
                content={"status": "not_ready", "reason": "Dependencies not available"},
            )

        # Add metrics endpoint
        @app.get("/metrics")
        async def metrics():
            """Metrics endpoint returning collected tool metrics."""
            return JSONResponse(content=metrics_collector.get_metrics())

        # Add file upload endpoint for remote clients (e.g., Claude.ai)
        @app.post("/upload")
        async def upload_file(file: UploadFile = File(...)):
            """
            Upload a file to the workspace for analysis.

            This endpoint allows remote clients (like Claude.ai) to upload files
            to the local workspace for analysis by MCP tools.

            The stored name is a UUID prefix plus a sanitized version of the
            client-supplied name; content is validated against executable
            magic numbers, and failures are audit-logged and cleaned up.

            Args:
                file: The file to upload (multipart/form-data)

            Returns:
                JSON response with file path and status
            """

            def _secure_filename(filename: str) -> str:
                """Sanitize filename to prevent path traversal and injection."""
                # Remove path components
                filename = filename.replace("/", "_").replace("\\", "_")
                # Remove dangerous characters, keep only safe ones
                filename = re.sub(r"[^\w\-.]", "_", filename)
                # Limit length
                if len(filename) > 200:
                    name, ext = filename.rsplit(".", 1) if "." in filename else (filename, "")
                    filename = name[:195] + ("." + ext if ext else "")
                return filename or "unnamed_file"

            try:
                # Ensure uploads directory exists (separate from workspace root)
                upload_dir = settings.workspace / "uploads"
                upload_dir.mkdir(parents=True, exist_ok=True)

                # SECURITY: Sanitize filename and add UUID prefix to prevent overwrites
                original_filename = file.filename or "unnamed"
                safe_filename = f"{uuid.uuid4().hex[:8]}_{_secure_filename(original_filename)}"
                file_path = upload_dir / safe_filename

                # PERFORMANCE: Use aiofiles for non-blocking async I/O
                # This prevents blocking the event loop during large file uploads
                async with aiofiles.open(file_path, "wb") as out_file:
                    while content := await file.read(1024 * 64):  # 64KB chunks
                        await out_file.write(content)

                # Security: Validate file content (Magic Number)
                try:
                    await _validate_file_magic(str(file_path), safe_filename)
                except Exception as e:
                    audit_logger.log_event(
                        AuditAction.FILE_UPLOAD,
                        safe_filename,
                        "FAILURE",
                        details={"error": str(e), "path": str(file_path)},
                    )
                    # Cleanup malicious file. If the validator already renamed
                    # it to .dangerous this unlink fails harmlessly and the
                    # quarantined copy is kept.
                    try:
                        file_path.unlink()
                    except Exception:
                        pass
                    raise

                audit_logger.log_event(
                    AuditAction.FILE_UPLOAD,
                    safe_filename,
                    "SUCCESS",
                    details={"path": str(file_path)},
                )

                logger.info(f"File uploaded successfully: {safe_filename} ({file_path})")
                return JSONResponse(
                    content={
                        "status": "success",
                        "message": "File uploaded successfully",
                        # SECURITY: Don't expose absolute server paths
                        "filename": safe_filename,
                        "original_filename": original_filename,
                        "size": file_path.stat().st_size,
                    }
                )
            except Exception as e:
                logger.error(f"File upload failed: {e}")
                return JSONResponse(
                    status_code=500,
                    content={
                        "status": "error",
                        "message": "File upload failed due to an internal error.",
                    },
                )

        # Optional: apply rate limiting if slowapi is available
        try:
            from slowapi import Limiter, _rate_limit_exceeded_handler  # type: ignore
            from slowapi.errors import RateLimitExceeded  # type: ignore
            from slowapi.util import get_remote_address  # type: ignore

            rate_limit = settings.rate_limit
            limiter = Limiter(key_func=get_remote_address, default_limits=[f"{rate_limit}/minute"])

            # Attach middleware and exception handler.
            # NOTE(review): calling `limiter.middleware(request, call_next)`
            # directly does not match slowapi's documented integration
            # (SlowAPIMiddleware + app.state.limiter) - verify this actually
            # enforces limits with the pinned slowapi version; a mismatch is
            # caught by the broad `except Exception` below and only warned.
            @app.middleware("http")
            async def rate_limit_middleware(request, call_next):  # pragma: no cover - integration
                return await limiter.middleware(request, call_next)

            app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
        except ImportError:
            # slowapi unavailable: log warning as this is a security risk
            logger.warning(
                "slowapi not installed: Rate limiting is DISABLED. This is a security risk in production."
            )
        except Exception as e:
            # Version mismatch or other error
            logger.warning(f"Failed to setup rate limiting: {e}")

        # Run uvicorn with the FastMCP HTTP app
        # IMPORTANT: workers=1 is required because R2 sessions are stored in-memory
        # and not shareable across worker processes.
        # NOTE(review): binding 0.0.0.0 exposes the server on all interfaces;
        # without MCP_API_KEY set this is unauthenticated - confirm deployment
        # sits behind a firewall/reverse proxy.
        uvicorn.run(app, host="0.0.0.0", port=8000, workers=1)
    else:
        # Stdio transport mode for local AI clients (default)
        # Rate limiting not needed for stdio mode (single client)
        mcp.run(transport="stdio")
724
+
725
+
726
# Script entry point: allows running the server directly
# (e.g. `python -m reversecore_mcp.server`) in addition to the
# console-script declared in entry_points.txt.
if __name__ == "__main__":
    main()