ida-pro-mcp-xjoker 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. ida_pro_mcp/__init__.py +0 -0
  2. ida_pro_mcp/__main__.py +6 -0
  3. ida_pro_mcp/ida_mcp/__init__.py +68 -0
  4. ida_pro_mcp/ida_mcp/api_analysis.py +1296 -0
  5. ida_pro_mcp/ida_mcp/api_core.py +337 -0
  6. ida_pro_mcp/ida_mcp/api_debug.py +617 -0
  7. ida_pro_mcp/ida_mcp/api_memory.py +304 -0
  8. ida_pro_mcp/ida_mcp/api_modify.py +406 -0
  9. ida_pro_mcp/ida_mcp/api_python.py +179 -0
  10. ida_pro_mcp/ida_mcp/api_resources.py +295 -0
  11. ida_pro_mcp/ida_mcp/api_stack.py +167 -0
  12. ida_pro_mcp/ida_mcp/api_types.py +480 -0
  13. ida_pro_mcp/ida_mcp/auth.py +166 -0
  14. ida_pro_mcp/ida_mcp/cache.py +232 -0
  15. ida_pro_mcp/ida_mcp/config.py +228 -0
  16. ida_pro_mcp/ida_mcp/framework.py +547 -0
  17. ida_pro_mcp/ida_mcp/http.py +859 -0
  18. ida_pro_mcp/ida_mcp/port_utils.py +104 -0
  19. ida_pro_mcp/ida_mcp/rpc.py +187 -0
  20. ida_pro_mcp/ida_mcp/server_manager.py +339 -0
  21. ida_pro_mcp/ida_mcp/sync.py +233 -0
  22. ida_pro_mcp/ida_mcp/tests/__init__.py +14 -0
  23. ida_pro_mcp/ida_mcp/tests/test_api_analysis.py +336 -0
  24. ida_pro_mcp/ida_mcp/tests/test_api_core.py +237 -0
  25. ida_pro_mcp/ida_mcp/tests/test_api_memory.py +207 -0
  26. ida_pro_mcp/ida_mcp/tests/test_api_modify.py +123 -0
  27. ida_pro_mcp/ida_mcp/tests/test_api_resources.py +199 -0
  28. ida_pro_mcp/ida_mcp/tests/test_api_stack.py +77 -0
  29. ida_pro_mcp/ida_mcp/tests/test_api_types.py +249 -0
  30. ida_pro_mcp/ida_mcp/ui.py +357 -0
  31. ida_pro_mcp/ida_mcp/utils.py +1186 -0
  32. ida_pro_mcp/ida_mcp/zeromcp/__init__.py +5 -0
  33. ida_pro_mcp/ida_mcp/zeromcp/jsonrpc.py +384 -0
  34. ida_pro_mcp/ida_mcp/zeromcp/mcp.py +883 -0
  35. ida_pro_mcp/ida_mcp.py +186 -0
  36. ida_pro_mcp/idalib_server.py +354 -0
  37. ida_pro_mcp/idalib_session_manager.py +259 -0
  38. ida_pro_mcp/server.py +1060 -0
  39. ida_pro_mcp/test.py +170 -0
  40. ida_pro_mcp_xjoker-1.0.1.dist-info/METADATA +405 -0
  41. ida_pro_mcp_xjoker-1.0.1.dist-info/RECORD +45 -0
  42. ida_pro_mcp_xjoker-1.0.1.dist-info/WHEEL +5 -0
  43. ida_pro_mcp_xjoker-1.0.1.dist-info/entry_points.txt +4 -0
  44. ida_pro_mcp_xjoker-1.0.1.dist-info/licenses/LICENSE +21 -0
  45. ida_pro_mcp_xjoker-1.0.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,232 @@
1
+ """LRU Cache implementation for IDA Pro MCP Server.
2
+
3
+ Provides thread-safe caching with TTL expiration for frequently accessed data
4
+ such as function lookups, decompilation results, and cross-references.
5
+ """
6
+
7
+ import threading
8
+ import time
9
+ from collections import OrderedDict
10
+ from typing import Callable, Generic, Optional, TypeVar
11
+
12
+ T = TypeVar("T")
13
+
14
+
15
class LRUCache(Generic[T]):
    """Thread-safe LRU cache with TTL expiration.

    Features:
    - Automatic eviction of least recently used items when max_size is reached
    - Time-based expiration (TTL) for cached entries, including per-entry TTLs
    - Thread-safe operations using RLock
    - Optional key generation function for complex keys
    """

    def __init__(self, max_size: int = 1000, ttl_seconds: float = 300.0):
        """Initialize the LRU cache.

        Args:
            max_size: Maximum number of entries to store
            ttl_seconds: Default time-to-live for entries in seconds (0 = no expiration)
        """
        self.max_size = max_size
        self.ttl = ttl_seconds
        # Maps key -> (value, expiry deadline on the monotonic clock, or None
        # for "never expires"). The deadline is fixed at insertion time, so a
        # per-entry TTL override survives later reads and changes to self.ttl.
        self._cache: OrderedDict[str, tuple[T, Optional[float]]] = OrderedDict()
        self._lock = threading.RLock()
        self._hits = 0
        self._misses = 0

    def get(self, key: str) -> Optional[T]:
        """Get a value from the cache.

        Args:
            key: Cache key

        Returns:
            Cached value or None if not found/expired
        """
        with self._lock:
            entry = self._cache.get(key)
            if entry is None:
                self._misses += 1
                return None

            value, expires_at = entry

            # Check TTL expiration against the deadline recorded at set() time
            if expires_at is not None and time.monotonic() > expires_at:
                del self._cache[key]
                self._misses += 1
                return None

            # Move to end (most recently used)
            self._cache.move_to_end(key)
            self._hits += 1
            return value

    def set(self, key: str, value: T, ttl: Optional[float] = None) -> None:
        """Set a value in the cache.

        Args:
            key: Cache key
            value: Value to cache
            ttl: Optional per-entry TTL in seconds; defaults to the cache-wide
                 TTL when None (a value <= 0 means the entry never expires)
        """
        with self._lock:
            # Remove old entry if exists
            if key in self._cache:
                del self._cache[key]

            # Evict LRU entries if at capacity
            while len(self._cache) >= self.max_size:
                self._cache.popitem(last=False)

            # Record the absolute expiry deadline now so later reads do not
            # depend on whatever self.ttl happens to be at that moment.
            effective_ttl = self.ttl if ttl is None else ttl
            expires_at = (
                time.monotonic() + effective_ttl if effective_ttl > 0 else None
            )
            self._cache[key] = (value, expires_at)

    def invalidate(self, key: Optional[str] = None) -> None:
        """Invalidate cache entries.

        Args:
            key: Specific key to invalidate, or None to clear all
        """
        with self._lock:
            if key is None:
                self._cache.clear()
            elif key in self._cache:
                del self._cache[key]

    def invalidate_prefix(self, prefix: str) -> int:
        """Invalidate all entries with keys starting with prefix.

        Args:
            prefix: Key prefix to match

        Returns:
            Number of entries invalidated
        """
        with self._lock:
            keys_to_remove = [k for k in self._cache if k.startswith(prefix)]
            for key in keys_to_remove:
                del self._cache[key]
            return len(keys_to_remove)

    def get_or_compute(
        self,
        key: str,
        compute_fn: Callable[[], T],
        ttl_override: Optional[float] = None,
    ) -> T:
        """Get from cache or compute and cache the value.

        Args:
            key: Cache key
            compute_fn: Function to compute value if not cached
            ttl_override: Optional TTL override for this entry

        Returns:
            Cached or computed value

        Note:
            A computed value of None is stored but indistinguishable from a
            miss, so compute_fn re-runs on every call that yields None.
        """
        # Try to get from cache first
        value = self.get(key)
        if value is not None:
            return value

        # Compute value
        value = compute_fn()

        # Pass the override through set() instead of temporarily mutating
        # self.ttl: the old approach was not thread-safe and had no effect
        # anyway, because expiry used to be evaluated against self.ttl at
        # read time rather than captured at insertion time.
        self.set(key, value, ttl=ttl_override)

        return value

    def stats(self) -> dict:
        """Get cache statistics.

        Returns:
            Dict with hits, misses, size, and hit_rate
        """
        with self._lock:
            total = self._hits + self._misses
            return {
                "hits": self._hits,
                "misses": self._misses,
                "size": len(self._cache),
                "max_size": self.max_size,
                "hit_rate": self._hits / total if total > 0 else 0.0,
            }

    def clear_stats(self) -> None:
        """Reset cache statistics."""
        with self._lock:
            self._hits = 0
            self._misses = 0
169
+
170
+
171
# ============================================================================
# Global Cache Instances
# ============================================================================
# Shared, module-level caches; sized/TTL'd per the cost and volatility of the
# data they hold. Cleared via invalidate_all_caches()/invalidate_function_caches().

# Function lookup cache: name/address -> function info
# High TTL since function metadata rarely changes
function_cache = LRUCache(max_size=5000, ttl_seconds=300.0)

# Decompilation cache: address -> pseudocode
# Moderate size, longer TTL since decompilation is expensive
decompile_cache = LRUCache(max_size=200, ttl_seconds=600.0)

# Cross-reference cache: address -> xrefs list
# Moderate size and TTL
xrefs_cache = LRUCache(max_size=2000, ttl_seconds=300.0)

# String cache: regex pattern -> matches
# Smaller size, moderate TTL
string_cache = LRUCache(max_size=500, ttl_seconds=180.0)
190
+
191
+
192
def invalidate_all_caches() -> None:
    """Clear every global cache.

    Call this when the IDB changes significantly (e.g., new analysis,
    database reload) so no stale entries survive.
    """
    for cache in (function_cache, decompile_cache, xrefs_cache, string_cache):
        cache.invalidate()
201
+
202
+
203
def invalidate_function_caches(addr: Optional[int] = None) -> None:
    """Invalidate caches related to a specific function.

    Args:
        addr: Function address to invalidate, or None for all functions
    """
    if addr is not None:
        # Targeted invalidation: drop only the entries keyed by this
        # function's hex address.
        key = hex(addr)
        function_cache.invalidate(key)
        decompile_cache.invalidate(key)
        xrefs_cache.invalidate_prefix(key)
        return

    # No address given: wipe all function-related caches.
    function_cache.invalidate()
    decompile_cache.invalidate()
    xrefs_cache.invalidate()
219
+
220
+
221
def get_cache_stats() -> dict:
    """Get statistics for all caches.

    Returns:
        Dict with stats for each cache type
    """
    named_caches = {
        "function_cache": function_cache,
        "decompile_cache": decompile_cache,
        "xrefs_cache": xrefs_cache,
        "string_cache": string_cache,
    }
    return {name: cache.stats() for name, cache in named_caches.items()}
@@ -0,0 +1,228 @@
1
+ """IDA MCP Configuration System
2
+
3
+ Provides configuration management for MCP server instances with support for:
4
+ - Multiple server instances (different hosts/ports)
5
+ - API Key authentication
6
+ - Persistent configuration storage
7
+ """
8
+
9
+ import os
10
+ import logging
11
+ from dataclasses import dataclass, field
12
+ from pathlib import Path
13
+ from typing import Optional
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+ # Configuration directory
18
+ CONFIG_DIR = Path.home() / ".ida_mcp"
19
+ CONFIG_FILE = CONFIG_DIR / "config.toml"
20
+
21
+
22
@dataclass
class ServerInstanceConfig:
    """Configuration for a single MCP server instance."""

    instance_id: str
    host: str = "127.0.0.1"
    port: int = 13337
    enabled: bool = True
    auth_enabled: bool = False
    # Either a literal key or an environment reference of the form "${VAR}".
    api_key: Optional[str] = None
    auto_start: bool = False

    def __post_init__(self):
        # Keep the raw (possibly "${VAR}") value so to_dict() can round-trip
        # the reference instead of persisting the resolved secret to disk.
        self._raw_api_key: Optional[str] = self.api_key
        # Resolve environment variable references in api_key
        if self.api_key and self.api_key.startswith("${") and self.api_key.endswith("}"):
            env_var = self.api_key[2:-1]
            # An unset variable resolves to None (no usable key).
            self.api_key = os.environ.get(env_var)

    @property
    def address(self) -> str:
        """Return the "host:port" string for this instance."""
        return f"{self.host}:{self.port}"

    def to_dict(self) -> dict:
        """Serialize for persistence.

        Writes the original api_key reference (e.g. "${VAR}") rather than the
        resolved secret, so saving the config neither leaks keys sourced from
        the environment nor loses the indirection.
        """
        return {
            "instance_id": self.instance_id,
            "host": self.host,
            "port": self.port,
            "enabled": self.enabled,
            "auth_enabled": self.auth_enabled,
            "api_key": self._raw_api_key,
            "auto_start": self.auto_start,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ServerInstanceConfig":
        """Build an instance from a plain dict, filling in defaults."""
        return cls(
            instance_id=data.get("instance_id", "default"),
            host=data.get("host", "127.0.0.1"),
            port=data.get("port", 13337),
            enabled=data.get("enabled", True),
            auth_enabled=data.get("auth_enabled", False),
            api_key=data.get("api_key"),
            auto_start=data.get("auto_start", False),
        )
66
+
67
+
68
@dataclass
class McpConfig:
    """Main configuration container."""

    version: int = 1
    servers: list[ServerInstanceConfig] = field(default_factory=list)
    tool_timeout_sec: float = 15.0
    debug: bool = False

    def __post_init__(self):
        # Guarantee there is always at least one server entry to work with.
        if not self.servers:
            self.servers.append(ServerInstanceConfig(instance_id="local"))

    def get_server(self, instance_id: str) -> Optional[ServerInstanceConfig]:
        """Return the server with the given id, or None if absent."""
        return next(
            (s for s in self.servers if s.instance_id == instance_id), None
        )

    def add_server(self, config: ServerInstanceConfig) -> bool:
        """Add a server instance; refuses duplicate instance ids."""
        if self.get_server(config.instance_id) is not None:
            return False
        self.servers.append(config)
        return True

    def remove_server(self, instance_id: str) -> bool:
        """Remove the first server with the given id; True on success."""
        target = self.get_server(instance_id)
        if target is None:
            return False
        self.servers.remove(target)
        return True

    def to_dict(self) -> dict:
        """Serialize the whole configuration to a plain dict."""
        return {
            "version": self.version,
            "tool_timeout_sec": self.tool_timeout_sec,
            "debug": self.debug,
            "servers": [s.to_dict() for s in self.servers],
        }

    @classmethod
    def from_dict(cls, data: dict) -> "McpConfig":
        """Build a config from a plain dict, filling in defaults."""
        return cls(
            version=data.get("version", 1),
            servers=[
                ServerInstanceConfig.from_dict(s) for s in data.get("servers", [])
            ],
            tool_timeout_sec=data.get("tool_timeout_sec", 15.0),
            debug=data.get("debug", False),
        )
120
+
121
+
122
def _ensure_config_dir() -> None:
    """Create the configuration directory (and parents) if it doesn't exist."""
    # exist_ok makes this safe to call unconditionally.
    CONFIG_DIR.mkdir(exist_ok=True, parents=True)
125
+
126
+
127
def load_config() -> McpConfig:
    """Load configuration from the TOML file.

    Returns defaults when the file is missing, when no TOML parser is
    available, or when the file cannot be parsed.
    """
    if not CONFIG_FILE.exists():
        return McpConfig()

    try:
        try:
            # Python 3.11+ has tomllib in stdlib
            import tomllib

            with open(CONFIG_FILE, "rb") as f:
                data = tomllib.load(f)
        except ImportError:
            # Fallback: the third-party toml package
            try:
                import toml
            except ImportError:
                logger.warning("No TOML parser available, using defaults")
                return McpConfig()
            data = toml.load(CONFIG_FILE)
        return McpConfig.from_dict(data)
    except Exception as e:
        # Catches parse errors from BOTH parser paths. Previously, a parse
        # error in the toml fallback was raised inside the `except
        # ImportError` handler and escaped the function instead of
        # degrading to defaults.
        logger.error(f"Failed to load config: {e}")
        return McpConfig()
152
+
153
+
154
def save_config(config: McpConfig) -> bool:
    """Persist configuration to the TOML file.

    Returns:
        True on success, False if writing failed.
    """
    _ensure_config_dir()

    try:
        try:
            # Prefer the toml package for writing (tomllib is read-only)
            import toml
        except ImportError:
            # No TOML writer available: hand-roll the output instead.
            return _write_toml_manual(config)
        with open(CONFIG_FILE, "w") as f:
            toml.dump(config.to_dict(), f)
        return True
    except Exception as e:
        logger.error(f"Failed to save config: {e}")
        return False
172
+
173
+
174
def _write_toml_manual(config: McpConfig) -> bool:
    """Write config as TOML without external dependencies.

    Supports only the simple string/int/float/bool shapes used by McpConfig.

    Returns:
        True once the file has been written.
    """

    def esc(s: str) -> str:
        # Escape backslashes and double quotes; without this, values
        # containing `"` or `\` produced invalid TOML basic strings.
        return s.replace("\\", "\\\\").replace('"', '\\"')

    lines = [
        f"version = {config.version}",
        f"tool_timeout_sec = {config.tool_timeout_sec}",
        f"debug = {'true' if config.debug else 'false'}",
        "",
    ]

    for server in config.servers:
        lines.append("[[servers]]")
        lines.append(f'instance_id = "{esc(server.instance_id)}"')
        lines.append(f'host = "{esc(server.host)}"')
        lines.append(f"port = {server.port}")
        lines.append(f"enabled = {'true' if server.enabled else 'false'}")
        lines.append(f"auth_enabled = {'true' if server.auth_enabled else 'false'}")
        if server.api_key:
            lines.append(f'api_key = "{esc(server.api_key)}"')
        lines.append(f"auto_start = {'true' if server.auto_start else 'false'}")
        lines.append("")

    with open(CONFIG_FILE, "w") as f:
        f.write("\n".join(lines))
    return True
198
+
199
+
200
# Global config instance (lazy loaded); read via get_config(), refreshed via
# reload_config(). None means "not loaded yet".
_config: Optional[McpConfig] = None
202
+
203
+
204
def get_config() -> McpConfig:
    """Return the process-wide configuration, loading it on first access."""
    global _config
    if _config is not None:
        return _config
    _config = load_config()
    return _config
210
+
211
+
212
def reload_config() -> McpConfig:
    """Discard any cached configuration and re-read it from disk."""
    global _config
    fresh = load_config()
    _config = fresh
    return fresh
217
+
218
+
219
# Public API of this module: the config dataclasses, load/save helpers, the
# cached-instance accessors, and the on-disk location constants.
__all__ = [
    "ServerInstanceConfig",
    "McpConfig",
    "load_config",
    "save_config",
    "get_config",
    "reload_config",
    "CONFIG_DIR",
    "CONFIG_FILE",
]