zen-ai-pentest 2.2.0__py3-none-any.whl → 2.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,272 @@
1
+ """
2
+ Scan Performance Benchmarks
3
+
4
+ Measures scan throughput, target processing speed, and tool execution times.
5
+ """
6
+
7
+ import asyncio
8
+ import time
9
+ from typing import List, Dict, Any, Optional, Callable
10
+ from dataclasses import dataclass, field
11
+
12
+ import sys
13
+ from pathlib import Path
14
+ sys.path.insert(0, str(Path(__file__).parent.parent))
15
+
16
+ from modules.benchmark import (
17
+ BenchmarkRunner, BenchmarkResult, BenchmarkCategory,
18
+ ThroughputMetrics, measure_scan_throughput
19
+ )
20
+
21
+
22
@dataclass
class ScanBenchmarkConfig:
    """Configuration for scan benchmarks."""
    # Number of synthetic targets to generate for a run.
    target_count: int = 10
    # Kind of target to synthesize; see _generate_test_targets.
    target_type: str = "host"  # host, web, network
    # Port selection hint — not read by the mock scans in this module;
    # presumably consumed by a real scan backend (TODO confirm).
    ports: str = "top-100"
    # Scan intensity hint — not read by the mock scans in this module.
    scan_depth: str = "quick"  # quick, standard, deep
    # Parallelism limit used by the concurrent-scan benchmark.
    concurrent_scans: int = 3
    # Warm-up target count — not referenced anywhere in this file;
    # TODO confirm whether measure_scan_throughput honors it.
    warmup_targets: int = 2
31
+
32
+
33
class ScanPerformanceBenchmark:
    """Benchmark suite for scan performance measurement.

    Each ``benchmark_*`` coroutine drives a mock scan coroutine through
    ``measure_scan_throughput`` (or ``BenchmarkRunner.run_benchmark`` for
    the concurrency case) and returns a labelled ``BenchmarkResult``.
    """

    def __init__(self, output_dir: str = "benchmark_results"):
        """
        Args:
            output_dir: Directory where benchmark results are persisted.
        """
        self.runner = BenchmarkRunner(output_dir=output_dir)
        # Default configuration used when a benchmark is called without one.
        self.config = ScanBenchmarkConfig()

    def _generate_test_targets(self, count: int, target_type: str) -> List[str]:
        """Generate synthetic test targets for benchmarking.

        Args:
            count: Desired number of targets (capped per target type).
            target_type: One of "host", "web", or "network".

        Returns:
            List of target strings; empty list for an unknown type.
        """
        if target_type == "host":
            # 192.168.1.1 .. 192.168.1.254 at most (network/broadcast skipped).
            return [f"192.168.1.{i}" for i in range(1, min(count + 1, 255))]
        if target_type == "web":
            return [f"test-target-{i}.local" for i in range(count)]
        if target_type == "network":
            # Cap at ten /24 ranges to keep runs bounded.
            return [f"10.0.{i}.0/24" for i in range(min(count, 10))]
        return []

    async def _labelled_throughput(
        self,
        scan_func: Callable,
        targets: List[str],
        name: str,
        description: str,
    ) -> BenchmarkResult:
        """Shared helper: measure throughput of scan_func over targets and
        attach the given name/description to the result."""
        result, _ = await measure_scan_throughput(
            scan_func=scan_func,
            targets=targets,
        )
        result.name = name
        result.description = description
        return result

    async def benchmark_nmap_speed(self, config: Optional[ScanBenchmarkConfig] = None) -> BenchmarkResult:
        """
        Benchmark Nmap scan speed.

        Returns:
            BenchmarkResult with scan timing metrics
        """
        cfg = config or self.config
        targets = self._generate_test_targets(cfg.target_count, cfg.target_type)

        # Mock scan function standing in for a real Nmap invocation.
        async def mock_nmap_scan(target: str) -> Dict[str, Any]:
            await asyncio.sleep(0.5)  # simulated scan latency
            return {"target": target, "status": "completed", "ports_found": 5}

        return await self._labelled_throughput(
            mock_nmap_scan,
            targets,
            "nmap_scan_speed",
            f"Nmap scan speed for {len(targets)} targets",
        )

    async def benchmark_web_scan_speed(self, config: Optional[ScanBenchmarkConfig] = None) -> BenchmarkResult:
        """
        Benchmark web vulnerability scan speed.

        Returns:
            BenchmarkResult with web scan timing metrics
        """
        cfg = config or ScanBenchmarkConfig(target_count=5, target_type="web")
        targets = self._generate_test_targets(cfg.target_count, cfg.target_type)

        # Mock web scan simulating crawl + test latency.
        async def mock_web_scan(target: str) -> Dict[str, Any]:
            await asyncio.sleep(1.2)
            return {
                "target": target,
                "pages_scanned": 15,
                "vulnerabilities_found": 3
            }

        return await self._labelled_throughput(
            mock_web_scan,
            targets,
            "web_scan_speed",
            f"Web vulnerability scan speed for {len(targets)} targets",
        )

    async def benchmark_scan_throughput(self, config: Optional[ScanBenchmarkConfig] = None) -> BenchmarkResult:
        """
        Benchmark overall scan throughput (targets per minute).

        Returns:
            BenchmarkResult with throughput metrics
        """
        cfg = config or self.config
        targets = self._generate_test_targets(cfg.target_count, cfg.target_type)

        async def mock_scan(target: str) -> Dict[str, Any]:
            # Simulate varying scan times. NOTE(review): hash() of a str is
            # salted per process (PYTHONHASHSEED), so delays differ between
            # runs — acceptable for simulated jitter, but not reproducible.
            delay = 0.3 + (hash(target) % 10) / 10  # 0.3-1.3s
            await asyncio.sleep(delay)
            return {"target": target, "completed": True}

        return await self._labelled_throughput(
            mock_scan,
            targets,
            "scan_throughput",
            f"Overall scan throughput: {len(targets)} targets",
        )

    async def benchmark_concurrent_scans(self, config: Optional[ScanBenchmarkConfig] = None) -> BenchmarkResult:
        """
        Benchmark performance with concurrent scans.

        Returns:
            BenchmarkResult with concurrent scan metrics
        """
        cfg = config or self.config
        # Double the target count so the semaphore actually saturates.
        targets = self._generate_test_targets(cfg.target_count * 2, cfg.target_type)

        async def concurrent_benchmark():
            # Bound parallelism to cfg.concurrent_scans via a semaphore.
            semaphore = asyncio.Semaphore(cfg.concurrent_scans)

            async def scan_with_limit(target: str):
                async with semaphore:
                    await asyncio.sleep(0.4)
                    return {"target": target}

            await asyncio.gather(*[scan_with_limit(t) for t in targets])

        result = await self.runner.run_benchmark(
            name="concurrent_scan_performance",
            category=BenchmarkCategory.SCAN,
            description=f"Concurrent scan performance ({cfg.concurrent_scans} parallel)",
            benchmark_func=concurrent_benchmark,
            iterations=1,
            monitor_resources=True
        )

        # Derive effective throughput from the wall-clock duration.
        result.throughput = ThroughputMetrics(
            operations=len(targets),
            duration_seconds=result.timing.duration_seconds
        )
        result.throughput.calculate()

        result.custom_metrics["concurrent_scans"] = cfg.concurrent_scans
        result.custom_metrics["total_targets"] = len(targets)

        return result

    async def run_all(self) -> List[BenchmarkResult]:
        """Run all scan performance benchmarks, saving each result.

        Returns:
            List of results for the benchmarks that completed.
        """
        results = []

        print("Running scan performance benchmarks...")

        benchmarks = [
            ("Nmap Speed", self.benchmark_nmap_speed),
            ("Web Scan Speed", self.benchmark_web_scan_speed),
            ("Scan Throughput", self.benchmark_scan_throughput),
            ("Concurrent Scans", self.benchmark_concurrent_scans),
        ]

        for name, benchmark_func in benchmarks:
            print(f" Running: {name}...")
            try:
                result = await benchmark_func()
                results.append(result)
                self.runner.save_result(result)
                # BUG FIX: a result without throughput metrics previously
                # raised AttributeError here, which the except swallowed and
                # misreported as a benchmark failure (after the result had
                # already been saved). Guard before dereferencing.
                if result.throughput is not None:
                    print(f" ✓ {name}: {result.throughput.ops_per_minute:.1f} targets/min")
                else:
                    print(f" ✓ {name}: completed")
            except Exception as e:
                print(f" ✗ {name} failed: {e}")

        # Save combined results
        self.runner.save_all_results("scan_benchmarks.json")

        return results

    def get_summary(self) -> Dict[str, Any]:
        """Get summary of scan benchmark results."""
        return self.runner.get_summary()
210
+
211
+
212
# Convenience function
async def measure_scan_speed(
    scan_func: Callable,
    targets: List[str],
    output_dir: str = "benchmark_results"
) -> BenchmarkResult:
    """Measure scan speed over a set of targets and persist the result.

    Args:
        scan_func: Async callable that scans a single target.
        targets: Targets to run ``scan_func`` over.
        output_dir: Directory where the result file is written.

    Returns:
        BenchmarkResult with scan metrics.
    """
    throughput_result, _ = await measure_scan_throughput(
        scan_func=scan_func,
        targets=targets
    )

    # Persist via a throwaway runner pointed at the requested directory.
    BenchmarkRunner(output_dir=output_dir).save_result(throughput_result)

    return throughput_result
239
+
240
+
241
# CLI interface
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Scan Performance Benchmarks")
    parser.add_argument("--output", default="benchmark_results", help="Output directory")
    parser.add_argument("--targets", type=int, default=10, help="Number of test targets")
    parser.add_argument("--type", default="host", choices=["host", "web", "network"],
                        help="Target type")

    args = parser.parse_args()

    async def main():
        config = ScanBenchmarkConfig(
            target_count=args.targets,
            target_type=args.type
        )

        benchmark = ScanPerformanceBenchmark(output_dir=args.output)
        # BUG FIX: config was built from the CLI arguments but never used,
        # so --targets/--type had no effect. Install it as the suite's
        # default configuration (defaults match, so bare runs are unchanged).
        benchmark.config = config
        results = await benchmark.run_all()

        print("\n" + "="*60)
        print("SCAN PERFORMANCE BENCHMARK RESULTS")
        print("="*60)

        for result in results:
            print(f"\n{result.name}:")
            print(f" Duration: {result.timing.duration_ms:.2f}ms")
            print(f" Throughput: {result.throughput.ops_per_minute:.2f} ops/min")
            print(f" Peak Memory: {result.memory.peak_mb:.2f} MB")

    asyncio.run(main())
@@ -0,0 +1,255 @@
1
+ """Multi-Agent Coordinator with Deadlock Prevention
2
+
3
+ Prevents deadlocks in multi-agent systems using resource ordering and timeouts.
4
+ Addresses Issue #13
5
+ """
6
+ import asyncio
7
+ import time
8
+ from typing import Dict, List, Set, Optional, Any
9
+ from dataclasses import dataclass, field
10
+ from enum import Enum
11
+ from contextlib import asynccontextmanager
12
+ import logging
13
+
14
+
15
class AgentStatus(Enum):
    """Lifecycle states an agent can be in."""
    IDLE = "idle"
    RUNNING = "running"
    # Blocked waiting on a resource; drives deadlock timeout detection.
    WAITING = "waiting"
    COMPLETED = "completed"
    # Set when a deadlock is forcibly resolved against the agent.
    ERROR = "error"
21
+
22
+
23
class ResourceType(Enum):
    """Shared resource classes agents contend for.

    The enum value strings also define the global acquisition order used
    by AgentCoordinator.acquire_resources (sorted by ``.value``).
    """
    SCANNER = "scanner"
    DATABASE = "database"
    API_RATE_LIMIT = "api_rate_limit"
    FILE_SYSTEM = "file_system"
    NETWORK = "network"
29
+
30
+
31
@dataclass
class Agent:
    """Agent representation"""
    id: str
    name: str
    status: AgentStatus = AgentStatus.IDLE
    # Resources this agent currently holds.
    acquired_resources: Set[ResourceType] = field(default_factory=set)
    # Resource the agent is currently blocked on, if any.
    waiting_for: Optional[ResourceType] = None
    # Registration time. NOTE(review): deadlock timeout detection measures
    # from here, not from when the agent began waiting — confirm intended.
    start_time: float = field(default_factory=time.time)
    timeout: float = 300.0  # 5 minutes default
41
+
42
+
43
@dataclass
class Resource:
    """Resource representation"""
    type: ResourceType
    # Maximum simultaneous holders of this resource.
    max_concurrent: int = 1
    # Ids of agents currently holding the resource.
    current_users: Set[str] = field(default_factory=set)
    # Ids of agents queued for the resource (queue order is informational;
    # acquisition polls and is not strictly FIFO).
    wait_queue: List[str] = field(default_factory=list)
50
+
51
+
52
class AgentCoordinator:
    """Coordinates multiple agents with deadlock prevention.

    Deadlocks are addressed three ways:
      * resource ordering — acquire_resources() sorts requests so every
        agent acquires in the same global order (no circular wait);
      * timeouts — _acquire_resource() gives up after its timeout;
      * detection — check_deadlocks() flags long waits and simple two-agent
        circular waits; resolve_deadlock() force-releases an agent.
    """

    name = "agent_coordinator"
    version = "1.0.0"

    def __init__(self):
        self.agents: Dict[str, Agent] = {}
        self.resources: Dict[ResourceType, Resource] = {}
        # Single asyncio lock guarding agent/resource bookkeeping
        # (coroutine-safe only; not thread-safe).
        self._lock = asyncio.Lock()
        self._init_resources()
        self.deadlock_check_interval = 10.0

    def _init_resources(self):
        """Initialize system resources with their concurrency limits."""
        self.resources = {
            ResourceType.SCANNER: Resource(ResourceType.SCANNER, max_concurrent=3),
            ResourceType.DATABASE: Resource(ResourceType.DATABASE, max_concurrent=5),
            ResourceType.API_RATE_LIMIT: Resource(ResourceType.API_RATE_LIMIT, max_concurrent=2),
            ResourceType.FILE_SYSTEM: Resource(ResourceType.FILE_SYSTEM, max_concurrent=10),
            ResourceType.NETWORK: Resource(ResourceType.NETWORK, max_concurrent=5),
        }

    async def register_agent(self, agent_id: str, name: str, timeout: float = 300.0) -> Agent:
        """Register a new agent.

        Raises:
            ValueError: if agent_id is already registered.
        """
        async with self._lock:
            if agent_id in self.agents:
                raise ValueError(f"Agent {agent_id} already registered")

            agent = Agent(id=agent_id, name=name, timeout=timeout)
            self.agents[agent_id] = agent
            return agent

    async def unregister_agent(self, agent_id: str):
        """Unregister an agent and release its resources (no-op if unknown)."""
        async with self._lock:
            if agent_id not in self.agents:
                return

            agent = self.agents[agent_id]

            # Release all acquired resources before removal.
            for resource_type in list(agent.acquired_resources):
                await self._release_resource(agent_id, resource_type)

            del self.agents[agent_id]

    async def _acquire_resource(
        self,
        agent_id: str,
        resource_type: ResourceType,
        timeout: float = 30.0
    ) -> bool:
        """Acquire a single resource, polling until timeout.

        Returns:
            True on success, False if the agent is unknown or the timeout
            expires.

        BUG FIX: on success the agent is now removed from the wait queue and
        marked RUNNING (previously a queued agent that later acquired stayed
        WAITING with a stale queue entry, tripping the timeout deadlock
        detector); on timeout the stale queue entry and waiting_for marker
        are cleaned up instead of leaking.
        """
        start_time = time.time()

        while time.time() - start_time < timeout:
            async with self._lock:
                if agent_id not in self.agents:
                    return False

                resource = self.resources[resource_type]
                agent = self.agents[agent_id]

                # Grant immediately if capacity remains.
                if len(resource.current_users) < resource.max_concurrent:
                    resource.current_users.add(agent_id)
                    agent.acquired_resources.add(resource_type)
                    # Clear wait bookkeeping from earlier poll iterations.
                    if agent_id in resource.wait_queue:
                        resource.wait_queue.remove(agent_id)
                    agent.waiting_for = None
                    agent.status = AgentStatus.RUNNING
                    return True

                # Otherwise record that we are waiting on this resource.
                if agent_id not in resource.wait_queue:
                    resource.wait_queue.append(agent_id)
                agent.waiting_for = resource_type
                agent.status = AgentStatus.WAITING

            # Poll outside the lock so holders can release meanwhile.
            await asyncio.sleep(0.5)

        # Timed out: remove ghost wait-queue entry / waiting marker so the
        # coordinator state stays consistent.
        async with self._lock:
            resource = self.resources[resource_type]
            if agent_id in resource.wait_queue:
                resource.wait_queue.remove(agent_id)
            agent = self.agents.get(agent_id)
            if agent is not None and agent.waiting_for is resource_type:
                agent.waiting_for = None
        return False

    async def _release_resource(self, agent_id: str, resource_type: ResourceType):
        """Release an acquired resource.

        Contains no awaits, so it is atomic within the event loop and safe
        to call both with and without self._lock held.
        """
        resource = self.resources[resource_type]

        if agent_id in resource.current_users:
            resource.current_users.remove(agent_id)

        if agent_id in resource.wait_queue:
            resource.wait_queue.remove(agent_id)

        if agent_id in self.agents:
            agent = self.agents[agent_id]
            agent.acquired_resources.discard(resource_type)

    @asynccontextmanager
    async def acquire_resources(
        self,
        agent_id: str,
        resources: List[ResourceType],
        timeout: float = 30.0
    ):
        """
        Context manager for acquiring multiple resources
        Uses resource ordering to prevent deadlocks

        Raises:
            TimeoutError: if any resource cannot be acquired in time;
            already-acquired resources are released on the way out.
        """
        # Sort resources to ensure consistent ordering (prevents circular wait)
        sorted_resources = sorted(resources, key=lambda r: r.value)

        acquired = []
        try:
            # Acquire all resources in the global order.
            for resource_type in sorted_resources:
                success = await self._acquire_resource(agent_id, resource_type, timeout)
                if not success:
                    raise TimeoutError(f"Could not acquire {resource_type.value}")
                acquired.append(resource_type)

            yield acquired
        finally:
            # Release in reverse order of acquisition.
            for resource_type in reversed(acquired):
                await self._release_resource(agent_id, resource_type)

    async def check_deadlocks(self) -> List[Dict]:
        """Detect potential deadlocks in the system.

        Returns:
            List of dicts describing 'timeout' and 'circular_wait' findings.
        """
        async with self._lock:
            deadlocks = []

            for agent_id, agent in self.agents.items():
                # Timeout check. NOTE(review): wait_time is measured from the
                # agent's registration time, not from when it began waiting.
                if agent.status == AgentStatus.WAITING:
                    wait_time = time.time() - agent.start_time
                    if wait_time > agent.timeout:
                        deadlocks.append({
                            'agent_id': agent_id,
                            'type': 'timeout',
                            'wait_time': wait_time,
                            'waiting_for': agent.waiting_for.value if agent.waiting_for else None
                        })

                # Circular wait check (pairwise only; longer cycles are not
                # detected by this simplified pass).
                if agent.waiting_for:
                    resource = self.resources[agent.waiting_for]
                    for other_id in resource.current_users:
                        if other_id in self.agents:
                            other = self.agents[other_id]
                            if other.waiting_for in agent.acquired_resources:
                                deadlocks.append({
                                    'agent_id': agent_id,
                                    'type': 'circular_wait',
                                    'involved': [agent_id, other_id],
                                    'resources': [agent.waiting_for.value, other.waiting_for.value if other.waiting_for else None]
                                })

            return deadlocks

    async def resolve_deadlock(self, agent_id: str):
        """Forcefully resolve a deadlock by releasing an agent's resources
        and marking it ERROR (no-op if the agent is unknown)."""
        async with self._lock:
            if agent_id not in self.agents:
                return

            agent = self.agents[agent_id]

            # Release everything the agent holds.
            for resource_type in list(agent.acquired_resources):
                await self._release_resource(agent_id, resource_type)

            agent.status = AgentStatus.ERROR
            logging.warning(f"Deadlock resolved: Agent {agent_id} terminated")

    def get_status(self) -> Dict:
        """Get a snapshot of all agents and resource utilization."""
        return {
            'agents': {
                aid: {
                    'name': a.name,
                    'status': a.status.value,
                    'resources': [r.value for r in a.acquired_resources],
                    'waiting_for': a.waiting_for.value if a.waiting_for else None
                }
                for aid, a in self.agents.items()
            },
            'resources': {
                rt.value: {
                    'max': r.max_concurrent,
                    'current': len(r.current_users),
                    'queue': len(r.wait_queue)
                }
                for rt, r in self.resources.items()
            }
        }

    def get_info(self) -> Dict:
        """Get module info"""
        return {
            'name': self.name,
            'version': self.version,
            'description': 'Multi-agent coordinator with deadlock prevention',
            'resources': [r.value for r in ResourceType],
            'deadlock_prevention': ['resource_ordering', 'timeout_detection', 'circular_wait_detection']
        }