aiptx-2.0.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aipt_v2/__init__.py +110 -0
- aipt_v2/__main__.py +24 -0
- aipt_v2/agents/AIPTxAgent/__init__.py +10 -0
- aipt_v2/agents/AIPTxAgent/aiptx_agent.py +211 -0
- aipt_v2/agents/__init__.py +46 -0
- aipt_v2/agents/base.py +520 -0
- aipt_v2/agents/exploit_agent.py +688 -0
- aipt_v2/agents/ptt.py +406 -0
- aipt_v2/agents/state.py +168 -0
- aipt_v2/app.py +957 -0
- aipt_v2/browser/__init__.py +31 -0
- aipt_v2/browser/automation.py +458 -0
- aipt_v2/browser/crawler.py +453 -0
- aipt_v2/cli.py +2933 -0
- aipt_v2/compliance/__init__.py +71 -0
- aipt_v2/compliance/compliance_report.py +449 -0
- aipt_v2/compliance/framework_mapper.py +424 -0
- aipt_v2/compliance/nist_mapping.py +345 -0
- aipt_v2/compliance/owasp_mapping.py +330 -0
- aipt_v2/compliance/pci_mapping.py +297 -0
- aipt_v2/config.py +341 -0
- aipt_v2/core/__init__.py +43 -0
- aipt_v2/core/agent.py +630 -0
- aipt_v2/core/llm.py +395 -0
- aipt_v2/core/memory.py +305 -0
- aipt_v2/core/ptt.py +329 -0
- aipt_v2/database/__init__.py +14 -0
- aipt_v2/database/models.py +232 -0
- aipt_v2/database/repository.py +384 -0
- aipt_v2/docker/__init__.py +23 -0
- aipt_v2/docker/builder.py +260 -0
- aipt_v2/docker/manager.py +222 -0
- aipt_v2/docker/sandbox.py +371 -0
- aipt_v2/evasion/__init__.py +58 -0
- aipt_v2/evasion/request_obfuscator.py +272 -0
- aipt_v2/evasion/tls_fingerprint.py +285 -0
- aipt_v2/evasion/ua_rotator.py +301 -0
- aipt_v2/evasion/waf_bypass.py +439 -0
- aipt_v2/execution/__init__.py +23 -0
- aipt_v2/execution/executor.py +302 -0
- aipt_v2/execution/parser.py +544 -0
- aipt_v2/execution/terminal.py +337 -0
- aipt_v2/health.py +437 -0
- aipt_v2/intelligence/__init__.py +194 -0
- aipt_v2/intelligence/adaptation.py +474 -0
- aipt_v2/intelligence/auth.py +520 -0
- aipt_v2/intelligence/chaining.py +775 -0
- aipt_v2/intelligence/correlation.py +536 -0
- aipt_v2/intelligence/cve_aipt.py +334 -0
- aipt_v2/intelligence/cve_info.py +1111 -0
- aipt_v2/intelligence/knowledge_graph.py +590 -0
- aipt_v2/intelligence/learning.py +626 -0
- aipt_v2/intelligence/llm_analyzer.py +502 -0
- aipt_v2/intelligence/llm_tool_selector.py +518 -0
- aipt_v2/intelligence/payload_generator.py +562 -0
- aipt_v2/intelligence/rag.py +239 -0
- aipt_v2/intelligence/scope.py +442 -0
- aipt_v2/intelligence/searchers/__init__.py +5 -0
- aipt_v2/intelligence/searchers/exploitdb_searcher.py +523 -0
- aipt_v2/intelligence/searchers/github_searcher.py +467 -0
- aipt_v2/intelligence/searchers/google_searcher.py +281 -0
- aipt_v2/intelligence/tools.json +443 -0
- aipt_v2/intelligence/triage.py +670 -0
- aipt_v2/interactive_shell.py +559 -0
- aipt_v2/interface/__init__.py +5 -0
- aipt_v2/interface/cli.py +230 -0
- aipt_v2/interface/main.py +501 -0
- aipt_v2/interface/tui.py +1276 -0
- aipt_v2/interface/utils.py +583 -0
- aipt_v2/llm/__init__.py +39 -0
- aipt_v2/llm/config.py +26 -0
- aipt_v2/llm/llm.py +514 -0
- aipt_v2/llm/memory.py +214 -0
- aipt_v2/llm/request_queue.py +89 -0
- aipt_v2/llm/utils.py +89 -0
- aipt_v2/local_tool_installer.py +1467 -0
- aipt_v2/models/__init__.py +15 -0
- aipt_v2/models/findings.py +295 -0
- aipt_v2/models/phase_result.py +224 -0
- aipt_v2/models/scan_config.py +207 -0
- aipt_v2/monitoring/grafana/dashboards/aipt-dashboard.json +355 -0
- aipt_v2/monitoring/grafana/dashboards/default.yml +17 -0
- aipt_v2/monitoring/grafana/datasources/prometheus.yml +17 -0
- aipt_v2/monitoring/prometheus.yml +60 -0
- aipt_v2/orchestration/__init__.py +52 -0
- aipt_v2/orchestration/pipeline.py +398 -0
- aipt_v2/orchestration/progress.py +300 -0
- aipt_v2/orchestration/scheduler.py +296 -0
- aipt_v2/orchestrator.py +2427 -0
- aipt_v2/payloads/__init__.py +27 -0
- aipt_v2/payloads/cmdi.py +150 -0
- aipt_v2/payloads/sqli.py +263 -0
- aipt_v2/payloads/ssrf.py +204 -0
- aipt_v2/payloads/templates.py +222 -0
- aipt_v2/payloads/traversal.py +166 -0
- aipt_v2/payloads/xss.py +204 -0
- aipt_v2/prompts/__init__.py +60 -0
- aipt_v2/proxy/__init__.py +29 -0
- aipt_v2/proxy/history.py +352 -0
- aipt_v2/proxy/interceptor.py +452 -0
- aipt_v2/recon/__init__.py +44 -0
- aipt_v2/recon/dns.py +241 -0
- aipt_v2/recon/osint.py +367 -0
- aipt_v2/recon/subdomain.py +372 -0
- aipt_v2/recon/tech_detect.py +311 -0
- aipt_v2/reports/__init__.py +17 -0
- aipt_v2/reports/generator.py +313 -0
- aipt_v2/reports/html_report.py +378 -0
- aipt_v2/runtime/__init__.py +53 -0
- aipt_v2/runtime/base.py +30 -0
- aipt_v2/runtime/docker.py +401 -0
- aipt_v2/runtime/local.py +346 -0
- aipt_v2/runtime/tool_server.py +205 -0
- aipt_v2/runtime/vps.py +830 -0
- aipt_v2/scanners/__init__.py +28 -0
- aipt_v2/scanners/base.py +273 -0
- aipt_v2/scanners/nikto.py +244 -0
- aipt_v2/scanners/nmap.py +402 -0
- aipt_v2/scanners/nuclei.py +273 -0
- aipt_v2/scanners/web.py +454 -0
- aipt_v2/scripts/security_audit.py +366 -0
- aipt_v2/setup_wizard.py +941 -0
- aipt_v2/skills/__init__.py +80 -0
- aipt_v2/skills/agents/__init__.py +14 -0
- aipt_v2/skills/agents/api_tester.py +706 -0
- aipt_v2/skills/agents/base.py +477 -0
- aipt_v2/skills/agents/code_review.py +459 -0
- aipt_v2/skills/agents/security_agent.py +336 -0
- aipt_v2/skills/agents/web_pentest.py +818 -0
- aipt_v2/skills/prompts/__init__.py +647 -0
- aipt_v2/system_detector.py +539 -0
- aipt_v2/telemetry/__init__.py +7 -0
- aipt_v2/telemetry/tracer.py +347 -0
- aipt_v2/terminal/__init__.py +28 -0
- aipt_v2/terminal/executor.py +400 -0
- aipt_v2/terminal/sandbox.py +350 -0
- aipt_v2/tools/__init__.py +44 -0
- aipt_v2/tools/active_directory/__init__.py +78 -0
- aipt_v2/tools/active_directory/ad_config.py +238 -0
- aipt_v2/tools/active_directory/bloodhound_wrapper.py +447 -0
- aipt_v2/tools/active_directory/kerberos_attacks.py +430 -0
- aipt_v2/tools/active_directory/ldap_enum.py +533 -0
- aipt_v2/tools/active_directory/smb_attacks.py +505 -0
- aipt_v2/tools/agents_graph/__init__.py +19 -0
- aipt_v2/tools/agents_graph/agents_graph_actions.py +69 -0
- aipt_v2/tools/api_security/__init__.py +76 -0
- aipt_v2/tools/api_security/api_discovery.py +608 -0
- aipt_v2/tools/api_security/graphql_scanner.py +622 -0
- aipt_v2/tools/api_security/jwt_analyzer.py +577 -0
- aipt_v2/tools/api_security/openapi_fuzzer.py +761 -0
- aipt_v2/tools/browser/__init__.py +5 -0
- aipt_v2/tools/browser/browser_actions.py +238 -0
- aipt_v2/tools/browser/browser_instance.py +535 -0
- aipt_v2/tools/browser/tab_manager.py +344 -0
- aipt_v2/tools/cloud/__init__.py +70 -0
- aipt_v2/tools/cloud/cloud_config.py +273 -0
- aipt_v2/tools/cloud/cloud_scanner.py +639 -0
- aipt_v2/tools/cloud/prowler_tool.py +571 -0
- aipt_v2/tools/cloud/scoutsuite_tool.py +359 -0
- aipt_v2/tools/executor.py +307 -0
- aipt_v2/tools/parser.py +408 -0
- aipt_v2/tools/proxy/__init__.py +5 -0
- aipt_v2/tools/proxy/proxy_actions.py +103 -0
- aipt_v2/tools/proxy/proxy_manager.py +789 -0
- aipt_v2/tools/registry.py +196 -0
- aipt_v2/tools/scanners/__init__.py +343 -0
- aipt_v2/tools/scanners/acunetix_tool.py +712 -0
- aipt_v2/tools/scanners/burp_tool.py +631 -0
- aipt_v2/tools/scanners/config.py +156 -0
- aipt_v2/tools/scanners/nessus_tool.py +588 -0
- aipt_v2/tools/scanners/zap_tool.py +612 -0
- aipt_v2/tools/terminal/__init__.py +5 -0
- aipt_v2/tools/terminal/terminal_actions.py +37 -0
- aipt_v2/tools/terminal/terminal_manager.py +153 -0
- aipt_v2/tools/terminal/terminal_session.py +449 -0
- aipt_v2/tools/tool_processing.py +108 -0
- aipt_v2/utils/__init__.py +17 -0
- aipt_v2/utils/logging.py +202 -0
- aipt_v2/utils/model_manager.py +187 -0
- aipt_v2/utils/searchers/__init__.py +269 -0
- aipt_v2/verify_install.py +793 -0
- aiptx-2.0.7.dist-info/METADATA +345 -0
- aiptx-2.0.7.dist-info/RECORD +187 -0
- aiptx-2.0.7.dist-info/WHEEL +5 -0
- aiptx-2.0.7.dist-info/entry_points.txt +7 -0
- aiptx-2.0.7.dist-info/licenses/LICENSE +21 -0
- aiptx-2.0.7.dist-info/top_level.txt +1 -0
aipt_v2/orchestration/pipeline.py

@@ -0,0 +1,398 @@
"""
AIPT Pipeline - Flexible stage-based execution pipeline

Provides a configurable pipeline for pentest workflows with:
- Custom stage definitions
- Conditional execution
- Parallel stage support
- Progress callbacks
"""
from __future__ import annotations

import asyncio
from typing import Optional, List, Dict, Any, Callable, Awaitable
from dataclasses import dataclass, field
from enum import Enum
from datetime import datetime
import logging

logger = logging.getLogger(__name__)


class StageStatus(str, Enum):
    """Stage execution status"""
    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    SKIPPED = "skipped"


@dataclass
class PipelineStage:
    """
    A stage in the pipeline.

    Attributes:
        name: Stage identifier
        description: Human-readable description
        handler: Async function to execute
        depends_on: List of stage names this depends on
        condition: Optional function to check if stage should run
        timeout: Stage timeout in seconds
        retry_count: Number of retries on failure
    """
    name: str
    description: str
    handler: Callable[..., Awaitable[Any]]
    depends_on: List[str] = field(default_factory=list)
    condition: Optional[Callable[[Dict], bool]] = None
    timeout: int = 600
    retry_count: int = 0
    parallel_group: Optional[str] = None  # Stages in same group run in parallel


@dataclass
class StageResult:
    """Result of stage execution"""
    stage_name: str
    status: StageStatus
    output: Any = None
    error: Optional[str] = None
    duration: float = 0.0
    started_at: Optional[str] = None
    completed_at: Optional[str] = None
    retries: int = 0


@dataclass
class PipelineResult:
    """Result of pipeline execution"""
    success: bool
    stages: Dict[str, StageResult]
    total_duration: float
    started_at: str
    completed_at: str
    context: Dict[str, Any] = field(default_factory=dict)

    @property
    def failed_stages(self) -> List[str]:
        return [name for name, result in self.stages.items() if result.status == StageStatus.FAILED]

    @property
    def completed_stages(self) -> List[str]:
        return [name for name, result in self.stages.items() if result.status == StageStatus.COMPLETED]


class Pipeline:
    """
    Flexible execution pipeline for pentest workflows.

    Example:
        pipeline = Pipeline("recon_pipeline")

        pipeline.add_stage(PipelineStage(
            name="subdomain_enum",
            description="Enumerate subdomains",
            handler=enumerate_subdomains,
        ))

        pipeline.add_stage(PipelineStage(
            name="port_scan",
            description="Scan ports",
            handler=scan_ports,
            depends_on=["subdomain_enum"],
        ))

        result = await pipeline.run(context={"target": "example.com"})
    """

    def __init__(
        self,
        name: str,
        description: str = "",
        on_stage_start: Optional[Callable[[str], None]] = None,
        on_stage_complete: Optional[Callable[[str, StageResult], None]] = None,
        on_progress: Optional[Callable[[float, str], None]] = None,
    ):
        self.name = name
        self.description = description
        self.stages: Dict[str, PipelineStage] = {}
        self.stage_order: List[str] = []

        # Callbacks
        self.on_stage_start = on_stage_start
        self.on_stage_complete = on_stage_complete
        self.on_progress = on_progress

    def add_stage(self, stage: PipelineStage) -> "Pipeline":
        """Add a stage to the pipeline"""
        self.stages[stage.name] = stage
        if stage.name not in self.stage_order:
            self.stage_order.append(stage.name)
        return self

    def remove_stage(self, name: str) -> "Pipeline":
        """Remove a stage from the pipeline"""
        if name in self.stages:
            del self.stages[name]
            self.stage_order.remove(name)
        return self

    async def run(
        self,
        context: Optional[Dict[str, Any]] = None,
        start_from: Optional[str] = None,
        stop_at: Optional[str] = None,
    ) -> PipelineResult:
        """
        Execute the pipeline.

        Args:
            context: Shared context passed to all stages
            start_from: Start from this stage (skip previous)
            stop_at: Stop after this stage

        Returns:
            PipelineResult with all stage results
        """
        import time

        start_time = time.time()
        started_at = datetime.now().isoformat()
        context = context or {}
        results: Dict[str, StageResult] = {}

        # Determine execution order respecting dependencies
        execution_order = self._get_execution_order()

        # Filter stages if start_from/stop_at specified
        if start_from:
            try:
                start_idx = execution_order.index(start_from)
                execution_order = execution_order[start_idx:]
            except ValueError:
                pass

        if stop_at:
            try:
                stop_idx = execution_order.index(stop_at) + 1
                execution_order = execution_order[:stop_idx]
            except ValueError:
                pass

        total_stages = len(execution_order)
        completed_count = 0

        # Group parallel stages
        parallel_groups = self._group_parallel_stages(execution_order)

        for group in parallel_groups:
            # Check if all dependencies are met
            deps_met = all(
                all(
                    dep in results and results[dep].status == StageStatus.COMPLETED
                    for dep in self.stages[stage_name].depends_on
                )
                for stage_name in group
            )

            if not deps_met:
                for stage_name in group:
                    results[stage_name] = StageResult(
                        stage_name=stage_name,
                        status=StageStatus.SKIPPED,
                        error="Dependencies not met",
                    )
                continue

            # Execute group (parallel if multiple, sequential if single)
            if len(group) > 1:
                group_results = await self._run_parallel_stages(group, context, results)
            else:
                stage_name = group[0]
                result = await self._run_stage(stage_name, context, results)
                group_results = {stage_name: result}

            results.update(group_results)
            completed_count += len(group)

            # Progress callback
            if self.on_progress:
                progress = completed_count / total_stages
                self.on_progress(progress, f"Completed {completed_count}/{total_stages} stages")

        completed_at = datetime.now().isoformat()
        total_duration = time.time() - start_time

        # Determine overall success
        success = all(
            r.status in [StageStatus.COMPLETED, StageStatus.SKIPPED]
            for r in results.values()
        )

        return PipelineResult(
            success=success,
            stages=results,
            total_duration=total_duration,
            started_at=started_at,
            completed_at=completed_at,
            context=context,
        )

    async def _run_stage(
        self,
        stage_name: str,
        context: Dict[str, Any],
        previous_results: Dict[str, StageResult],
    ) -> StageResult:
        """Execute a single stage"""
        import time

        stage = self.stages[stage_name]
        start_time = time.time()
        started_at = datetime.now().isoformat()
        retries = 0

        # Check condition
        if stage.condition and not stage.condition(context):
            return StageResult(
                stage_name=stage_name,
                status=StageStatus.SKIPPED,
                started_at=started_at,
                completed_at=datetime.now().isoformat(),
            )

        # Notify start
        if self.on_stage_start:
            self.on_stage_start(stage_name)

        while retries <= stage.retry_count:
            try:
                # Execute with timeout
                output = await asyncio.wait_for(
                    stage.handler(context, previous_results),
                    timeout=stage.timeout,
                )

                result = StageResult(
                    stage_name=stage_name,
                    status=StageStatus.COMPLETED,
                    output=output,
                    duration=time.time() - start_time,
                    started_at=started_at,
                    completed_at=datetime.now().isoformat(),
                    retries=retries,
                )

                # Notify complete
                if self.on_stage_complete:
                    self.on_stage_complete(stage_name, result)

                return result

            except asyncio.TimeoutError:
                error = f"Stage timed out after {stage.timeout}s"
            except Exception as e:
                error = str(e)
                logger.error(f"Stage {stage_name} failed: {error}")

            retries += 1

        # All retries failed
        result = StageResult(
            stage_name=stage_name,
            status=StageStatus.FAILED,
            error=error,
            duration=time.time() - start_time,
            started_at=started_at,
            completed_at=datetime.now().isoformat(),
            retries=retries - 1,
        )

        if self.on_stage_complete:
            self.on_stage_complete(stage_name, result)

        return result

    async def _run_parallel_stages(
        self,
        stage_names: List[str],
        context: Dict[str, Any],
        previous_results: Dict[str, StageResult],
    ) -> Dict[str, StageResult]:
        """Execute multiple stages in parallel"""
        tasks = [
            self._run_stage(name, context, previous_results)
            for name in stage_names
        ]
        results = await asyncio.gather(*tasks)
        return dict(zip(stage_names, results))

    def _get_execution_order(self) -> List[str]:
        """Topological sort of stages based on dependencies"""
        visited = set()
        order = []

        def visit(name: str):
            if name in visited:
                return
            visited.add(name)

            stage = self.stages.get(name)
            if stage:
                for dep in stage.depends_on:
                    if dep in self.stages:
                        visit(dep)
            order.append(name)

        for stage_name in self.stage_order:
            visit(stage_name)

        return order

    def _group_parallel_stages(self, execution_order: List[str]) -> List[List[str]]:
        """Group stages that can run in parallel"""
        groups = []
        current_group = []
        current_parallel_group = None

        for stage_name in execution_order:
            stage = self.stages[stage_name]

            if stage.parallel_group:
                if stage.parallel_group == current_parallel_group:
                    current_group.append(stage_name)
                else:
                    if current_group:
                        groups.append(current_group)
                    current_group = [stage_name]
                    current_parallel_group = stage.parallel_group
            else:
                if current_group:
                    groups.append(current_group)
                groups.append([stage_name])
                current_group = []
                current_parallel_group = None

        if current_group:
            groups.append(current_group)

        return groups

    def visualize(self) -> str:
        """Generate ASCII visualization of the pipeline"""
        lines = [f"Pipeline: {self.name}"]
        lines.append("=" * 50)

        execution_order = self._get_execution_order()

        for i, stage_name in enumerate(execution_order):
            stage = self.stages[stage_name]
            prefix = "└── " if i == len(execution_order) - 1 else "├── "
            deps = f" (depends on: {', '.join(stage.depends_on)})" if stage.depends_on else ""
            lines.append(f"{prefix}{stage_name}{deps}")
            if stage.description:
                lines.append(f" {stage.description}")

        return "\n".join(lines)
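The example in the Pipeline docstring above covers a linear flow; the short sketch below (illustrative only, not part of the package) also exercises parallel_group and condition as defined in the diff. The handler bodies, stage names, and target value are placeholders, and the import path assumes the installed aipt_v2 layout shown in the file list.

import asyncio

from aipt_v2.orchestration.pipeline import Pipeline, PipelineStage

# Hypothetical handlers: every stage handler receives (context, previous_results).
async def enumerate_subdomains(context, previous_results):
    return ["app." + context["target"], "api." + context["target"]]

async def scan_ports(context, previous_results):
    hosts = previous_results["subdomain_enum"].output
    return {host: [80, 443] for host in hosts}

async def probe_web(context, previous_results):
    return {"probed": previous_results["subdomain_enum"].output}

async def main():
    pipeline = Pipeline("recon_pipeline", description="Example recon flow")

    pipeline.add_stage(PipelineStage(
        name="subdomain_enum",
        description="Enumerate subdomains",
        handler=enumerate_subdomains,
    ))
    # Stages sharing a parallel_group run concurrently once their dependencies complete.
    pipeline.add_stage(PipelineStage(
        name="port_scan",
        description="Scan ports",
        handler=scan_ports,
        depends_on=["subdomain_enum"],
        parallel_group="scanning",
    ))
    pipeline.add_stage(PipelineStage(
        name="probe_web",
        description="Probe web services",
        handler=probe_web,
        depends_on=["subdomain_enum"],
        parallel_group="scanning",
        condition=lambda ctx: ctx.get("include_web", True),  # skipped if falsy
    ))

    print(pipeline.visualize())
    result = await pipeline.run(context={"target": "example.com"})
    print(result.success, result.completed_stages, result.total_duration)

asyncio.run(main())

Per the code above, stages in the same parallel_group whose dependencies have completed are awaited together via asyncio.gather, and a falsy condition yields a SKIPPED result without invoking the handler.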
aipt_v2/orchestration/progress.py

@@ -0,0 +1,300 @@
"""
AIPT Progress Tracker - Track and report progress

Provides progress tracking with:
- Percentage progress
- ETA calculation
- Event callbacks
- Logging integration
"""
from __future__ import annotations

import time
from typing import Optional, Callable, Dict, Any, List
from dataclasses import dataclass, field
from datetime import datetime, timedelta
import logging

logger = logging.getLogger(__name__)


# Type alias for progress callback
ProgressCallback = Callable[[float, str, Dict[str, Any]], None]


@dataclass
class ProgressEvent:
    """A progress event"""
    timestamp: str
    progress: float  # 0.0 to 1.0
    message: str
    phase: str = ""
    details: Dict[str, Any] = field(default_factory=dict)


@dataclass
class PhaseProgress:
    """Progress within a phase"""
    name: str
    total_steps: int
    completed_steps: int = 0
    current_step: str = ""
    started_at: Optional[str] = None
    completed_at: Optional[str] = None

    @property
    def progress(self) -> float:
        if self.total_steps == 0:
            return 0.0
        return self.completed_steps / self.total_steps

    @property
    def is_complete(self) -> bool:
        return self.completed_steps >= self.total_steps


class ProgressTracker:
    """
    Track and report progress for long-running operations.

    Example:
        tracker = ProgressTracker(
            total_phases=4,
            on_progress=lambda p, m, d: print(f"{p*100:.1f}% - {m}")
        )

        tracker.start_phase("recon", total_steps=10)
        for i in range(10):
            tracker.update_step(f"Scanning target {i}")
        tracker.complete_phase()

        print(f"ETA: {tracker.eta}")
    """

    def __init__(
        self,
        total_phases: int = 1,
        on_progress: Optional[ProgressCallback] = None,
        on_phase_start: Optional[Callable[[str], None]] = None,
        on_phase_complete: Optional[Callable[[str, float], None]] = None,
    ):
        self.total_phases = total_phases
        self.on_progress = on_progress
        self.on_phase_start = on_phase_start
        self.on_phase_complete = on_phase_complete

        self.phases: Dict[str, PhaseProgress] = {}
        self.phase_order: List[str] = []
        self.events: List[ProgressEvent] = []

        self._current_phase: Optional[str] = None
        self._start_time: Optional[float] = None
        self._completed_phases: int = 0

    @property
    def progress(self) -> float:
        """Overall progress (0.0 to 1.0)"""
        if not self.phases:
            return 0.0

        # Weight each phase equally
        phase_weight = 1.0 / max(self.total_phases, len(self.phases))
        total_progress = 0.0

        for phase_name in self.phase_order:
            phase = self.phases[phase_name]
            if phase.is_complete:
                total_progress += phase_weight
            else:
                total_progress += phase_weight * phase.progress

        return min(total_progress, 1.0)

    @property
    def eta(self) -> Optional[timedelta]:
        """Estimated time remaining"""
        if not self._start_time or self.progress == 0:
            return None

        elapsed = time.time() - self._start_time
        if self.progress >= 1.0:
            return timedelta(seconds=0)

        estimated_total = elapsed / self.progress
        remaining = estimated_total - elapsed
        return timedelta(seconds=int(remaining))

    @property
    def elapsed(self) -> timedelta:
        """Elapsed time"""
        if not self._start_time:
            return timedelta(seconds=0)
        return timedelta(seconds=int(time.time() - self._start_time))

    @property
    def current_phase(self) -> Optional[PhaseProgress]:
        """Get current phase"""
        if self._current_phase:
            return self.phases.get(self._current_phase)
        return None

    def start(self) -> None:
        """Start tracking"""
        self._start_time = time.time()
        self._emit_progress("Started tracking")

    def start_phase(
        self,
        name: str,
        total_steps: int = 1,
        description: str = "",
    ) -> None:
        """Start a new phase"""
        self._current_phase = name

        phase = PhaseProgress(
            name=name,
            total_steps=total_steps,
            started_at=datetime.now().isoformat(),
        )
        self.phases[name] = phase

        if name not in self.phase_order:
            self.phase_order.append(name)

        if self.on_phase_start:
            self.on_phase_start(name)

        self._emit_progress(f"Started phase: {name}", phase=name)
        logger.info(f"Phase started: {name} ({total_steps} steps)")

    def update_step(
        self,
        step_description: str = "",
        increment: int = 1,
    ) -> None:
        """Update progress within current phase"""
        if not self._current_phase:
            return

        phase = self.phases[self._current_phase]
        phase.completed_steps += increment
        phase.current_step = step_description

        self._emit_progress(
            step_description or f"Step {phase.completed_steps}/{phase.total_steps}",
            phase=self._current_phase,
        )

    def complete_phase(self, message: str = "") -> None:
        """Mark current phase as complete"""
        if not self._current_phase:
            return

        phase = self.phases[self._current_phase]
        phase.completed_steps = phase.total_steps
        phase.completed_at = datetime.now().isoformat()
        self._completed_phases += 1

        duration = 0.0
        if phase.started_at:
            start = datetime.fromisoformat(phase.started_at)
            end = datetime.fromisoformat(phase.completed_at)
            duration = (end - start).total_seconds()

        if self.on_phase_complete:
            self.on_phase_complete(self._current_phase, duration)

        self._emit_progress(
            message or f"Completed phase: {self._current_phase}",
            phase=self._current_phase,
        )

        logger.info(f"Phase completed: {self._current_phase} ({duration:.1f}s)")
        self._current_phase = None

    def skip_phase(self, name: str, reason: str = "") -> None:
        """Mark a phase as skipped"""
        phase = PhaseProgress(
            name=name,
            total_steps=1,
            completed_steps=1,
            started_at=datetime.now().isoformat(),
            completed_at=datetime.now().isoformat(),
            current_step=f"Skipped: {reason}" if reason else "Skipped",
        )
        self.phases[name] = phase

        if name not in self.phase_order:
            self.phase_order.append(name)

        self._emit_progress(f"Skipped phase: {name}", phase=name)

    def fail_phase(self, error: str) -> None:
        """Mark current phase as failed"""
        if not self._current_phase:
            return

        phase = self.phases[self._current_phase]
        phase.current_step = f"Failed: {error}"
        phase.completed_at = datetime.now().isoformat()

        self._emit_progress(f"Phase failed: {error}", phase=self._current_phase)
        logger.error(f"Phase failed: {self._current_phase} - {error}")
        self._current_phase = None

    def _emit_progress(
        self,
        message: str,
        phase: str = "",
        details: Dict[str, Any] = None,
    ) -> None:
        """Emit progress event"""
        event = ProgressEvent(
            timestamp=datetime.now().isoformat(),
            progress=self.progress,
            message=message,
            phase=phase,
            details=details or {},
        )
        self.events.append(event)

        if self.on_progress:
            self.on_progress(self.progress, message, event.details)

    def get_summary(self) -> Dict[str, Any]:
        """Get progress summary"""
        return {
            "progress": self.progress,
            "progress_percent": f"{self.progress * 100:.1f}%",
            "elapsed": str(self.elapsed),
            "eta": str(self.eta) if self.eta else "Unknown",
            "completed_phases": self._completed_phases,
            "total_phases": self.total_phases,
            "current_phase": self._current_phase,
            "phases": {
                name: {
                    "progress": phase.progress,
                    "completed_steps": phase.completed_steps,
                    "total_steps": phase.total_steps,
                    "is_complete": phase.is_complete,
                }
                for name, phase in self.phases.items()
            },
        }

    def to_string(self) -> str:
        """Get human-readable progress string"""
        summary = self.get_summary()
        lines = [
            f"Progress: {summary['progress_percent']}",
            f"Elapsed: {summary['elapsed']}",
            f"ETA: {summary['eta']}",
            f"Phases: {summary['completed_phases']}/{summary['total_phases']}",
        ]

        if summary['current_phase']:
            phase = self.phases[summary['current_phase']]
            lines.append(f"Current: {summary['current_phase']} ({phase.current_step})")

        return " | ".join(lines)
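Likewise, a minimal sketch (illustrative only, not part of the package) of driving ProgressTracker from the diff above; the phase names, step counts, and failure message are invented, and the import path is assumed from the package layout.

from aipt_v2.orchestration.progress import ProgressTracker

# Print every emitted event: overall fraction and message (details dict ignored here).
tracker = ProgressTracker(
    total_phases=3,
    on_progress=lambda p, msg, details: print(f"{p * 100:5.1f}% | {msg}"),
    on_phase_complete=lambda name, secs: print(f"phase {name} finished in {secs:.1f}s"),
)

tracker.start()

tracker.start_phase("recon", total_steps=3)
for host in ("a.example.com", "b.example.com", "c.example.com"):
    tracker.update_step(f"Scanning {host}")
tracker.complete_phase()

tracker.start_phase("scan", total_steps=2)
tracker.update_step("nmap sweep")
tracker.fail_phase("scanner unreachable")  # marks the current phase as failed

tracker.skip_phase("exploit", reason="out of scope")

print(tracker.to_string())
print(tracker.get_summary()["progress_percent"])

The on_progress callback receives the overall fraction, the message, and the event detail dict, matching the ProgressCallback alias; eta extrapolates remaining time from elapsed time and the current fraction, which is also how a Pipeline's coarser two-argument on_progress callback could be bridged to this tracker if desired.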