aiptx 2.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiptx might be problematic. Click here for more details.
- aipt_v2/__init__.py +110 -0
- aipt_v2/__main__.py +24 -0
- aipt_v2/agents/AIPTxAgent/__init__.py +10 -0
- aipt_v2/agents/AIPTxAgent/aiptx_agent.py +211 -0
- aipt_v2/agents/__init__.py +24 -0
- aipt_v2/agents/base.py +520 -0
- aipt_v2/agents/ptt.py +406 -0
- aipt_v2/agents/state.py +168 -0
- aipt_v2/app.py +960 -0
- aipt_v2/browser/__init__.py +31 -0
- aipt_v2/browser/automation.py +458 -0
- aipt_v2/browser/crawler.py +453 -0
- aipt_v2/cli.py +321 -0
- aipt_v2/compliance/__init__.py +71 -0
- aipt_v2/compliance/compliance_report.py +449 -0
- aipt_v2/compliance/framework_mapper.py +424 -0
- aipt_v2/compliance/nist_mapping.py +345 -0
- aipt_v2/compliance/owasp_mapping.py +330 -0
- aipt_v2/compliance/pci_mapping.py +297 -0
- aipt_v2/config.py +288 -0
- aipt_v2/core/__init__.py +43 -0
- aipt_v2/core/agent.py +630 -0
- aipt_v2/core/llm.py +395 -0
- aipt_v2/core/memory.py +305 -0
- aipt_v2/core/ptt.py +329 -0
- aipt_v2/database/__init__.py +14 -0
- aipt_v2/database/models.py +232 -0
- aipt_v2/database/repository.py +384 -0
- aipt_v2/docker/__init__.py +23 -0
- aipt_v2/docker/builder.py +260 -0
- aipt_v2/docker/manager.py +222 -0
- aipt_v2/docker/sandbox.py +371 -0
- aipt_v2/evasion/__init__.py +58 -0
- aipt_v2/evasion/request_obfuscator.py +272 -0
- aipt_v2/evasion/tls_fingerprint.py +285 -0
- aipt_v2/evasion/ua_rotator.py +301 -0
- aipt_v2/evasion/waf_bypass.py +439 -0
- aipt_v2/execution/__init__.py +23 -0
- aipt_v2/execution/executor.py +302 -0
- aipt_v2/execution/parser.py +544 -0
- aipt_v2/execution/terminal.py +337 -0
- aipt_v2/health.py +437 -0
- aipt_v2/intelligence/__init__.py +85 -0
- aipt_v2/intelligence/auth.py +520 -0
- aipt_v2/intelligence/chaining.py +775 -0
- aipt_v2/intelligence/cve_aipt.py +334 -0
- aipt_v2/intelligence/cve_info.py +1111 -0
- aipt_v2/intelligence/rag.py +239 -0
- aipt_v2/intelligence/scope.py +442 -0
- aipt_v2/intelligence/searchers/__init__.py +5 -0
- aipt_v2/intelligence/searchers/exploitdb_searcher.py +523 -0
- aipt_v2/intelligence/searchers/github_searcher.py +467 -0
- aipt_v2/intelligence/searchers/google_searcher.py +281 -0
- aipt_v2/intelligence/tools.json +443 -0
- aipt_v2/intelligence/triage.py +670 -0
- aipt_v2/interface/__init__.py +5 -0
- aipt_v2/interface/cli.py +230 -0
- aipt_v2/interface/main.py +501 -0
- aipt_v2/interface/tui.py +1276 -0
- aipt_v2/interface/utils.py +583 -0
- aipt_v2/llm/__init__.py +39 -0
- aipt_v2/llm/config.py +26 -0
- aipt_v2/llm/llm.py +514 -0
- aipt_v2/llm/memory.py +214 -0
- aipt_v2/llm/request_queue.py +89 -0
- aipt_v2/llm/utils.py +89 -0
- aipt_v2/models/__init__.py +15 -0
- aipt_v2/models/findings.py +295 -0
- aipt_v2/models/phase_result.py +224 -0
- aipt_v2/models/scan_config.py +207 -0
- aipt_v2/monitoring/grafana/dashboards/aipt-dashboard.json +355 -0
- aipt_v2/monitoring/grafana/dashboards/default.yml +17 -0
- aipt_v2/monitoring/grafana/datasources/prometheus.yml +17 -0
- aipt_v2/monitoring/prometheus.yml +60 -0
- aipt_v2/orchestration/__init__.py +52 -0
- aipt_v2/orchestration/pipeline.py +398 -0
- aipt_v2/orchestration/progress.py +300 -0
- aipt_v2/orchestration/scheduler.py +296 -0
- aipt_v2/orchestrator.py +2284 -0
- aipt_v2/payloads/__init__.py +27 -0
- aipt_v2/payloads/cmdi.py +150 -0
- aipt_v2/payloads/sqli.py +263 -0
- aipt_v2/payloads/ssrf.py +204 -0
- aipt_v2/payloads/templates.py +222 -0
- aipt_v2/payloads/traversal.py +166 -0
- aipt_v2/payloads/xss.py +204 -0
- aipt_v2/prompts/__init__.py +60 -0
- aipt_v2/proxy/__init__.py +29 -0
- aipt_v2/proxy/history.py +352 -0
- aipt_v2/proxy/interceptor.py +452 -0
- aipt_v2/recon/__init__.py +44 -0
- aipt_v2/recon/dns.py +241 -0
- aipt_v2/recon/osint.py +367 -0
- aipt_v2/recon/subdomain.py +372 -0
- aipt_v2/recon/tech_detect.py +311 -0
- aipt_v2/reports/__init__.py +17 -0
- aipt_v2/reports/generator.py +313 -0
- aipt_v2/reports/html_report.py +378 -0
- aipt_v2/runtime/__init__.py +44 -0
- aipt_v2/runtime/base.py +30 -0
- aipt_v2/runtime/docker.py +401 -0
- aipt_v2/runtime/local.py +346 -0
- aipt_v2/runtime/tool_server.py +205 -0
- aipt_v2/scanners/__init__.py +28 -0
- aipt_v2/scanners/base.py +273 -0
- aipt_v2/scanners/nikto.py +244 -0
- aipt_v2/scanners/nmap.py +402 -0
- aipt_v2/scanners/nuclei.py +273 -0
- aipt_v2/scanners/web.py +454 -0
- aipt_v2/scripts/security_audit.py +366 -0
- aipt_v2/telemetry/__init__.py +7 -0
- aipt_v2/telemetry/tracer.py +347 -0
- aipt_v2/terminal/__init__.py +28 -0
- aipt_v2/terminal/executor.py +400 -0
- aipt_v2/terminal/sandbox.py +350 -0
- aipt_v2/tools/__init__.py +44 -0
- aipt_v2/tools/active_directory/__init__.py +78 -0
- aipt_v2/tools/active_directory/ad_config.py +238 -0
- aipt_v2/tools/active_directory/bloodhound_wrapper.py +447 -0
- aipt_v2/tools/active_directory/kerberos_attacks.py +430 -0
- aipt_v2/tools/active_directory/ldap_enum.py +533 -0
- aipt_v2/tools/active_directory/smb_attacks.py +505 -0
- aipt_v2/tools/agents_graph/__init__.py +19 -0
- aipt_v2/tools/agents_graph/agents_graph_actions.py +69 -0
- aipt_v2/tools/api_security/__init__.py +76 -0
- aipt_v2/tools/api_security/api_discovery.py +608 -0
- aipt_v2/tools/api_security/graphql_scanner.py +622 -0
- aipt_v2/tools/api_security/jwt_analyzer.py +577 -0
- aipt_v2/tools/api_security/openapi_fuzzer.py +761 -0
- aipt_v2/tools/browser/__init__.py +5 -0
- aipt_v2/tools/browser/browser_actions.py +238 -0
- aipt_v2/tools/browser/browser_instance.py +535 -0
- aipt_v2/tools/browser/tab_manager.py +344 -0
- aipt_v2/tools/cloud/__init__.py +70 -0
- aipt_v2/tools/cloud/cloud_config.py +273 -0
- aipt_v2/tools/cloud/cloud_scanner.py +639 -0
- aipt_v2/tools/cloud/prowler_tool.py +571 -0
- aipt_v2/tools/cloud/scoutsuite_tool.py +359 -0
- aipt_v2/tools/executor.py +307 -0
- aipt_v2/tools/parser.py +408 -0
- aipt_v2/tools/proxy/__init__.py +5 -0
- aipt_v2/tools/proxy/proxy_actions.py +103 -0
- aipt_v2/tools/proxy/proxy_manager.py +789 -0
- aipt_v2/tools/registry.py +196 -0
- aipt_v2/tools/scanners/__init__.py +343 -0
- aipt_v2/tools/scanners/acunetix_tool.py +712 -0
- aipt_v2/tools/scanners/burp_tool.py +631 -0
- aipt_v2/tools/scanners/config.py +156 -0
- aipt_v2/tools/scanners/nessus_tool.py +588 -0
- aipt_v2/tools/scanners/zap_tool.py +612 -0
- aipt_v2/tools/terminal/__init__.py +5 -0
- aipt_v2/tools/terminal/terminal_actions.py +37 -0
- aipt_v2/tools/terminal/terminal_manager.py +153 -0
- aipt_v2/tools/terminal/terminal_session.py +449 -0
- aipt_v2/tools/tool_processing.py +108 -0
- aipt_v2/utils/__init__.py +17 -0
- aipt_v2/utils/logging.py +201 -0
- aipt_v2/utils/model_manager.py +187 -0
- aipt_v2/utils/searchers/__init__.py +269 -0
- aiptx-2.0.2.dist-info/METADATA +324 -0
- aiptx-2.0.2.dist-info/RECORD +165 -0
- aiptx-2.0.2.dist-info/WHEEL +5 -0
- aiptx-2.0.2.dist-info/entry_points.txt +7 -0
- aiptx-2.0.2.dist-info/licenses/LICENSE +21 -0
- aiptx-2.0.2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,384 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AIPT Repository - Database access layer
|
|
3
|
+
Provides CRUD operations for all models.
|
|
4
|
+
|
|
5
|
+
Usage:
|
|
6
|
+
repo = Repository("sqlite:///aipt.db")
|
|
7
|
+
project = repo.create_project("Test", "192.168.1.0/24")
|
|
8
|
+
session = repo.create_session(project.id)
|
|
9
|
+
repo.add_finding(project.id, session.id, "port", "80/tcp", "HTTP server")
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
from typing import Optional, List
|
|
14
|
+
from contextlib import contextmanager
|
|
15
|
+
|
|
16
|
+
from sqlalchemy import create_engine
|
|
17
|
+
from sqlalchemy.orm import sessionmaker, Session as DBSession
|
|
18
|
+
|
|
19
|
+
from .models import (
|
|
20
|
+
Base, Project, Session, Finding, Task,
|
|
21
|
+
SeverityLevel, TaskStatus, PhaseType
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class Repository:
    """
    Database repository for AIPT.
    Handles all database operations (projects, sessions, findings, tasks).

    Every public method opens a short-lived SQLAlchemy session through
    ``_get_db``, which commits on success and rolls back on error.
    Returned ORM objects remain readable after the session closes because
    the sessionmaker is configured with ``expire_on_commit=False``.
    """

    def __init__(self, db_url: str = "sqlite:///~/.aipt/aipt.db"):
        """
        Initialize repository.

        Args:
            db_url: Database URL (SQLite or PostgreSQL)
                SQLite: sqlite:///path/to/db.sqlite
                PostgreSQL: postgresql://user:pass@host:port/db
        """
        import os

        # Expand a leading ~ in SQLite URLs. Replace only the first
        # occurrence so a literal "~" later in the path is untouched.
        if db_url.startswith("sqlite:///~"):
            db_url = db_url.replace("~", os.path.expanduser("~"), 1)

        # Ensure the parent directory exists for ANY SQLite database,
        # not just ~-prefixed ones. dirname() is empty for relative
        # paths such as "sqlite:///aipt.db"; os.makedirs("") would
        # raise FileNotFoundError, so guard against it.
        if db_url.startswith("sqlite:///"):
            db_dir = os.path.dirname(db_url[len("sqlite:///"):])
            if db_dir:
                os.makedirs(db_dir, exist_ok=True)

        self.engine = create_engine(db_url, echo=False)
        # expire_on_commit=False keeps returned ORM objects readable
        # after the producing session has been closed.
        self.SessionLocal = sessionmaker(bind=self.engine, expire_on_commit=False)

        # Create tables if they do not exist yet
        Base.metadata.create_all(self.engine)

    @contextmanager
    def _get_db(self):
        """Yield a database session; commit on success, roll back on error."""
        session = self.SessionLocal()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()

    # ============== Project Operations ==============

    def create_project(
        self,
        name: str,
        target: str,
        description: Optional[str] = None,
        scope: Optional[List[str]] = None,
        config: Optional[dict] = None,
    ) -> Project:
        """Create a new project. ``scope`` defaults to ``[target]``."""
        with self._get_db() as session:
            project = Project(
                name=name,
                target=target,
                description=description,
                scope=scope or [target],
                config=config or {},
            )
            session.add(project)
            session.flush()  # assign the primary key before refresh
            session.refresh(project)
            return project

    def get_project(self, project_id: int) -> Optional[Project]:
        """Get project by ID, or None if it does not exist."""
        with self._get_db() as session:
            return session.query(Project).filter(Project.id == project_id).first()

    def get_project_by_target(self, target: str) -> Optional[Project]:
        """Get the first project matching ``target``, or None."""
        with self._get_db() as session:
            return session.query(Project).filter(Project.target == target).first()

    def list_projects(self, status: Optional[str] = None) -> List[Project]:
        """List all projects, newest first, optionally filtered by status."""
        with self._get_db() as session:
            query = session.query(Project)
            if status:
                query = query.filter(Project.status == status)
            return query.order_by(Project.created_at.desc()).all()

    def update_project(
        self,
        project_id: int,
        **kwargs
    ) -> Optional[Project]:
        """Update project fields; kwargs that are not attributes are ignored."""
        with self._get_db() as session:
            project = session.query(Project).filter(Project.id == project_id).first()
            if project:
                for key, value in kwargs.items():
                    if hasattr(project, key):
                        setattr(project, key, value)
                session.flush()
                session.refresh(project)
            return project

    def delete_project(self, project_id: int) -> bool:
        """Delete project and all related data. Returns True if it existed."""
        with self._get_db() as session:
            project = session.query(Project).filter(Project.id == project_id).first()
            if project:
                session.delete(project)
                return True
            return False

    # ============== Session Operations ==============

    def create_session(
        self,
        project_id: int,
        name: Optional[str] = None,
        phase: PhaseType = PhaseType.RECON,
        max_iterations: int = 100,
    ) -> Session:
        """Create a new session. Name defaults to a timestamped label."""
        with self._get_db() as db_session:
            session = Session(
                project_id=project_id,
                name=name or f"Session {datetime.now().strftime('%Y%m%d_%H%M%S')}",
                phase=phase,
                max_iterations=max_iterations,
            )
            db_session.add(session)
            db_session.flush()
            db_session.refresh(session)
            return session

    def get_session_by_id(self, session_id: int) -> Optional[Session]:
        """Get session by ID, or None."""
        with self._get_db() as db_session:
            return db_session.query(Session).filter(Session.id == session_id).first()

    def get_active_session(self, project_id: int) -> Optional[Session]:
        """Get the project's session with status "running", or None."""
        with self._get_db() as db_session:
            return db_session.query(Session).filter(
                Session.project_id == project_id,
                Session.status == "running"
            ).first()

    def list_sessions(self, project_id: int) -> List[Session]:
        """List all sessions for a project, most recently started first."""
        with self._get_db() as db_session:
            return db_session.query(Session).filter(
                Session.project_id == project_id
            ).order_by(Session.started_at.desc()).all()

    def update_session(
        self,
        session_id: int,
        **kwargs
    ) -> Optional[Session]:
        """Update session fields; kwargs that are not attributes are ignored."""
        with self._get_db() as db_session:
            session = db_session.query(Session).filter(Session.id == session_id).first()
            if session:
                for key, value in kwargs.items():
                    if hasattr(session, key):
                        setattr(session, key, value)
                db_session.flush()
                db_session.refresh(session)
            return session

    def save_session_state(
        self,
        session_id: int,
        state: dict,
        memory_summary: Optional[str] = None,
    ) -> None:
        """Save session state for resume."""
        self.update_session(
            session_id,
            state=state,
            memory_summary=memory_summary,
        )

    def complete_session(self, session_id: int) -> None:
        """Mark session as completed and record its end time."""
        self.update_session(
            session_id,
            status="completed",
            ended_at=datetime.now(),
        )

    # ============== Finding Operations ==============

    def add_finding(
        self,
        project_id: int,
        session_id: Optional[int],
        type: str,
        value: str,
        description: Optional[str] = None,
        severity: str = "info",
        phase: Optional[str] = None,
        tool: Optional[str] = None,
        raw_output: Optional[str] = None,
        metadata: Optional[dict] = None,
    ) -> Finding:
        """
        Add a new finding, deduplicated on (project_id, type, value).

        If an identical finding already exists its extra_data is merged
        with ``metadata`` and the existing row is returned.
        """
        with self._get_db() as db_session:
            # Check for duplicate
            existing = db_session.query(Finding).filter(
                Finding.project_id == project_id,
                Finding.type == type,
                Finding.value == value,
            ).first()

            if existing:
                # Merge metadata into the existing finding. Reassign the
                # attribute (rather than mutating the dict in place) so
                # SQLAlchemy's change tracking flags the column as dirty:
                # plain JSON columns do not detect in-place mutation
                # unless wrapped in MutableDict.
                if metadata:
                    existing.extra_data = {**(existing.extra_data or {}), **metadata}
                return existing

            # Create new finding
            finding = Finding(
                project_id=project_id,
                session_id=session_id,
                type=type,
                value=value,
                description=description,
                severity=SeverityLevel(severity) if severity else SeverityLevel.INFO,
                phase=PhaseType(phase) if phase else None,
                tool=tool,
                raw_output=raw_output,
                extra_data=metadata or {},
            )
            db_session.add(finding)
            db_session.flush()
            db_session.refresh(finding)
            return finding

    def get_findings(
        self,
        project_id: int,
        type: Optional[str] = None,
        severity: Optional[str] = None,
        phase: Optional[str] = None,
        verified_only: bool = False,
    ) -> List[Finding]:
        """Get findings with optional filters, newest first."""
        with self._get_db() as db_session:
            query = db_session.query(Finding).filter(Finding.project_id == project_id)

            if type:
                query = query.filter(Finding.type == type)
            if severity:
                query = query.filter(Finding.severity == SeverityLevel(severity))
            if phase:
                query = query.filter(Finding.phase == PhaseType(phase))
            if verified_only:
                # .is_() generates an explicit IS TRUE comparison and
                # avoids the E712 `== True` anti-pattern.
                query = query.filter(Finding.verified.is_(True))

            return query.order_by(Finding.discovered_at.desc()).all()

    def verify_finding(self, finding_id: int, verified: bool = True, notes: Optional[str] = None) -> None:
        """Mark finding as verified (or unverified), optionally with notes."""
        with self._get_db() as db_session:
            finding = db_session.query(Finding).filter(Finding.id == finding_id).first()
            if finding:
                finding.verified = verified
                if notes:
                    finding.notes = notes

    def mark_false_positive(self, finding_id: int, notes: Optional[str] = None) -> None:
        """Mark finding as false positive, optionally with notes."""
        with self._get_db() as db_session:
            finding = db_session.query(Finding).filter(Finding.id == finding_id).first()
            if finding:
                finding.false_positive = True
                if notes:
                    finding.notes = notes

    def get_findings_summary(self, project_id: int) -> dict:
        """
        Get summary of findings by severity and type.

        False positives are excluded. Returns a dict with keys
        "total", "by_severity", "by_type" and "verified".
        """
        with self._get_db() as db_session:
            findings = db_session.query(Finding).filter(
                Finding.project_id == project_id,
                Finding.false_positive.is_(False),
            ).all()

            summary = {
                "total": len(findings),
                "by_severity": {},
                "by_type": {},
                "verified": 0,
            }

            for f in findings:
                # By severity (enum value, defaulting to "info")
                sev = f.severity.value if f.severity else "info"
                summary["by_severity"][sev] = summary["by_severity"].get(sev, 0) + 1

                # By type
                summary["by_type"][f.type] = summary["by_type"].get(f.type, 0) + 1

                # Verified count
                if f.verified:
                    summary["verified"] += 1

            return summary

    # ============== Task Operations ==============

    def add_task(
        self,
        session_id: int,
        task_id: str,
        description: str,
        phase: str,
        status: str = "to-do",
    ) -> Task:
        """Add a new task. ``phase``/``status`` must be valid enum values."""
        with self._get_db() as db_session:
            task = Task(
                session_id=session_id,
                task_id=task_id,
                description=description,
                phase=PhaseType(phase),
                status=TaskStatus(status),
            )
            db_session.add(task)
            db_session.flush()
            db_session.refresh(task)
            return task

    def update_task(
        self,
        task_id: int,
        status: Optional[str] = None,
        findings_count: Optional[int] = None,
        notes: Optional[str] = None,
    ) -> Optional[Task]:
        """
        Update task status and bookkeeping fields.

        Transitioning to "in-progress" stamps started_at; transitioning
        to "completed" stamps completed_at.
        """
        with self._get_db() as db_session:
            task = db_session.query(Task).filter(Task.id == task_id).first()
            if task:
                if status:
                    task.status = TaskStatus(status)
                    if status == "in-progress":
                        task.started_at = datetime.now()
                    elif status == "completed":
                        task.completed_at = datetime.now()
                if findings_count is not None:
                    task.findings_count = findings_count
                if notes:
                    task.notes = notes
            return task

    def get_tasks(self, session_id: int) -> List[Task]:
        """Get all tasks for a session in creation order."""
        with self._get_db() as db_session:
            return db_session.query(Task).filter(
                Task.session_id == session_id
            ).order_by(Task.created_at).all()
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AIPT Docker Module
|
|
3
|
+
|
|
4
|
+
Container management for secure execution:
|
|
5
|
+
- Docker sandbox for isolated command execution
|
|
6
|
+
- Pre-configured images for security tools
|
|
7
|
+
- Resource limits and network isolation
|
|
8
|
+
- Container lifecycle management
|
|
9
|
+
"""
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
from .sandbox import DockerSandbox, SandboxConfig, SandboxResult
|
|
13
|
+
from .manager import ContainerManager, SecurityImage
|
|
14
|
+
from .builder import ImageBuilder
|
|
15
|
+
|
|
16
|
+
# Public API of the aipt_v2.docker package — the names exported by
# `from aipt_v2.docker import *`.
__all__ = [
    "DockerSandbox",
    "SandboxConfig",
    "SandboxResult",
    "ContainerManager",
    "SecurityImage",
    "ImageBuilder",
]
|
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AIPT Image Builder - Custom Docker image building for security tools
|
|
3
|
+
|
|
4
|
+
Provides utilities to build custom images with:
|
|
5
|
+
- Multiple security tools pre-installed
|
|
6
|
+
- Custom configurations
|
|
7
|
+
- Optimized for pentest workflows
|
|
8
|
+
"""
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import subprocess
|
|
12
|
+
import tempfile
|
|
13
|
+
from typing import Optional, List, Dict
|
|
14
|
+
from dataclasses import dataclass, field
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
class ImageSpec:
    """Specification for a custom Docker image"""
    # Image name (the repository part of the tag, e.g. "aipt-recon").
    name: str
    # Image tag; combined with name as "name:tag".
    tag: str = "latest"
    # Base image used in the generated FROM instruction.
    base_image: str = "kalilinux/kali-rolling"
    # APT packages installed in a dedicated RUN layer.
    packages: List[str] = field(default_factory=list)
    # Python packages installed with pip3 --no-cache-dir.
    pip_packages: List[str] = field(default_factory=list)
    # Go tools installed via `go install` (module@version paths).
    go_packages: List[str] = field(default_factory=list)
    # Extra shell commands, each emitted as its own RUN instruction.
    custom_commands: List[str] = field(default_factory=list)
    # Environment variables baked into the image as ENV instructions.
    environment: Dict[str, str] = field(default_factory=dict)
    # Working directory set with WORKDIR in the image.
    workdir: str = "/workspace"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class ImageBuilder:
    """
    Build custom Docker images for security testing.

    Example:
        builder = ImageBuilder()
        spec = ImageSpec(
            name="aipt-recon",
            packages=["nmap", "masscan", "subfinder"],
            pip_packages=["httpx", "dnspython"],
        )
        builder.build(spec)
    """

    # Pre-defined tool sets, keyed by pentest phase. Each entry maps to
    # the corresponding ImageSpec fields (packages / pip_packages /
    # go_packages); missing keys default to empty lists in build_preset.
    TOOL_SETS = {
        "recon": {
            "packages": ["nmap", "masscan", "dnsutils", "whois", "curl", "wget"],
            "pip_packages": ["httpx", "dnspython", "shodan"],
            "go_packages": [
                "github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest",
                "github.com/projectdiscovery/httpx/cmd/httpx@latest",
                "github.com/tomnomnom/assetfinder@latest",
            ],
        },
        "enum": {
            "packages": ["nikto", "dirb", "gobuster", "ffuf"],
            "pip_packages": ["wappalyzer", "whatweb"],
            "go_packages": [
                "github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest",
                "github.com/ffuf/ffuf/v2@latest",
            ],
        },
        "exploit": {
            "packages": ["sqlmap", "hydra", "john", "hashcat"],
            "pip_packages": ["impacket", "pwntools"],
        },
        "post": {
            "packages": ["netcat-openbsd", "socat", "python3-pip"],
            # NOTE(review): "linpeas" and "pspy" do not look like PyPI
            # package names — presumably intended as downloads; confirm.
            "pip_packages": ["linpeas", "pspy"],
        },
    }

    def __init__(self, registry: Optional[str] = None):
        """
        Initialize image builder.

        Args:
            registry: Optional Docker registry (e.g., "ghcr.io/myorg")
        """
        # When set, built images are tagged "<registry>/<name>:<tag>"
        # and can be pushed with push().
        self.registry = registry

    def generate_dockerfile(self, spec: ImageSpec) -> str:
        """Generate Dockerfile content from spec.

        Pure function of ``spec``: emits FROM, a base-tooling apt layer,
        then optional layers for apt packages, pip packages, Go tools,
        custom RUN commands and ENV vars, and finally WORKDIR + CMD.
        """
        lines = [
            f"FROM {spec.base_image}",
            "",
            "# Update and install base packages",
            "RUN apt-get update && apt-get install -y \\",
            " curl wget git python3 python3-pip golang-go \\",
            " && rm -rf /var/lib/apt/lists/*",
            "",
        ]

        # Install system packages (one apt layer, list cleaned afterwards)
        if spec.packages:
            lines.extend([
                "# Install security tools",
                "RUN apt-get update && apt-get install -y \\",
                " " + " \\\n ".join(spec.packages) + " \\",
                " && rm -rf /var/lib/apt/lists/*",
                "",
            ])

        # Install pip packages
        if spec.pip_packages:
            lines.extend([
                "# Install Python packages",
                "RUN pip3 install --no-cache-dir \\",
                " " + " \\\n ".join(spec.pip_packages),
                "",
            ])

        # Install Go packages (one RUN per tool so layers cache per-tool)
        if spec.go_packages:
            lines.extend([
                "# Install Go tools",
                "ENV GOPATH=/go",
                "ENV PATH=$PATH:/go/bin",
            ])
            for pkg in spec.go_packages:
                lines.append(f"RUN go install {pkg}")
            lines.append("")

        # Custom commands, verbatim as RUN instructions
        if spec.custom_commands:
            lines.extend([
                "# Custom commands",
            ])
            for cmd in spec.custom_commands:
                lines.append(f"RUN {cmd}")
            lines.append("")

        # Environment variables
        if spec.environment:
            lines.append("# Environment variables")
            for key, value in spec.environment.items():
                lines.append(f"ENV {key}={value}")
            lines.append("")

        # Set workdir
        lines.extend([
            f"WORKDIR {spec.workdir}",
            "",
            "# Default command",
            'CMD ["/bin/bash"]',
        ])

        return "\n".join(lines)

    def build(
        self,
        spec: ImageSpec,
        push: bool = False,
        no_cache: bool = False,
    ) -> bool:
        """
        Build Docker image from spec.

        Args:
            spec: Image specification
            push: Push to registry after build
            no_cache: Build without cache

        Returns:
            True if successful
        """
        dockerfile_content = self.generate_dockerfile(spec)

        # Create temp directory with Dockerfile; it is removed when the
        # context manager exits, whether or not the build succeeds.
        with tempfile.TemporaryDirectory() as tmpdir:
            dockerfile_path = Path(tmpdir) / "Dockerfile"
            dockerfile_path.write_text(dockerfile_content)

            # Build image; prefix with registry if one was configured
            image_name = f"{spec.name}:{spec.tag}"
            if self.registry:
                image_name = f"{self.registry}/{image_name}"

            # List form, no shell=True: arguments are passed verbatim
            cmd = ["docker", "build", "-t", image_name, "."]
            if no_cache:
                cmd.append("--no-cache")

            try:
                result = subprocess.run(
                    cmd,
                    cwd=tmpdir,
                    capture_output=True,
                    text=True,
                    timeout=1800,  # 30 min timeout for builds
                )

                if result.returncode != 0:
                    print(f"Build failed: {result.stderr}")
                    return False

                # Push only when explicitly requested AND a registry is
                # configured; otherwise a successful build returns True.
                if push and self.registry:
                    return self.push(image_name)

                return True

            except subprocess.TimeoutExpired:
                print("Build timed out")
                return False
            except Exception as e:
                # Best-effort: report and signal failure rather than raise
                print(f"Build error: {e}")
                return False

    def push(self, image_name: str) -> bool:
        """Push image to registry. Returns True on success, False on any error."""
        try:
            result = subprocess.run(
                ["docker", "push", image_name],
                capture_output=True,
                text=True,
                timeout=600,
            )
            return result.returncode == 0
        except Exception:
            # Best-effort: treat timeouts / missing docker as a failed push
            return False

    def build_preset(
        self,
        preset: str,
        name: Optional[str] = None,
        **kwargs
    ) -> bool:
        """
        Build image from preset tool set.

        Args:
            preset: One of "recon", "enum", "exploit", "post"
            name: Image name (defaults to "aipt-{preset}")
            **kwargs: Additional ImageSpec parameters

        Returns:
            True if successful

        Raises:
            ValueError: if ``preset`` is not a key of TOOL_SETS.
        """
        if preset not in self.TOOL_SETS:
            raise ValueError(f"Unknown preset: {preset}. Choose from: {list(self.TOOL_SETS.keys())}")

        tools = self.TOOL_SETS[preset]

        # NOTE(review): passing packages/pip_packages/go_packages through
        # **kwargs would raise TypeError (duplicate keyword) — callers are
        # expected to use kwargs only for the remaining ImageSpec fields.
        spec = ImageSpec(
            name=name or f"aipt-{preset}",
            packages=tools.get("packages", []),
            pip_packages=tools.get("pip_packages", []),
            go_packages=tools.get("go_packages", []),
            **kwargs
        )

        return self.build(spec)

    def build_all_presets(self, **kwargs) -> Dict[str, bool]:
        """Build all preset images; returns {preset: success} per preset."""
        results = {}
        for preset in self.TOOL_SETS:
            results[preset] = self.build_preset(preset, **kwargs)
        return results
|