zen-ai-pentest 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agents/__init__.py +28 -0
- agents/agent_base.py +239 -0
- agents/agent_orchestrator.py +346 -0
- agents/analysis_agent.py +225 -0
- agents/cli.py +258 -0
- agents/exploit_agent.py +224 -0
- agents/integration.py +211 -0
- agents/post_scan_agent.py +937 -0
- agents/react_agent.py +384 -0
- agents/react_agent_enhanced.py +616 -0
- agents/react_agent_vm.py +298 -0
- agents/research_agent.py +176 -0
- api/__init__.py +11 -0
- api/auth.py +123 -0
- api/main.py +1027 -0
- api/schemas.py +357 -0
- api/websocket.py +97 -0
- autonomous/__init__.py +122 -0
- autonomous/agent.py +253 -0
- autonomous/agent_loop.py +1370 -0
- autonomous/exploit_validator.py +1537 -0
- autonomous/memory.py +448 -0
- autonomous/react.py +339 -0
- autonomous/tool_executor.py +488 -0
- backends/__init__.py +16 -0
- backends/chatgpt_direct.py +133 -0
- backends/claude_direct.py +130 -0
- backends/duckduckgo.py +138 -0
- backends/openrouter.py +120 -0
- benchmarks/__init__.py +149 -0
- benchmarks/benchmark_engine.py +904 -0
- benchmarks/ci_benchmark.py +785 -0
- benchmarks/comparison.py +729 -0
- benchmarks/metrics.py +553 -0
- benchmarks/run_benchmarks.py +809 -0
- ci_cd/__init__.py +2 -0
- core/__init__.py +17 -0
- core/async_pool.py +282 -0
- core/asyncio_fix.py +222 -0
- core/cache.py +472 -0
- core/container.py +277 -0
- core/database.py +114 -0
- core/input_validator.py +353 -0
- core/models.py +288 -0
- core/orchestrator.py +611 -0
- core/plugin_manager.py +571 -0
- core/rate_limiter.py +405 -0
- core/secure_config.py +328 -0
- core/shield_integration.py +296 -0
- modules/__init__.py +46 -0
- modules/cve_database.py +362 -0
- modules/exploit_assist.py +330 -0
- modules/nuclei_integration.py +480 -0
- modules/osint.py +604 -0
- modules/protonvpn.py +554 -0
- modules/recon.py +165 -0
- modules/sql_injection_db.py +826 -0
- modules/tool_orchestrator.py +498 -0
- modules/vuln_scanner.py +292 -0
- modules/wordlist_generator.py +566 -0
- risk_engine/__init__.py +99 -0
- risk_engine/business_impact.py +267 -0
- risk_engine/business_impact_calculator.py +563 -0
- risk_engine/cvss.py +156 -0
- risk_engine/epss.py +190 -0
- risk_engine/example_usage.py +294 -0
- risk_engine/false_positive_engine.py +1073 -0
- risk_engine/scorer.py +304 -0
- web_ui/backend/main.py +471 -0
- zen_ai_pentest-2.0.0.dist-info/METADATA +795 -0
- zen_ai_pentest-2.0.0.dist-info/RECORD +75 -0
- zen_ai_pentest-2.0.0.dist-info/WHEEL +5 -0
- zen_ai_pentest-2.0.0.dist-info/entry_points.txt +2 -0
- zen_ai_pentest-2.0.0.dist-info/licenses/LICENSE +21 -0
- zen_ai_pentest-2.0.0.dist-info/top_level.txt +10 -0
api/main.py
ADDED
@@ -0,0 +1,1027 @@
"""
Zen-AI-Pentest API Server

FastAPI-based backend for the pentesting framework.
"""

import os
import sys
from pathlib import Path

# Add parent to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks, WebSocket, WebSocketDisconnect
from fastapi.middleware.cors import CORSMiddleware
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from contextlib import asynccontextmanager
import asyncio
import json
import logging
from typing import List, Optional
from datetime import datetime

from database.models import init_db, get_db, SessionLocal
from database.crud import (
    create_scan, get_scan, get_scans, update_scan_status,
    create_finding, get_findings, create_report, get_reports
)
from api.schemas import (
    ScanCreate, ScanResponse, ScanUpdate,
    FindingCreate, FindingResponse,
    ReportCreate, ReportResponse,
    ToolExecuteRequest, ToolExecuteResponse,
    WSMessage,
    ScheduledScanCreate, ScheduledScanUpdate, ScheduledScanResponse
)
from api.auth import verify_token, create_access_token
from api.websocket import ConnectionManager

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Security
security = HTTPBearer()

# WebSocket Manager
ws_manager = ConnectionManager()

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan handler"""
    # Startup
    logger.info("Starting up Zen-AI-Pentest API...")
    init_db()
    logger.info("Database initialized")
    yield
    # Shutdown
    logger.info("Shutting down...")

app = FastAPI(
    title="Zen-AI-Pentest API",
    description="Professional Pentesting Framework API",
    version="2.0.0",
    lifespan=lifespan
)

# CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Production: restrict this!
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
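
The wildcard CORS policy is flagged by the code's own comment for production hardening. A minimal sketch of one way to restrict it, reading the allowed origins from an environment variable; the `ZEN_CORS_ORIGINS` name and the localhost default are assumptions, not part of the package:

# Hypothetical hardening sketch (not part of the packaged file): read allowed
# origins from an environment variable instead of using the "*" wildcard.
allowed_origins = [
    origin.strip()
    for origin in os.environ.get("ZEN_CORS_ORIGINS", "http://localhost:3000").split(",")
    if origin.strip()
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=allowed_origins,  # explicit list instead of ["*"]
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
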
# ============================================================================
# AUTHENTICATION
# ============================================================================

@app.post("/auth/login")
async def login(credentials: dict):
    """Login and get JWT token"""
    # Simplified - in production: verify against DB/LDAP
    if credentials.get("username") == "admin" and credentials.get("password") == "admin":
        token = create_access_token({"sub": "admin", "role": "admin"})
        return {"access_token": token, "token_type": "bearer"}
    raise HTTPException(status_code=401, detail="Invalid credentials")

@app.get("/auth/me")
async def me(user: dict = Depends(verify_token)):
    """Get current user info"""
    return user
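
The login handler ships with a hardcoded admin/admin check, and its own comment says to verify against a DB or LDAP in production. A minimal DB-backed sketch, assuming a `User` model with a bcrypt-hashed password column and the passlib library; both are assumptions and not part of this wheel:

# Hypothetical replacement for the hardcoded check above; the User model and
# its "hashed_password"/"role" columns are assumptions, not part of this package.
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

@app.post("/auth/login")
async def login(credentials: dict, db = Depends(get_db)):
    """Login and get JWT token (DB-backed sketch)"""
    user = db.query(User).filter(User.username == credentials.get("username")).first()
    if user and pwd_context.verify(credentials.get("password", ""), user.hashed_password):
        token = create_access_token({"sub": user.username, "role": user.role})
        return {"access_token": token, "token_type": "bearer"}
    raise HTTPException(status_code=401, detail="Invalid credentials")
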
# ============================================================================
# SCANS
# ============================================================================

@app.post("/scans", response_model=ScanResponse)
async def create_new_scan(
    scan: ScanCreate,
    background_tasks: BackgroundTasks,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Create a new pentest scan"""
    db_scan = create_scan(
        db,
        name=scan.name,
        target=scan.target,
        scan_type=scan.scan_type,
        config=scan.config,
        user_id=user.get("sub")
    )

    # Start scan in background
    background_tasks.add_task(run_scan_task, db_scan.id, scan.dict())

    return db_scan

@app.get("/scans", response_model=List[ScanResponse])
async def list_scans(
    skip: int = 0,
    limit: int = 100,
    status: Optional[str] = None,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """List all scans with optional filtering"""
    scans = get_scans(db, skip=skip, limit=limit, status=status)
    return scans

@app.get("/scans/{scan_id}", response_model=ScanResponse)
async def get_scan_by_id(
    scan_id: int,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get scan details by ID"""
    scan = get_scan(db, scan_id)
    if not scan:
        raise HTTPException(status_code=404, detail="Scan not found")
    return scan

@app.patch("/scans/{scan_id}", response_model=ScanResponse)
async def update_scan(
    scan_id: int,
    update: ScanUpdate,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Update scan status or config"""
    scan = update_scan_status(db, scan_id, update.status, update.config)
    if not scan:
        raise HTTPException(status_code=404, detail="Scan not found")
    return scan

@app.delete("/scans/{scan_id}")
async def delete_scan(
    scan_id: int,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Delete a scan"""
    # Implementation here
    return {"message": "Scan deleted"}
# ============================================================================
# FINDINGS
# ============================================================================

@app.get("/scans/{scan_id}/findings", response_model=List[FindingResponse])
async def get_scan_findings(
    scan_id: int,
    severity: Optional[str] = None,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get all findings for a scan"""
    findings = get_findings(db, scan_id, severity)
    return findings

@app.post("/scans/{scan_id}/findings", response_model=FindingResponse)
async def add_finding(
    scan_id: int,
    finding: FindingCreate,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Add a finding to a scan"""
    db_finding = create_finding(
        db,
        scan_id=scan_id,
        title=finding.title,
        description=finding.description,
        severity=finding.severity,
        cvss_score=finding.cvss_score,
        evidence=finding.evidence,
        tool=finding.tool
    )
    return db_finding

# ============================================================================
# TOOLS EXECUTION
# ============================================================================

@app.post("/tools/execute", response_model=ToolExecuteResponse)
async def execute_tool(
    request: ToolExecuteRequest,
    background_tasks: BackgroundTasks,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Execute a pentesting tool"""
    # Create scan entry
    db_scan = create_scan(
        db,
        name=f"Tool: {request.tool_name}",
        target=request.target,
        scan_type="tool_execution",
        config=request.parameters,
        user_id=user.get("sub")
    )

    # Execute in background
    background_tasks.add_task(
        execute_tool_task,
        db_scan.id,
        request.tool_name,
        request.target,
        request.parameters
    )

    return ToolExecuteResponse(
        scan_id=db_scan.id,
        status="started",
        message=f"Tool {request.tool_name} execution started"
    )

@app.get("/tools")
async def list_tools(user: dict = Depends(verify_token)):
    """List available tools"""
    from tools import TOOL_REGISTRY

    tools = []
    for name, func in TOOL_REGISTRY.items():
        if func:
            tools.append({
                "name": name,
                "description": func.__doc__ or "No description",
                "category": get_tool_category(name)
            })

    return {"tools": tools}

def get_tool_category(tool_name: str) -> str:
    """Get tool category based on name"""
    categories = {
        "nmap": "network",
        "masscan": "network",
        "scapy": "network",
        "tshark": "network",
        "burp": "web",
        "sqlmap": "web",
        "gobuster": "web",
        "metasploit": "exploitation",
        "hydra": "brute_force",
        "amass": "recon",
        "bloodhound": "ad",
        "cme": "ad",
        "responder": "ad",
        "aircrack": "wireless"
    }

    for key, cat in categories.items():
        if key in tool_name.lower():
            return cat
    return "other"
# ============================================================================
# REPORTS
# ============================================================================

@app.post("/reports", response_model=ReportResponse)
async def generate_report(
    report: ReportCreate,
    background_tasks: BackgroundTasks,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Generate a report from scan findings"""
    db_report = create_report(
        db,
        scan_id=report.scan_id,
        format=report.format,
        template=report.template,
        user_id=user.get("sub")
    )

    # Generate in background
    background_tasks.add_task(
        generate_report_task,
        db_report.id,
        report.scan_id,
        report.format
    )

    return db_report

@app.get("/reports", response_model=List[ReportResponse])
async def list_reports(
    skip: int = 0,
    limit: int = 100,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """List all reports"""
    reports = get_reports(db, skip, limit)
    return reports

@app.get("/reports/{report_id}/download")
async def download_report(
    report_id: int,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Download a generated report"""
    from fastapi.responses import FileResponse

    report = db.query(Report).filter(Report.id == report_id).first()
    if not report or not report.file_path:
        raise HTTPException(status_code=404, detail="Report not found")

    return FileResponse(
        report.file_path,
        filename=f"report_{report_id}.{report.format}"
    )

# ============================================================================
# WEBSOCKET
# ============================================================================

@app.websocket("/ws/scans/{scan_id}")
async def scan_websocket(websocket: WebSocket, scan_id: int):
    """WebSocket for real-time scan updates"""
    await ws_manager.connect(websocket, scan_id)
    try:
        while True:
            # Keep connection alive
            data = await websocket.receive_text()
            message = json.loads(data)

            if message.get("action") == "ping":
                await websocket.send_json({"type": "pong"})

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, scan_id)

@app.websocket("/ws/notifications")
async def notifications_websocket(websocket: WebSocket):
    """WebSocket for global notifications"""
    await ws_manager.connect(websocket, "global")
    try:
        while True:
            data = await websocket.receive_text()
            # Handle incoming messages
    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, "global")
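
A small client-side sketch for the scan WebSocket above, using the third-party `websockets` package (an assumption; any WebSocket client works). It sends the ping action the handler understands and prints whatever status events `broadcast_to_scan` pushes for that scan:

# Sketch of a consumer for /ws/scans/{scan_id}; assumes the "websockets"
# client library and a server on localhost:8000.
import asyncio
import json
import websockets

async def follow_scan(scan_id: int):
    uri = f"ws://localhost:8000/ws/scans/{scan_id}"
    async with websockets.connect(uri) as ws:
        await ws.send(json.dumps({"action": "ping"}))   # handler replies with {"type": "pong"}
        async for raw in ws:
            event = json.loads(raw)
            print(event)
            if event.get("status") in ("completed", "failed"):
                break

asyncio.run(follow_scan(1))
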
# ============================================================================
# BACKGROUND TASKS
# ============================================================================

async def run_scan_task(scan_id: int, scan_config: dict):
    """Background task for running a scan"""
    from agents.react_agent import ReActAgent, ReActAgentConfig

    db = SessionLocal()
    try:
        update_scan_status(db, scan_id, "running")

        # Notify via WebSocket
        await ws_manager.broadcast_to_scan(scan_id, {
            "type": "status",
            "status": "running",
            "message": "Scan started"
        })

        # Run agent
        config = ReActAgentConfig(max_iterations=10)
        agent = ReActAgent(config)

        result = agent.run(
            target=scan_config["target"],
            objective=scan_config.get("objective", "comprehensive scan")
        )

        # Save findings
        for finding_data in result.get("findings", []):
            create_finding(
                db,
                scan_id=scan_id,
                title=f"Finding from {finding_data.get('tool', 'unknown')}",
                description=str(finding_data.get('result', ''))[:500],
                severity="medium",
                tool=finding_data.get('tool')
            )

        update_scan_status(db, scan_id, "completed", {
            "result": result.get("final_message", ""),
            "iterations": result.get("iterations", 0)
        })

        # Notify completion
        await ws_manager.broadcast_to_scan(scan_id, {
            "type": "status",
            "status": "completed",
            "message": "Scan completed",
            "findings_count": len(result.get("findings", []))
        })

    except Exception as e:
        logger.error(f"Scan task error: {e}")
        update_scan_status(db, scan_id, "failed", {"error": str(e)})

        await ws_manager.broadcast_to_scan(scan_id, {
            "type": "error",
            "message": str(e)
        })
    finally:
        db.close()

async def execute_tool_task(scan_id: int, tool_name: str, target: str, parameters: dict):
    """Execute a single tool"""
    from tools import TOOL_REGISTRY

    db = SessionLocal()
    try:
        tool_func = TOOL_REGISTRY.get(tool_name)
        if not tool_func:
            raise ValueError(f"Tool {tool_name} not found")

        # Execute tool
        result = tool_func(target, **parameters)

        # Save finding
        create_finding(
            db,
            scan_id=scan_id,
            title=f"{tool_name} result",
            description=str(result)[:1000],
            severity="info",
            tool=tool_name
        )

        update_scan_status(db, scan_id, "completed")

    except Exception as e:
        logger.error(f"Tool execution error: {e}")
        update_scan_status(db, scan_id, "failed", {"error": str(e)})
    finally:
        db.close()

async def generate_report_task(report_id: int, scan_id: int, format: str):
    """Generate report in background"""
    from reports.generator import ReportGenerator

    db = SessionLocal()
    try:
        generator = ReportGenerator()

        if format == "pdf":
            file_path = generator.generate_pdf(scan_id)
        elif format == "html":
            file_path = generator.generate_html(scan_id)
        else:
            file_path = generator.generate_json(scan_id)

        # Update report
        report = db.query(Report).filter(Report.id == report_id).first()
        if report:
            report.file_path = file_path
            report.status = "completed"
            db.commit()

    except Exception as e:
        logger.error(f"Report generation error: {e}")
        report = db.query(Report).filter(Report.id == report_id).first()
        if report:
            report.status = "failed"
            db.commit()
    finally:
        db.close()

# ============================================================================
# HEALTH & INFO
# ============================================================================

@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "version": "2.0.0",
        "timestamp": datetime.utcnow().isoformat()
    }

@app.get("/info")
async def api_info():
    """API information"""
    return {
        "name": "Zen-AI-Pentest API",
        "version": "2.0.0",
        "description": "Professional Pentesting Framework",
        "endpoints": {
            "scans": "/scans",
            "findings": "/scans/{id}/findings",
            "tools": "/tools",
            "reports": "/reports"
        }
    }

# ============================================================================
# SCHEDULED SCANS
# ============================================================================

# In-memory storage for scheduled scans (in production: use database)
SCHEDULED_SCANS = []
SCHEDULE_ID_COUNTER = 1

@app.post("/schedules", response_model=ScheduledScanResponse)
async def create_schedule(
    schedule: ScheduledScanCreate,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Create a new scheduled scan"""
    global SCHEDULE_ID_COUNTER

    schedule_dict = {
        "id": SCHEDULE_ID_COUNTER,
        "name": schedule.name,
        "target": schedule.target,
        "scan_type": schedule.scan_type,
        "frequency": schedule.frequency.value,
        "schedule_time": schedule.schedule_time,
        "schedule_day": schedule.schedule_day,
        "enabled": schedule.enabled,
        "notification_email": schedule.notification_email,
        "notification_slack": schedule.notification_slack,
        "last_run_at": None,
        "last_run_status": None,
        "next_run_at": calculate_next_run(schedule.frequency.value, schedule.schedule_time, schedule.schedule_day),
        "created_at": datetime.utcnow(),
        "created_by": user.get("sub", "unknown")
    }

    SCHEDULED_SCANS.append(schedule_dict)
    SCHEDULE_ID_COUNTER += 1

    return schedule_dict

@app.get("/schedules", response_model=List[ScheduledScanResponse])
async def list_schedules(
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """List all scheduled scans"""
    return SCHEDULED_SCANS

@app.get("/schedules/{schedule_id}", response_model=ScheduledScanResponse)
async def get_schedule(
    schedule_id: int,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get a specific scheduled scan"""
    for schedule in SCHEDULED_SCANS:
        if schedule["id"] == schedule_id:
            return schedule
    raise HTTPException(status_code=404, detail="Schedule not found")

@app.patch("/schedules/{schedule_id}", response_model=ScheduledScanResponse)
async def update_schedule(
    schedule_id: int,
    update: ScheduledScanUpdate,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Update a scheduled scan"""
    for schedule in SCHEDULED_SCANS:
        if schedule["id"] == schedule_id:
            # Update fields
            for field, value in update.dict(exclude_unset=True).items():
                if value is not None:
                    schedule[field] = value

            # Recalculate next run if schedule changed
            if update.frequency or update.schedule_time or update.schedule_day is not None:
                schedule["next_run_at"] = calculate_next_run(
                    schedule["frequency"],
                    schedule["schedule_time"],
                    schedule["schedule_day"]
                )

            return schedule
    raise HTTPException(status_code=404, detail="Schedule not found")

@app.delete("/schedules/{schedule_id}")
async def delete_schedule(
    schedule_id: int,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Delete a scheduled scan"""
    global SCHEDULED_SCANS
    for i, schedule in enumerate(SCHEDULED_SCANS):
        if schedule["id"] == schedule_id:
            SCHEDULED_SCANS.pop(i)
            return {"message": "Schedule deleted"}
    raise HTTPException(status_code=404, detail="Schedule not found")

@app.post("/schedules/{schedule_id}/run")
async def run_schedule_now(
    schedule_id: int,
    background_tasks: BackgroundTasks,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Manually trigger a scheduled scan"""
    for schedule in SCHEDULED_SCANS:
        if schedule["id"] == schedule_id:
            background_tasks.add_task(
                execute_scheduled_scan,
                schedule,
                user.get("sub")
            )
            return {"message": "Scan triggered"}
    raise HTTPException(status_code=404, detail="Schedule not found")

def calculate_next_run(frequency: str, time_str: str, day: Optional[int] = None) -> datetime:
    """Calculate the next run time for a schedule"""
    from datetime import timedelta

    now = datetime.utcnow()
    hour, minute = map(int, time_str.split(':'))

    next_run = now.replace(hour=hour, minute=minute, second=0, microsecond=0)

    if frequency == "once":
        if next_run <= now:
            next_run += timedelta(days=1)
    elif frequency == "daily":
        if next_run <= now:
            next_run += timedelta(days=1)
    elif frequency == "weekly":
        days_ahead = day - now.weekday() if day is not None else 0
        if days_ahead <= 0:
            days_ahead += 7
        next_run += timedelta(days=days_ahead)
    elif frequency == "monthly":
        # Simplified: run on the first of next month
        if now.day > 1 or (now.day == 1 and next_run <= now):
            next_run = next_run.replace(day=1, month=now.month + 1 if now.month < 12 else 1)
            if next_run.month == 1:
                next_run = next_run.replace(year=next_run.year + 1)

    return next_run
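
The weekly branch is the least obvious part of `calculate_next_run`: `schedule_day` is compared against `now.weekday()`, so it presumably uses Python's Monday=0 numbering (an assumption consistent with the code). A worked example:

# Worked example of the weekly branch:
# now = Friday 2025-01-10 12:00 UTC, schedule_time = "02:00", schedule_day = 2 (Wednesday)
#   days_ahead = 2 - 4 = -2  ->  -2 + 7 = 5
#   next_run  = 2025-01-10 02:00 + 5 days = Wednesday 2025-01-15 02:00 UTC
from datetime import datetime, timedelta

now = datetime(2025, 1, 10, 12, 0)                      # Friday, weekday() == 4
next_run = now.replace(hour=2, minute=0, second=0, microsecond=0)
days_ahead = 2 - now.weekday()                           # -2
if days_ahead <= 0:
    days_ahead += 7                                      # 5
next_run += timedelta(days=days_ahead)
assert next_run == datetime(2025, 1, 15, 2, 0) and next_run.weekday() == 2
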
async def execute_scheduled_scan(schedule: dict, user_id: str):
    """Execute a scheduled scan"""
    from database.crud import create_scan, update_scan_status

    db = SessionLocal()
    try:
        # Update last run
        schedule["last_run_at"] = datetime.utcnow()
        schedule["last_run_status"] = "running"

        # Create scan
        db_scan = create_scan(
            db,
            name=schedule["name"],
            target=schedule["target"],
            scan_type=schedule["scan_type"],
            config={},
            user_id=user_id
        )

        # Run scan
        update_scan_status(db, db_scan.id, "running")

        # TODO: Actually run the scan (simplified for now)
        import asyncio
        await asyncio.sleep(5)  # Simulate scan

        update_scan_status(db, db_scan.id, "completed")
        schedule["last_run_status"] = "completed"

        # Calculate next run
        schedule["next_run_at"] = calculate_next_run(
            schedule["frequency"],
            schedule["schedule_time"],
            schedule.get("schedule_day")
        )

        # Send notifications
        if schedule.get("notification_email"):
            await send_email_notification(schedule, db_scan.id)
        if schedule.get("notification_slack"):
            await send_slack_notification(schedule, db_scan.id)

    except Exception as e:
        schedule["last_run_status"] = "failed"
        logger.error(f"Scheduled scan error: {e}")
    finally:
        db.close()

async def send_email_notification(schedule: dict, scan_id: int):
    """Send email notification"""
    logger.info(f"Would send email to {schedule['notification_email']} for scan {scan_id}")

async def send_slack_notification(schedule: dict, scan_id: int):
    """Send Slack notification"""
    logger.info(f"Would send Slack notification for scan {scan_id}")
# ============================================================================
# STATS
# ============================================================================

@app.get("/stats/overview")
async def get_stats_overview(
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get dashboard statistics overview"""
    from sqlalchemy import func
    from database.models import Scan, Finding

    # Basic counts
    total_scans = db.query(Scan).count()
    completed_scans = db.query(Scan).filter(Scan.status == "completed").count()
    running_scans = db.query(Scan).filter(Scan.status == "running").count()

    # Findings counts
    total_findings = db.query(Finding).count()

    # Severity distribution
    severity_counts = db.query(
        Finding.severity,
        func.count(Finding.id)
    ).group_by(Finding.severity).all()

    severity_distribution = [
        {"name": sev.capitalize(), "value": count, "color": get_severity_color(sev)}
        for sev, count in severity_counts
    ]

    # Fill missing severities
    all_severities = ['critical', 'high', 'medium', 'low', 'info']
    existing = {s['name'].lower(): s for s in severity_distribution}
    for sev in all_severities:
        if sev not in existing:
            severity_distribution.append({
                "name": sev.capitalize(),
                "value": 0,
                "color": get_severity_color(sev)
            })

    return {
        "total_scans": total_scans,
        "completed_scans": completed_scans,
        "running_scans": running_scans,
        "total_findings": total_findings,
        "critical_findings": sum(count for sev, count in severity_counts if sev == 'critical'),
        "severity_distribution": severity_distribution,
        "trends": [],  # TODO: Implement trends
        "tool_usage": []  # TODO: Implement tool usage
    }

def get_severity_color(severity: str) -> str:
    """Get color for severity level"""
    colors = {
        'critical': '#ef4444',
        'high': '#f97316',
        'medium': '#eab308',
        'low': '#22c55e',
        'info': '#3b82f6'
    }
    return colors.get(severity.lower(), '#6b7280')

@app.get("/stats/trends")
async def get_stats_trends(
    days: int = 30,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get scan trends for the last N days"""
    # TODO: Implement trend calculation
    return []

@app.get("/stats/severity")
async def get_severity_stats(
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get findings by severity"""
    from sqlalchemy import func
    from database.models import Finding

    severity_counts = db.query(
        Finding.severity,
        func.count(Finding.id)
    ).group_by(Finding.severity).all()

    return [
        {"severity": sev, "count": count}
        for sev, count in severity_counts
    ]

@app.get("/stats/tools")
async def get_tool_usage(
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Get tool usage statistics"""
    # TODO: Implement tool usage tracking
    return []

# ============================================================================
# NOTIFICATIONS (SLACK)
# ============================================================================

@app.post("/notifications/slack/test")
async def test_slack_notification(
    webhook_url: str,
    user: dict = Depends(verify_token)
):
    """Test Slack webhook configuration"""
    try:
        from notifications.slack import SlackNotifier
        notifier = SlackNotifier(webhook_url)
        success = notifier.send_message(
            f"Test notification from Zen AI Pentest\nUser: {user.get('sub', 'unknown')}\nTime: {datetime.utcnow().isoformat()}"
        )
        if success:
            return {"status": "success", "message": "Test notification sent"}
        else:
            raise HTTPException(status_code=400, detail="Failed to send Slack notification")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/notifications/slack/scan-complete")
async def notify_slack_scan_complete(
    scan_id: int,
    webhook_url: str,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Send Slack notification for scan completion"""
    try:
        from notifications.slack import SlackNotifier
        from database.models import Scan, Finding

        # Get scan details
        scan = db.query(Scan).filter(Scan.id == scan_id).first()
        if not scan:
            raise HTTPException(status_code=404, detail="Scan not found")

        # Count findings
        findings = db.query(Finding).filter(Finding.scan_id == scan_id).all()
        findings_count = len(findings)
        critical_count = sum(1 for f in findings if f.severity == 'critical')

        # Send notification
        notifier = SlackNotifier(webhook_url)
        success = notifier.send_scan_completed(
            scan_id=scan_id,
            target=scan.target,
            findings_count=findings_count,
            critical_count=critical_count
        )

        if success:
            return {"status": "success", "message": "Notification sent"}
        else:
            raise HTTPException(status_code=400, detail="Failed to send notification")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

# Store Slack webhook in memory (in production: use database)
SLACK_CONFIG = {
    "webhook_url": None,
    "enabled": False
}

@app.get("/settings/slack")
async def get_slack_settings(
    user: dict = Depends(verify_token)
):
    """Get Slack configuration (without sensitive data)"""
    return {
        "enabled": SLACK_CONFIG["enabled"],
        "configured": SLACK_CONFIG["webhook_url"] is not None
    }

@app.post("/settings/slack")
async def update_slack_settings(
    webhook_url: str,
    enabled: bool = True,
    user: dict = Depends(verify_token)
):
    """Update Slack configuration"""
    global SLACK_CONFIG
    SLACK_CONFIG["webhook_url"] = webhook_url
    SLACK_CONFIG["enabled"] = enabled
    return {"status": "success", "message": "Slack settings updated"}

# ============================================================================
# JIRA INTEGRATION
# ============================================================================

@app.get("/settings/jira")
async def get_jira_settings(
    user: dict = Depends(verify_token)
):
    """Get JIRA configuration (without sensitive data)"""
    from integrations.jira_client import JIRA_CONFIG
    return {
        "enabled": JIRA_CONFIG["enabled"],
        "configured": JIRA_CONFIG["base_url"] is not None,
        "base_url": JIRA_CONFIG["base_url"]
    }

@app.post("/settings/jira")
async def update_jira_settings(
    base_url: str,
    username: str,
    api_token: str,
    enabled: bool = True,
    user: dict = Depends(verify_token)
):
    """Update JIRA configuration"""
    from integrations.jira_client import JIRA_CONFIG
    JIRA_CONFIG["base_url"] = base_url
    JIRA_CONFIG["username"] = username
    JIRA_CONFIG["api_token"] = api_token
    JIRA_CONFIG["enabled"] = enabled
    return {"status": "success", "message": "JIRA settings updated"}

@app.post("/settings/jira/test")
async def test_jira_connection(
    user: dict = Depends(verify_token)
):
    """Test JIRA connection"""
    from integrations.jira_client import get_jira_client

    client = get_jira_client()
    if not client:
        raise HTTPException(status_code=400, detail="JIRA not configured")

    if client.test_connection():
        return {"status": "success", "message": "Connection successful"}
    else:
        raise HTTPException(status_code=400, detail="Connection failed")

@app.get("/settings/jira/projects")
async def get_jira_projects(
    user: dict = Depends(verify_token)
):
    """Get available JIRA projects"""
    from integrations.jira_client import get_jira_client

    client = get_jira_client()
    if not client:
        raise HTTPException(status_code=400, detail="JIRA not configured")

    projects = client.get_projects()
    return [{"key": p["key"], "name": p["name"]} for p in projects]

@app.post("/integrations/jira/create-ticket")
async def create_jira_ticket(
    finding_id: int,
    project_key: str,
    user: dict = Depends(verify_token),
    db = Depends(get_db)
):
    """Create JIRA ticket from finding"""
    from integrations.jira_client import get_jira_client
    from database.models import Finding

    client = get_jira_client()
    if not client:
        raise HTTPException(status_code=400, detail="JIRA not configured")

    # Get finding
    finding = db.query(Finding).filter(Finding.id == finding_id).first()
    if not finding:
        raise HTTPException(status_code=404, detail="Finding not found")

    # Create ticket
    result = client.create_finding_ticket(
        project_key=project_key,
        finding={
            "title": finding.title,
            "description": finding.description,
            "severity": finding.severity,
            "target": finding.target,
            "tool": finding.tool
        }
    )

    if result:
        return {
            "status": "success",
            "ticket_key": result.get("key"),
            "ticket_url": f"{client.base_url}/browse/{result.get('key')}"
        }
    else:
        raise HTTPException(status_code=500, detail="Failed to create ticket")

# Import models for reports
from database.models import Report

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)