aiptx-2.0.2-py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aiptx might be problematic.
- aipt_v2/__init__.py +110 -0
- aipt_v2/__main__.py +24 -0
- aipt_v2/agents/AIPTxAgent/__init__.py +10 -0
- aipt_v2/agents/AIPTxAgent/aiptx_agent.py +211 -0
- aipt_v2/agents/__init__.py +24 -0
- aipt_v2/agents/base.py +520 -0
- aipt_v2/agents/ptt.py +406 -0
- aipt_v2/agents/state.py +168 -0
- aipt_v2/app.py +960 -0
- aipt_v2/browser/__init__.py +31 -0
- aipt_v2/browser/automation.py +458 -0
- aipt_v2/browser/crawler.py +453 -0
- aipt_v2/cli.py +321 -0
- aipt_v2/compliance/__init__.py +71 -0
- aipt_v2/compliance/compliance_report.py +449 -0
- aipt_v2/compliance/framework_mapper.py +424 -0
- aipt_v2/compliance/nist_mapping.py +345 -0
- aipt_v2/compliance/owasp_mapping.py +330 -0
- aipt_v2/compliance/pci_mapping.py +297 -0
- aipt_v2/config.py +288 -0
- aipt_v2/core/__init__.py +43 -0
- aipt_v2/core/agent.py +630 -0
- aipt_v2/core/llm.py +395 -0
- aipt_v2/core/memory.py +305 -0
- aipt_v2/core/ptt.py +329 -0
- aipt_v2/database/__init__.py +14 -0
- aipt_v2/database/models.py +232 -0
- aipt_v2/database/repository.py +384 -0
- aipt_v2/docker/__init__.py +23 -0
- aipt_v2/docker/builder.py +260 -0
- aipt_v2/docker/manager.py +222 -0
- aipt_v2/docker/sandbox.py +371 -0
- aipt_v2/evasion/__init__.py +58 -0
- aipt_v2/evasion/request_obfuscator.py +272 -0
- aipt_v2/evasion/tls_fingerprint.py +285 -0
- aipt_v2/evasion/ua_rotator.py +301 -0
- aipt_v2/evasion/waf_bypass.py +439 -0
- aipt_v2/execution/__init__.py +23 -0
- aipt_v2/execution/executor.py +302 -0
- aipt_v2/execution/parser.py +544 -0
- aipt_v2/execution/terminal.py +337 -0
- aipt_v2/health.py +437 -0
- aipt_v2/intelligence/__init__.py +85 -0
- aipt_v2/intelligence/auth.py +520 -0
- aipt_v2/intelligence/chaining.py +775 -0
- aipt_v2/intelligence/cve_aipt.py +334 -0
- aipt_v2/intelligence/cve_info.py +1111 -0
- aipt_v2/intelligence/rag.py +239 -0
- aipt_v2/intelligence/scope.py +442 -0
- aipt_v2/intelligence/searchers/__init__.py +5 -0
- aipt_v2/intelligence/searchers/exploitdb_searcher.py +523 -0
- aipt_v2/intelligence/searchers/github_searcher.py +467 -0
- aipt_v2/intelligence/searchers/google_searcher.py +281 -0
- aipt_v2/intelligence/tools.json +443 -0
- aipt_v2/intelligence/triage.py +670 -0
- aipt_v2/interface/__init__.py +5 -0
- aipt_v2/interface/cli.py +230 -0
- aipt_v2/interface/main.py +501 -0
- aipt_v2/interface/tui.py +1276 -0
- aipt_v2/interface/utils.py +583 -0
- aipt_v2/llm/__init__.py +39 -0
- aipt_v2/llm/config.py +26 -0
- aipt_v2/llm/llm.py +514 -0
- aipt_v2/llm/memory.py +214 -0
- aipt_v2/llm/request_queue.py +89 -0
- aipt_v2/llm/utils.py +89 -0
- aipt_v2/models/__init__.py +15 -0
- aipt_v2/models/findings.py +295 -0
- aipt_v2/models/phase_result.py +224 -0
- aipt_v2/models/scan_config.py +207 -0
- aipt_v2/monitoring/grafana/dashboards/aipt-dashboard.json +355 -0
- aipt_v2/monitoring/grafana/dashboards/default.yml +17 -0
- aipt_v2/monitoring/grafana/datasources/prometheus.yml +17 -0
- aipt_v2/monitoring/prometheus.yml +60 -0
- aipt_v2/orchestration/__init__.py +52 -0
- aipt_v2/orchestration/pipeline.py +398 -0
- aipt_v2/orchestration/progress.py +300 -0
- aipt_v2/orchestration/scheduler.py +296 -0
- aipt_v2/orchestrator.py +2284 -0
- aipt_v2/payloads/__init__.py +27 -0
- aipt_v2/payloads/cmdi.py +150 -0
- aipt_v2/payloads/sqli.py +263 -0
- aipt_v2/payloads/ssrf.py +204 -0
- aipt_v2/payloads/templates.py +222 -0
- aipt_v2/payloads/traversal.py +166 -0
- aipt_v2/payloads/xss.py +204 -0
- aipt_v2/prompts/__init__.py +60 -0
- aipt_v2/proxy/__init__.py +29 -0
- aipt_v2/proxy/history.py +352 -0
- aipt_v2/proxy/interceptor.py +452 -0
- aipt_v2/recon/__init__.py +44 -0
- aipt_v2/recon/dns.py +241 -0
- aipt_v2/recon/osint.py +367 -0
- aipt_v2/recon/subdomain.py +372 -0
- aipt_v2/recon/tech_detect.py +311 -0
- aipt_v2/reports/__init__.py +17 -0
- aipt_v2/reports/generator.py +313 -0
- aipt_v2/reports/html_report.py +378 -0
- aipt_v2/runtime/__init__.py +44 -0
- aipt_v2/runtime/base.py +30 -0
- aipt_v2/runtime/docker.py +401 -0
- aipt_v2/runtime/local.py +346 -0
- aipt_v2/runtime/tool_server.py +205 -0
- aipt_v2/scanners/__init__.py +28 -0
- aipt_v2/scanners/base.py +273 -0
- aipt_v2/scanners/nikto.py +244 -0
- aipt_v2/scanners/nmap.py +402 -0
- aipt_v2/scanners/nuclei.py +273 -0
- aipt_v2/scanners/web.py +454 -0
- aipt_v2/scripts/security_audit.py +366 -0
- aipt_v2/telemetry/__init__.py +7 -0
- aipt_v2/telemetry/tracer.py +347 -0
- aipt_v2/terminal/__init__.py +28 -0
- aipt_v2/terminal/executor.py +400 -0
- aipt_v2/terminal/sandbox.py +350 -0
- aipt_v2/tools/__init__.py +44 -0
- aipt_v2/tools/active_directory/__init__.py +78 -0
- aipt_v2/tools/active_directory/ad_config.py +238 -0
- aipt_v2/tools/active_directory/bloodhound_wrapper.py +447 -0
- aipt_v2/tools/active_directory/kerberos_attacks.py +430 -0
- aipt_v2/tools/active_directory/ldap_enum.py +533 -0
- aipt_v2/tools/active_directory/smb_attacks.py +505 -0
- aipt_v2/tools/agents_graph/__init__.py +19 -0
- aipt_v2/tools/agents_graph/agents_graph_actions.py +69 -0
- aipt_v2/tools/api_security/__init__.py +76 -0
- aipt_v2/tools/api_security/api_discovery.py +608 -0
- aipt_v2/tools/api_security/graphql_scanner.py +622 -0
- aipt_v2/tools/api_security/jwt_analyzer.py +577 -0
- aipt_v2/tools/api_security/openapi_fuzzer.py +761 -0
- aipt_v2/tools/browser/__init__.py +5 -0
- aipt_v2/tools/browser/browser_actions.py +238 -0
- aipt_v2/tools/browser/browser_instance.py +535 -0
- aipt_v2/tools/browser/tab_manager.py +344 -0
- aipt_v2/tools/cloud/__init__.py +70 -0
- aipt_v2/tools/cloud/cloud_config.py +273 -0
- aipt_v2/tools/cloud/cloud_scanner.py +639 -0
- aipt_v2/tools/cloud/prowler_tool.py +571 -0
- aipt_v2/tools/cloud/scoutsuite_tool.py +359 -0
- aipt_v2/tools/executor.py +307 -0
- aipt_v2/tools/parser.py +408 -0
- aipt_v2/tools/proxy/__init__.py +5 -0
- aipt_v2/tools/proxy/proxy_actions.py +103 -0
- aipt_v2/tools/proxy/proxy_manager.py +789 -0
- aipt_v2/tools/registry.py +196 -0
- aipt_v2/tools/scanners/__init__.py +343 -0
- aipt_v2/tools/scanners/acunetix_tool.py +712 -0
- aipt_v2/tools/scanners/burp_tool.py +631 -0
- aipt_v2/tools/scanners/config.py +156 -0
- aipt_v2/tools/scanners/nessus_tool.py +588 -0
- aipt_v2/tools/scanners/zap_tool.py +612 -0
- aipt_v2/tools/terminal/__init__.py +5 -0
- aipt_v2/tools/terminal/terminal_actions.py +37 -0
- aipt_v2/tools/terminal/terminal_manager.py +153 -0
- aipt_v2/tools/terminal/terminal_session.py +449 -0
- aipt_v2/tools/tool_processing.py +108 -0
- aipt_v2/utils/__init__.py +17 -0
- aipt_v2/utils/logging.py +201 -0
- aipt_v2/utils/model_manager.py +187 -0
- aipt_v2/utils/searchers/__init__.py +269 -0
- aiptx-2.0.2.dist-info/METADATA +324 -0
- aiptx-2.0.2.dist-info/RECORD +165 -0
- aiptx-2.0.2.dist-info/WHEEL +5 -0
- aiptx-2.0.2.dist-info/entry_points.txt +7 -0
- aiptx-2.0.2.dist-info/licenses/LICENSE +21 -0
- aiptx-2.0.2.dist-info/top_level.txt +1 -0
aipt_v2/tools/parser.py
ADDED
@@ -0,0 +1,408 @@
"""
AIPT Output Parser - Extract structured data from tool outputs
Uses regex patterns + LLM fallback for complex parsing.

Supports:
- nmap, masscan (port scanning)
- gobuster, ffuf (directory enumeration)
- nuclei (vulnerability scanning)
- hydra (credential brute-forcing)
- Custom patterns
"""

import re
from typing import Optional
from dataclasses import dataclass, field


@dataclass
class Finding:
    """A structured finding from tool output"""
    type: str  # port, service, vuln, credential, host, path
    value: str
    description: str
    severity: str = "info"  # info, low, medium, high, critical
    metadata: dict = field(default_factory=dict)


class OutputParser:
    """
    Parse tool outputs into structured findings.

    Uses regex patterns for known tools, with LLM fallback
    for unstructured or unknown outputs.
    """

    # Regex patterns for common tools
    PATTERNS = {
        # nmap patterns
        "nmap_port": re.compile(
            r"(\d+)/(tcp|udp)\s+(\w+)\s+(\S+)(?:\s+(.*))?",
            re.MULTILINE
        ),
        "nmap_host": re.compile(
            r"Nmap scan report for\s+(\S+)(?:\s+\((\d+\.\d+\.\d+\.\d+)\))?",
            re.MULTILINE
        ),

        # masscan patterns
        "masscan_port": re.compile(
            r"Discovered open port\s+(\d+)/(tcp|udp)\s+on\s+(\S+)",
            re.MULTILINE
        ),

        # gobuster/ffuf patterns
        "directory": re.compile(
            r"(/\S+)\s+\(Status:\s*(\d+)\)",
            re.MULTILINE
        ),
        "ffuf_result": re.compile(
            r'"url":\s*"([^"]+)".*?"status":\s*(\d+)',
            re.MULTILINE
        ),

        # nuclei patterns
        "nuclei_vuln": re.compile(
            r"\[([^\]]+)\]\s+\[([^\]]+)\]\s+\[([^\]]+)\]\s+(.+)",
            re.MULTILINE
        ),

        # hydra patterns
        "hydra_cred": re.compile(
            r"\[(\d+)\]\[(\w+)\]\s+host:\s+(\S+)\s+login:\s+(\S+)\s+password:\s+(\S+)",
            re.MULTILINE
        ),

        # generic patterns
        "ip_address": re.compile(
            r"\b(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\b"
        ),
        "domain": re.compile(
            r"\b([a-zA-Z0-9][-a-zA-Z0-9]*\.)+[a-zA-Z]{2,}\b"
        ),
        "email": re.compile(
            r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b"
        ),
        "hash_md5": re.compile(
            r"\b[a-fA-F0-9]{32}\b"
        ),
        "hash_sha1": re.compile(
            r"\b[a-fA-F0-9]{40}\b"
        ),
        "cve": re.compile(
            r"CVE-\d{4}-\d{4,}",
            re.IGNORECASE
        ),
    }

    def __init__(self):
        pass

    def parse_nmap(self, output: str) -> list[Finding]:
        """Parse nmap output - public wrapper"""
        return self._parse_nmap(output)

    def parse_masscan(self, output: str) -> list[Finding]:
        """Parse masscan output - public wrapper"""
        return self._parse_masscan(output)

    def parse_directory(self, output: str) -> list[Finding]:
        """Parse directory brute-force output - public wrapper"""
        return self._parse_directory(output)

    def parse_nuclei(self, output: str) -> list[Finding]:
        """Parse nuclei output - public wrapper"""
        return self._parse_nuclei(output)

    def parse_hydra(self, output: str) -> list[Finding]:
        """Parse hydra output - public wrapper"""
        return self._parse_hydra(output)

    def parse_generic(self, output: str) -> list[Finding]:
        """Parse generic patterns from output - public wrapper"""
        return self._parse_generic(output)

    def parse(
        self,
        output: str,
        tool_name: Optional[str] = None,
    ) -> list[Finding]:
        """
        Parse tool output into structured findings.

        Args:
            output: Raw tool output
            tool_name: Name of the tool (for tool-specific parsing)

        Returns:
            List of Finding objects
        """
        findings = []

        if not output:
            return findings

        # Tool-specific parsing
        if tool_name:
            tool_findings = self._parse_tool_specific(output, tool_name.lower())
            findings.extend(tool_findings)

        # Generic pattern matching
        generic_findings = self._parse_generic(output)
        findings.extend(generic_findings)

        # Deduplicate
        findings = self._deduplicate(findings)

        return findings

    def _parse_tool_specific(self, output: str, tool_name: str) -> list[Finding]:
        """Parse output based on known tool"""
        findings = []

        if tool_name in ["nmap", "nmap-scan"]:
            findings.extend(self._parse_nmap(output))

        elif tool_name == "masscan":
            findings.extend(self._parse_masscan(output))

        elif tool_name in ["gobuster", "ffuf", "dirb", "dirbuster"]:
            findings.extend(self._parse_directory(output))

        elif tool_name == "nuclei":
            findings.extend(self._parse_nuclei(output))

        elif tool_name == "hydra":
            findings.extend(self._parse_hydra(output))

        elif tool_name in ["nikto", "wpscan"]:
            findings.extend(self._parse_vuln_scanner(output))

        return findings

    def _parse_nmap(self, output: str) -> list[Finding]:
        """Parse nmap output"""
        findings = []

        # Parse hosts
        for match in self.PATTERNS["nmap_host"].finditer(output):
            hostname = match.group(1)
            ip = match.group(2) or hostname
            findings.append(Finding(
                type="host",
                value=ip,
                description=f"Host discovered: {hostname} ({ip})",
                metadata={"hostname": hostname, "ip": ip}
            ))

        # Parse ports
        for match in self.PATTERNS["nmap_port"].finditer(output):
            port = match.group(1)
            protocol = match.group(2)
            state = match.group(3)
            service = match.group(4)
            version = match.group(5) or ""

            if state == "open":
                findings.append(Finding(
                    type="port",
                    value=f"{port}/{protocol}",
                    description=f"Open port {port}/{protocol}: {service} {version}".strip(),
                    metadata={
                        "port": int(port),
                        "protocol": protocol,
                        "service": service,
                        "version": version,
                    }
                ))

        return findings

    def _parse_masscan(self, output: str) -> list[Finding]:
        """Parse masscan output"""
        findings = []

        for match in self.PATTERNS["masscan_port"].finditer(output):
            port = match.group(1)
            protocol = match.group(2)
            ip = match.group(3)

            findings.append(Finding(
                type="port",
                value=f"{ip}:{port}/{protocol}",
                description=f"Open port {port}/{protocol} on {ip}",
                metadata={"ip": ip, "port": int(port), "protocol": protocol}
            ))

        return findings

    def _parse_directory(self, output: str) -> list[Finding]:
        """Parse directory brute-force output"""
        findings = []

        # Standard format
        for match in self.PATTERNS["directory"].finditer(output):
            path = match.group(1)
            status = match.group(2)

            severity = "info"
            if status in ["200", "301", "302"]:
                severity = "low"
                if any(kw in path.lower() for kw in ["admin", "backup", "config", "upload"]):
                    severity = "medium"

            findings.append(Finding(
                type="path",
                value=path,
                description=f"Directory found: {path} (Status: {status})",
                severity=severity,
                metadata={"status_code": int(status)}
            ))

        # ffuf JSON format
        for match in self.PATTERNS["ffuf_result"].finditer(output):
            url = match.group(1)
            status = match.group(2)

            findings.append(Finding(
                type="path",
                value=url,
                description=f"Endpoint found: {url} (Status: {status})",
                severity="low",
                metadata={"status_code": int(status)}
            ))

        return findings

    def _parse_nuclei(self, output: str) -> list[Finding]:
        """Parse nuclei vulnerability scanner output"""
        findings = []

        for match in self.PATTERNS["nuclei_vuln"].finditer(output):
            template_id = match.group(1)
            severity = match.group(2).lower()
            protocol = match.group(3)
            target = match.group(4)

            # Normalize severity
            if severity not in ["info", "low", "medium", "high", "critical"]:
                severity = "info"

            findings.append(Finding(
                type="vuln",
                value=template_id,
                description=f"Vulnerability: {template_id} on {target}",
                severity=severity,
                metadata={
                    "template": template_id,
                    "protocol": protocol,
                    "target": target,
                }
            ))

        return findings

    def _parse_hydra(self, output: str) -> list[Finding]:
        """Parse hydra brute-force output"""
        findings = []

        for match in self.PATTERNS["hydra_cred"].finditer(output):
            port = match.group(1)
            service = match.group(2)
            host = match.group(3)
            username = match.group(4)
            password = match.group(5)

            findings.append(Finding(
                type="credential",
                value=f"{username}:{password}",
                description=f"Valid credentials found for {service} on {host}:{port}",
                severity="critical",
                metadata={
                    "host": host,
                    "port": int(port),
                    "service": service,
                    "username": username,
                    "password": password,
                }
            ))

        return findings

    def _parse_vuln_scanner(self, output: str) -> list[Finding]:
        """Parse generic vulnerability scanner output (nikto, wpscan)"""
        findings = []

        # Look for CVEs
        for match in self.PATTERNS["cve"].finditer(output):
            cve = match.group(0).upper()
            findings.append(Finding(
                type="vuln",
                value=cve,
                description=f"CVE detected: {cve}",
                severity="high",
                metadata={"cve": cve}
            ))

        # Look for common vulnerability keywords
        vuln_keywords = [
            ("SQL injection", "high"),
            ("XSS", "medium"),
            ("CSRF", "medium"),
            ("directory listing", "low"),
            ("information disclosure", "medium"),
            ("remote code execution", "critical"),
            ("RCE", "critical"),
            ("LFI", "high"),
            ("RFI", "high"),
        ]

        output_lower = output.lower()
        for keyword, severity in vuln_keywords:
            if keyword.lower() in output_lower:
                findings.append(Finding(
                    type="vuln",
                    value=keyword,
                    description=f"Potential vulnerability: {keyword}",
                    severity=severity,
                ))

        return findings

    def _parse_generic(self, output: str) -> list[Finding]:
        """Extract generic patterns from any output"""
        findings = []

        # Extract CVEs
        for match in self.PATTERNS["cve"].finditer(output):
            cve = match.group(0).upper()
            findings.append(Finding(
                type="vuln",
                value=cve,
                description=f"CVE reference: {cve}",
                severity="medium",
            ))

        # Extract emails (potential targets for phishing)
        for match in self.PATTERNS["email"].finditer(output):
            email = match.group(0)
            findings.append(Finding(
                type="info",
                value=email,
                description=f"Email discovered: {email}",
            ))

        return findings

    def _deduplicate(self, findings: list[Finding]) -> list[Finding]:
        """Remove duplicate findings"""
        seen = set()
        unique = []

        for finding in findings:
            key = (finding.type, finding.value)
            if key not in seen:
                seen.add(key)
                unique.append(finding)

        return unique
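To make the parser's behavior concrete, here is a minimal usage sketch. Only OutputParser and its parse() method come from the file above; the sample scan text is fabricated for illustration and is not part of the package.

from aipt_v2.tools.parser import OutputParser

# Fabricated sample in the style of nmap's normal output (illustrative only)
sample = """\
Nmap scan report for app.example.com (10.0.0.5)
22/tcp   open  ssh     OpenSSH 8.9p1
80/tcp   open  http    nginx 1.18.0
"""

parser = OutputParser()
for finding in parser.parse(sample, tool_name="nmap"):
    # Each Finding carries type, value, description, severity, and metadata
    print(f"[{finding.severity}] {finding.type}: {finding.value}")

With tool_name="nmap" this routes through _parse_nmap, yielding one host finding (10.0.0.5) and one port finding per open port, then deduplicates on (type, value).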
aipt_v2/tools/proxy/proxy_actions.py
ADDED
@@ -0,0 +1,103 @@
from __future__ import annotations

from typing import Any, Literal

from aipt_v2.tools.registry import register_tool

from .proxy_manager import get_proxy_manager


RequestPart = Literal["request", "response"]


@register_tool
def list_requests(
    httpql_filter: str | None = None,
    start_page: int = 1,
    end_page: int = 1,
    page_size: int = 50,
    sort_by: Literal[
        "timestamp",
        "host",
        "method",
        "path",
        "status_code",
        "response_time",
        "response_size",
        "source",
    ] = "timestamp",
    sort_order: Literal["asc", "desc"] = "desc",
    scope_id: str | None = None,
) -> dict[str, Any]:
    manager = get_proxy_manager()
    return manager.list_requests(
        httpql_filter, start_page, end_page, page_size, sort_by, sort_order, scope_id
    )


@register_tool
def view_request(
    request_id: str,
    part: RequestPart = "request",
    search_pattern: str | None = None,
    page: int = 1,
    page_size: int = 50,
) -> dict[str, Any]:
    manager = get_proxy_manager()
    return manager.view_request(request_id, part, search_pattern, page, page_size)


@register_tool
def send_request(
    method: str,
    url: str,
    headers: dict[str, str] | None = None,
    body: str = "",
    timeout: int = 30,
) -> dict[str, Any]:
    if headers is None:
        headers = {}
    manager = get_proxy_manager()
    return manager.send_simple_request(method, url, headers, body, timeout)


@register_tool
def repeat_request(
    request_id: str,
    modifications: dict[str, Any] | None = None,
) -> dict[str, Any]:
    if modifications is None:
        modifications = {}
    manager = get_proxy_manager()
    return manager.repeat_request(request_id, modifications)


@register_tool
def scope_rules(
    action: Literal["get", "list", "create", "update", "delete"],
    allowlist: list[str] | None = None,
    denylist: list[str] | None = None,
    scope_id: str | None = None,
    scope_name: str | None = None,
) -> dict[str, Any]:
    manager = get_proxy_manager()
    return manager.scope_rules(action, allowlist, denylist, scope_id, scope_name)


@register_tool
def list_sitemap(
    scope_id: str | None = None,
    parent_id: str | None = None,
    depth: Literal["DIRECT", "ALL"] = "DIRECT",
    page: int = 1,
) -> dict[str, Any]:
    manager = get_proxy_manager()
    return manager.list_sitemap(scope_id, parent_id, depth, page)


@register_tool
def view_sitemap_entry(
    entry_id: str,
) -> dict[str, Any]:
    manager = get_proxy_manager()
    return manager.view_sitemap_entry(entry_id)
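These registered actions are thin wrappers that delegate to a shared proxy manager. As a rough sketch of how they might be invoked once that manager's backend is available — the URL and header below are placeholder values, not part of the package:

from aipt_v2.tools.proxy.proxy_actions import send_request, list_requests

# Placeholder target; assumes the proxy manager backend is running
response = send_request(
    method="GET",
    url="http://testserver.local/api/health",
    headers={"X-Scan": "aiptx"},
)

# Page through captured traffic, newest first (the default sort_order="desc")
recent = list_requests(page_size=10)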