souleyez 2.22.0__py3-none-any.whl → 2.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- souleyez/__init__.py +1 -1
- souleyez/assets/__init__.py +1 -0
- souleyez/assets/souleyez-icon.png +0 -0
- souleyez/core/msf_sync_manager.py +15 -5
- souleyez/core/tool_chaining.py +126 -26
- souleyez/detection/validator.py +4 -2
- souleyez/docs/README.md +2 -2
- souleyez/docs/user-guide/installation.md +14 -1
- souleyez/engine/background.py +17 -1
- souleyez/engine/result_handler.py +89 -0
- souleyez/main.py +103 -4
- souleyez/parsers/crackmapexec_parser.py +101 -43
- souleyez/parsers/dnsrecon_parser.py +50 -35
- souleyez/parsers/enum4linux_parser.py +101 -21
- souleyez/parsers/http_fingerprint_parser.py +319 -0
- souleyez/parsers/hydra_parser.py +56 -5
- souleyez/parsers/impacket_parser.py +123 -44
- souleyez/parsers/john_parser.py +47 -14
- souleyez/parsers/msf_parser.py +20 -5
- souleyez/parsers/nmap_parser.py +48 -27
- souleyez/parsers/smbmap_parser.py +39 -23
- souleyez/parsers/sqlmap_parser.py +18 -9
- souleyez/parsers/theharvester_parser.py +21 -13
- souleyez/plugins/http_fingerprint.py +592 -0
- souleyez/plugins/nuclei.py +41 -17
- souleyez/ui/interactive.py +99 -7
- souleyez/ui/setup_wizard.py +93 -5
- souleyez/ui/tool_setup.py +52 -52
- souleyez/utils/tool_checker.py +45 -5
- {souleyez-2.22.0.dist-info → souleyez-2.26.0.dist-info}/METADATA +16 -3
- {souleyez-2.22.0.dist-info → souleyez-2.26.0.dist-info}/RECORD +35 -31
- {souleyez-2.22.0.dist-info → souleyez-2.26.0.dist-info}/WHEEL +0 -0
- {souleyez-2.22.0.dist-info → souleyez-2.26.0.dist-info}/entry_points.txt +0 -0
- {souleyez-2.22.0.dist-info → souleyez-2.26.0.dist-info}/licenses/LICENSE +0 -0
- {souleyez-2.22.0.dist-info → souleyez-2.26.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,592 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
souleyez.plugins.http_fingerprint - Lightweight HTTP fingerprinting
|
|
4
|
+
|
|
5
|
+
Detects:
|
|
6
|
+
- Server software (Apache, nginx, IIS, etc.)
|
|
7
|
+
- WAFs (Cloudflare, Akamai, AWS WAF, etc.)
|
|
8
|
+
- CDNs (Cloudflare, Fastly, CloudFront, etc.)
|
|
9
|
+
- Managed hosting platforms (Squarespace, Wix, Shopify, etc.)
|
|
10
|
+
- Technologies (via headers and cookies)
|
|
11
|
+
|
|
12
|
+
This runs BEFORE web vulnerability scanners to enable smarter tool configuration.
|
|
13
|
+
"""
|
|
14
|
+
import json
|
|
15
|
+
import time
|
|
16
|
+
import ssl
|
|
17
|
+
import socket
|
|
18
|
+
from typing import Dict, Any, List, Optional
|
|
19
|
+
from urllib.parse import urlparse
|
|
20
|
+
|
|
21
|
+
from .plugin_base import PluginBase
|
|
22
|
+
|
|
23
|
+
# CLI help metadata for this plugin: display name, long description, usage
# string, example invocations, supported flags, and preset scan configurations.
# Also exposed as HttpFingerprintPlugin.HELP.
HELP = {
    "name": "HTTP Fingerprint - Lightweight Web Reconnaissance",
    "description": (
        "Performs lightweight HTTP fingerprinting to detect server software, "
        "WAFs, CDNs, and managed hosting platforms.\n\n"
        "This runs automatically before web vulnerability scanners to enable "
        "smarter tool configuration. For example, if Squarespace is detected, "
        "nikto will skip CGI enumeration (pointless on managed platforms).\n\n"
        "Detection categories:\n"
        "- Server software (Apache, nginx, IIS, etc.)\n"
        "- WAFs (Cloudflare, Akamai, AWS WAF, Imperva, etc.)\n"
        "- CDNs (Cloudflare, Fastly, CloudFront, Akamai, etc.)\n"
        "- Managed hosting (Squarespace, Wix, Shopify, Netlify, etc.)\n"
    ),
    "usage": "souleyez jobs enqueue http_fingerprint <target>",
    "examples": [
        "souleyez jobs enqueue http_fingerprint http://example.com",
        "souleyez jobs enqueue http_fingerprint https://example.com",
    ],
    # [flag, description] pairs; --timeout is parsed in run().
    "flags": [
        ["--timeout <sec>", "Request timeout (default: 10)"],
    ],
    "presets": [
        {"name": "Quick Fingerprint", "args": [], "desc": "Fast fingerprint scan"},
    ],
}
|
|
49
|
+
|
|
50
|
+
# WAF detection signatures
# Format: {header_name: {value_pattern: waf_name}}
# Matching semantics (see HttpFingerprintPlugin._detect_waf):
# - 'headers': for a dict value, an empty-string pattern means "presence of
#   the header alone is enough"; otherwise the pattern must appear as a
#   substring of the lower-cased header value.
# - 'cookies': each key is substring-matched against the space-joined,
#   lower-cased cookie strings.
WAF_SIGNATURES = {
    # Header-based detection
    'headers': {
        'server': {
            'cloudflare': 'Cloudflare',
            'akamaighost': 'Akamai',
            'akamainetworkstorage': 'Akamai',
            'awselb': 'AWS ELB',
            'bigip': 'F5 BIG-IP',
            'barracuda': 'Barracuda',
            'denyall': 'DenyAll',
            'fortigate': 'Fortinet FortiGate',
            'imperva': 'Imperva',
            'incapsula': 'Imperva Incapsula',
            'netscaler': 'Citrix NetScaler',
            'sucuri': 'Sucuri',
            'wallarm': 'Wallarm',
        },
        'x-powered-by': {
            'aws lambda': 'AWS Lambda',
            'express': 'Express.js',
            'php': 'PHP',
            'asp.net': 'ASP.NET',
        },
        'x-sucuri-id': {'': 'Sucuri'},
        'x-sucuri-cache': {'': 'Sucuri'},
        'cf-ray': {'': 'Cloudflare'},
        'cf-cache-status': {'': 'Cloudflare'},
        'x-amz-cf-id': {'': 'AWS CloudFront'},
        'x-amz-cf-pop': {'': 'AWS CloudFront'},
        'x-akamai-transformed': {'': 'Akamai'},
        'x-cache': {
            'cloudfront': 'AWS CloudFront',
            'varnish': 'Varnish',
        },
        'x-fastly-request-id': {'': 'Fastly'},
        'x-served-by': {
            'cache-': 'Fastly',
        },
        'x-cdn': {
            'incapsula': 'Imperva Incapsula',
            'cloudflare': 'Cloudflare',
        },
        'x-iinfo': {'': 'Imperva Incapsula'},
        'x-proxy-id': {'': 'Imperva'},
        'x-request-id': {},  # Generic, but useful context (empty dict: never yields a match)
        'x-fw-protection': {'': 'Unknown WAF'},
        'x-protected-by': {'': 'Unknown WAF'},
        'x-waf-status': {'': 'Unknown WAF'},
        'x-denied-reason': {'': 'Unknown WAF'},
    },
    # Cookie-based detection
    'cookies': {
        '__cfduid': 'Cloudflare',
        'cf_clearance': 'Cloudflare',
        '__cf_bm': 'Cloudflare Bot Management',
        'incap_ses': 'Imperva Incapsula',
        'visid_incap': 'Imperva Incapsula',
        'nlbi_': 'Imperva Incapsula',
        'ak_bmsc': 'Akamai Bot Manager',
        'bm_sz': 'Akamai Bot Manager',
        '_abck': 'Akamai Bot Manager',
        'awsalb': 'AWS ALB',
        'awsalbcors': 'AWS ALB',
        # NOTE(review): 'ts' is a two-character substring and will match any
        # cookie text containing "ts" (e.g. "timestamp") under the substring
        # matching in _detect_waf — likely false-positive-prone; confirm the
        # intended F5 cookie name.
        'ts': 'F5 BIG-IP',
        'bigipserver': 'F5 BIG-IP',
        'citrix_ns_id': 'Citrix NetScaler',
        'sucuri_cloudproxy': 'Sucuri',
    },
}
|
|
122
|
+
|
|
123
|
+
# CDN detection signatures
# Matching semantics (see HttpFingerprintPlugin._detect_cdn):
# - 'headers': a string value means "header presence implies this CDN";
#   a nested dict means the lower-cased header value must contain one of
#   the given substrings.
# - 'server': each key is substring-matched against the lower-cased
#   Server header value.
CDN_SIGNATURES = {
    'headers': {
        'cf-ray': 'Cloudflare',
        'cf-cache-status': 'Cloudflare',
        'x-amz-cf-id': 'AWS CloudFront',
        'x-amz-cf-pop': 'AWS CloudFront',
        'x-cache': {
            'cloudfront': 'AWS CloudFront',
            'hit from cloudfront': 'AWS CloudFront',
        },
        'x-fastly-request-id': 'Fastly',
        'x-served-by': 'Fastly',
        'x-akamai-transformed': 'Akamai',
        'x-akamai-request-id': 'Akamai',
        'x-edge-location': 'Generic CDN',
        'x-cdn': 'Generic CDN',
        'x-cache-status': 'Generic CDN',
        'x-varnish': 'Varnish',
        'via': {
            'cloudfront': 'AWS CloudFront',
            'varnish': 'Varnish',
            'akamai': 'Akamai',
        },
        'x-azure-ref': 'Azure CDN',
        'x-msedge-ref': 'Azure CDN',
        # NOTE(review): these prefix-style keys ('x-goog-', 'x-bunny-') are
        # looked up via headers.get() in _detect_cdn, i.e. as exact header
        # names, so they only match a header literally named that — confirm
        # whether prefix matching was intended here.
        'x-goog-': 'Google Cloud CDN',
        'x-bunny-': 'Bunny CDN',
        'x-hw': 'Huawei CDN',
    },
    'server': {
        'cloudflare': 'Cloudflare',
        'akamaighost': 'Akamai',
        'cloudfront': 'AWS CloudFront',
        'fastly': 'Fastly',
        'varnish': 'Varnish',
        'keycdn': 'KeyCDN',
        'bunnycdn': 'Bunny CDN',
        'cdn77': 'CDN77',
        'stackpath': 'StackPath',
        'limelight': 'Limelight',
        'azure': 'Azure CDN',
    },
}
|
|
167
|
+
|
|
168
|
+
# Managed hosting platform signatures
# Matching semantics (see HttpFingerprintPlugin._detect_managed_hosting):
# - 'server': substring match against the lower-cased Server header.
# - 'headers': prefix match — a platform is reported when any response
#   header NAME starts with the given key (so 'x-vercel-' matches
#   'x-vercel-id', etc.).
# - 'cookies': substring match against the space-joined cookie strings.
# The first match wins and only a single platform is reported.
MANAGED_HOSTING_SIGNATURES = {
    'server': {
        'squarespace': 'Squarespace',
        'wix': 'Wix',
        'shopify': 'Shopify',
        'weebly': 'Weebly',
        'webflow': 'Webflow',
        'ghost': 'Ghost',
        'medium': 'Medium',
        'tumblr': 'Tumblr',
        'blogger': 'Blogger/Blogspot',
        'wordpress.com': 'WordPress.com',
        'netlify': 'Netlify',
        'vercel': 'Vercel',
        'heroku': 'Heroku',
        'github': 'GitHub Pages',
        'gitlab': 'GitLab Pages',
        'firebase': 'Firebase Hosting',
        'render': 'Render',
        'railway': 'Railway',
        'fly': 'Fly.io',
        'deno': 'Deno Deploy',
    },
    'headers': {
        'x-shopify-stage': 'Shopify',
        'x-shopify-request-id': 'Shopify',
        'x-wix-request-id': 'Wix',
        'x-wix-renderer-server': 'Wix',
        'x-sqsp-edge': 'Squarespace',
        'x-squarespace-': 'Squarespace',
        'x-ghost-': 'Ghost',
        'x-medium-content': 'Medium',
        'x-tumblr-': 'Tumblr',
        'x-blogger-': 'Blogger/Blogspot',
        'x-netlify-': 'Netlify',
        'x-nf-request-id': 'Netlify',
        'x-vercel-': 'Vercel',
        'x-vercel-id': 'Vercel',
        'x-heroku-': 'Heroku',
        'x-github-request-id': 'GitHub Pages',
        'x-firebase-': 'Firebase Hosting',
        'x-render-origin-server': 'Render',
        'fly-request-id': 'Fly.io',
    },
    'cookies': {
        'wordpress_': 'WordPress',
        'wp-settings': 'WordPress',
        '_shopify_': 'Shopify',
        'wixSession': 'Wix',
    },
}
|
|
220
|
+
|
|
221
|
+
# Server software signatures: substring of the lower-cased Server header
# -> product name. Iteration (insertion) order matters: _fingerprint stops
# at the first matching key, so the more specific 'microsoft-iis' must stay
# ahead of the bare 'iis' entry.
SERVER_SIGNATURES = {
    'apache': 'Apache',
    'nginx': 'nginx',
    'microsoft-iis': 'Microsoft IIS',
    'iis': 'Microsoft IIS',
    'lighttpd': 'lighttpd',
    'litespeed': 'LiteSpeed',
    'openresty': 'OpenResty',
    'caddy': 'Caddy',
    'tomcat': 'Apache Tomcat',
    'jetty': 'Eclipse Jetty',
    'gunicorn': 'Gunicorn',
    'uvicorn': 'Uvicorn',
    'werkzeug': 'Werkzeug (Flask)',
    'waitress': 'Waitress',
    'cowboy': 'Cowboy (Erlang)',
    'kestrel': 'Kestrel (ASP.NET)',
    'express': 'Express.js',
}
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
class HttpFingerprintPlugin(PluginBase):
    """Lightweight HTTP fingerprinting plugin.

    Issues a single HTTP(S) request to the target and inspects the response
    headers, cookies, and TLS handshake to identify server software, WAFs,
    CDNs, and managed hosting platforms. The scan runs entirely in Python
    (no external tool), so build_command() returns None and run() performs
    the work itself.

    Fixes over the previous revision:
    - Set-Cookie values are now collected per-header via get_all() instead
      of splitting the comma-joined header on ',', which corrupted cookies
      whose Expires attribute contains a comma.
    - The HTTPError branch now parses cookies too (it previously passed []
      to the detectors, losing WAF/hosting evidence from 403/503 pages).
    """

    name = "HTTP Fingerprint"
    tool = "http_fingerprint"
    category = "recon"
    HELP = HELP  # module-level help metadata re-exposed on the class

    def build_command(self, target: str, args: List[str] = None, label: str = "", log_path: str = None):
        """
        HTTP fingerprinting is done in Python, not via external command.
        Return None to use run() method instead.
        """
        return None

    def run(self, target: str, args: List[str] = None, label: str = "", log_path: str = None) -> int:
        """Execute the HTTP fingerprint scan against *target*.

        Appends a human-readable report plus a machine-parseable JSON
        section (delimited by ``=== JSON_RESULT ===`` markers) to
        *log_path* when given.

        Returns 0 on success, 1 on unexpected failure.
        """
        args = args or []
        timeout = 10

        # Parse optional "--timeout <sec>"; a non-integer value is ignored
        # and the default kept.
        for i, arg in enumerate(args):
            if arg == '--timeout' and i + 1 < len(args):
                try:
                    timeout = int(args[i + 1])
                except ValueError:
                    pass

        # Ensure the target has a scheme; bare hosts default to http://.
        if not target.startswith(('http://', 'https://')):
            target = f'http://{target}'

        try:
            result = self._fingerprint(target, timeout)
            output = self._format_output(target, result, label)

            if log_path:
                with open(log_path, 'a', encoding='utf-8', errors='replace') as fh:
                    fh.write(output)
                    # Append structured JSON so the result parser can recover
                    # the full fingerprint without re-parsing the report text.
                    fh.write("\n\n=== JSON_RESULT ===\n")
                    fh.write(json.dumps(result, indent=2))
                    fh.write("\n=== END_JSON_RESULT ===\n")

            return 0

        except Exception as e:
            error_output = "=== Plugin: HTTP Fingerprint ===\n"
            error_output += f"Target: {target}\n"
            error_output += f"Error: {type(e).__name__}: {e}\n"

            if log_path:
                with open(log_path, 'a', encoding='utf-8', errors='replace') as fh:
                    fh.write(error_output)

            return 1

    def _fingerprint(self, url: str, timeout: int = 10) -> Dict[str, Any]:
        """
        Perform HTTP fingerprinting on target URL.

        Returns dict with:
        - server: Server software detected
        - waf: WAF/protection detected (if any)
        - cdn: CDN detected (if any)
        - managed_hosting: Managed platform detected (if any)
        - headers: Raw response headers
        - technologies: List of detected technologies
        - tls: TLS/SSL information (for HTTPS)
        On a network failure the dict gains an 'error' key instead.
        """
        import urllib.request
        import urllib.error

        result = {
            'server': None,
            'server_version': None,
            'waf': [],
            'cdn': [],
            'managed_hosting': None,
            'technologies': [],
            'headers': {},
            'cookies': [],
            'tls': None,
            'status_code': None,
            'redirect_url': None,
        }

        parsed = urlparse(url)
        is_https = parsed.scheme == 'https'

        # Send common browser headers so WAF/CDN edges respond the way they
        # would to a real browser; identity encoding avoids decompression.
        req = urllib.request.Request(
            url,
            headers={
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                'Accept-Language': 'en-US,en;q=0.5',
                'Accept-Encoding': 'identity',
                'Connection': 'close',
            }
        )

        try:
            if is_https:
                # Certificate validation is deliberately disabled: hosts with
                # self-signed/expired certs should still be fingerprinted.
                ctx = ssl.create_default_context()
                ctx.check_hostname = False
                ctx.verify_mode = ssl.CERT_NONE

                # Best-effort separate handshake to capture protocol/cipher.
                try:
                    with socket.create_connection((parsed.hostname, parsed.port or 443), timeout=timeout) as sock:
                        with ctx.wrap_socket(sock, server_hostname=parsed.hostname) as ssock:
                            cipher = ssock.cipher()
                            result['tls'] = {
                                'version': ssock.version(),
                                'cipher': cipher[0] if cipher else None,
                                'bits': cipher[2] if cipher else None,
                            }
                except Exception:
                    pass  # TLS info is optional

                response = urllib.request.urlopen(req, timeout=timeout, context=ctx)
            else:
                response = urllib.request.urlopen(req, timeout=timeout)

            result['status_code'] = response.getcode()

            # Lower-cased header map for case-insensitive lookups; original
            # casing is preserved in the result for reporting.
            headers = {k.lower(): v for k, v in response.headers.items()}
            result['headers'] = dict(response.headers)

            # urlopen follows redirects; record the final URL if it moved.
            if response.geturl() != url:
                result['redirect_url'] = response.geturl()

            # Collect each Set-Cookie header individually. Splitting the
            # joined header on ',' (previous behavior) corrupted cookies
            # whose Expires attribute itself contains a comma.
            result['cookies'] = [c.strip() for c in (response.headers.get_all('Set-Cookie') or [])]

            # Detect server software from the Server header (first match wins).
            server_header = headers.get('server', '').lower()
            result['server'] = headers.get('server')

            for sig, product in SERVER_SIGNATURES.items():
                if sig in server_header:
                    result['server_version'] = product
                    result['technologies'].append(product)
                    break

            result['waf'] = self._detect_waf(headers, result['cookies'])
            result['cdn'] = self._detect_cdn(headers, server_header)
            result['managed_hosting'] = self._detect_managed_hosting(headers, server_header, result['cookies'])
            self._detect_technologies(headers, result)

        except urllib.error.HTTPError as e:
            # Even error responses (e.g. a WAF's 403/503 page) carry useful
            # headers and cookies.
            result['status_code'] = e.code
            headers = {k.lower(): v for k, v in e.headers.items()}
            result['headers'] = dict(e.headers)
            result['server'] = headers.get('server')
            result['cookies'] = [c.strip() for c in (e.headers.get_all('Set-Cookie') or [])]

            server_header = headers.get('server', '').lower()
            result['waf'] = self._detect_waf(headers, result['cookies'])
            result['cdn'] = self._detect_cdn(headers, server_header)
            result['managed_hosting'] = self._detect_managed_hosting(headers, server_header, result['cookies'])

        except urllib.error.URLError as e:
            result['error'] = str(e.reason)

        except socket.timeout:
            result['error'] = 'Connection timed out'

        except Exception as e:
            result['error'] = f'{type(e).__name__}: {e}'

        return result

    def _detect_waf(self, headers: Dict[str, str], cookies: List[str]) -> List[str]:
        """Detect WAF products from headers and cookies.

        *headers* must already be lower-case-keyed. Returns a de-duplicated
        list of WAF names in detection order.
        """
        detected = []

        # Header signatures: an empty pattern means "header presence is
        # enough"; otherwise substring-match the lower-cased header value.
        for header, signatures in WAF_SIGNATURES['headers'].items():
            header_val = headers.get(header, '').lower()
            if header_val:
                if isinstance(signatures, dict):
                    for sig, waf_name in signatures.items():
                        if sig == '' or sig in header_val:
                            if waf_name and waf_name not in detected:
                                detected.append(waf_name)
                elif isinstance(signatures, str) and signatures not in detected:
                    detected.append(signatures)

        # Cookie signatures: substring match against all cookies joined.
        cookie_str = ' '.join(cookies).lower()
        for cookie_sig, waf_name in WAF_SIGNATURES['cookies'].items():
            if cookie_sig.lower() in cookie_str:
                if waf_name not in detected:
                    detected.append(waf_name)

        return detected

    def _detect_cdn(self, headers: Dict[str, str], server_header: str) -> List[str]:
        """Detect CDNs from response headers and the lower-cased Server value."""
        detected = []

        # Named headers: a string value means presence implies that CDN; a
        # dict value means the header value must contain one of the substrings.
        for header, cdn_info in CDN_SIGNATURES['headers'].items():
            header_val = headers.get(header, '').lower()
            if header_val:
                if isinstance(cdn_info, dict):
                    for sig, cdn_name in cdn_info.items():
                        if sig in header_val and cdn_name not in detected:
                            detected.append(cdn_name)
                elif isinstance(cdn_info, str) and cdn_info not in detected:
                    detected.append(cdn_info)

        # Server header substrings.
        for sig, cdn_name in CDN_SIGNATURES['server'].items():
            if sig in server_header and cdn_name not in detected:
                detected.append(cdn_name)

        return detected

    def _detect_managed_hosting(self, headers: Dict[str, str], server_header: str, cookies: List[str]) -> Optional[str]:
        """Detect a managed hosting platform; returns the first match or None."""
        # Server header first (most reliable signal).
        for sig, platform in MANAGED_HOSTING_SIGNATURES['server'].items():
            if sig in server_header:
                return platform

        # Header-name prefixes (e.g. 'x-vercel-' matches 'x-vercel-id').
        for header_prefix, platform in MANAGED_HOSTING_SIGNATURES['headers'].items():
            for header in headers:
                if header.lower().startswith(header_prefix.lower()):
                    return platform

        # Cookie substrings.
        cookie_str = ' '.join(cookies).lower()
        for cookie_sig, platform in MANAGED_HOSTING_SIGNATURES['cookies'].items():
            if cookie_sig.lower() in cookie_str:
                return platform

        return None

    def _detect_technologies(self, headers: Dict[str, str], result: Dict[str, Any]):
        """Augment result['technologies'] with hints from disclosure headers.

        Mutates *result* in place; the final list is de-duplicated via a set
        round-trip (order is not preserved).
        """
        techs = result['technologies']

        # X-Powered-By: normalize well-known frameworks, keep raw value otherwise.
        powered_by = headers.get('x-powered-by', '')
        if powered_by:
            if 'php' in powered_by.lower():
                techs.append(f'PHP ({powered_by})')
            elif 'asp.net' in powered_by.lower():
                techs.append(f'ASP.NET ({powered_by})')
            elif 'express' in powered_by.lower():
                techs.append('Express.js')
            elif powered_by not in techs:
                techs.append(powered_by)

        # X-AspNet-Version discloses the exact ASP.NET runtime version.
        aspnet_ver = headers.get('x-aspnet-version', '')
        if aspnet_ver:
            techs.append(f'ASP.NET {aspnet_ver}')

        # X-Generator (typically a CMS identifier).
        generator = headers.get('x-generator', '')
        if generator:
            techs.append(generator)

        result['technologies'] = list(set(techs))

    def _format_output(self, target: str, result: Dict[str, Any], label: str) -> str:
        """Render fingerprint results as the human-readable log report."""
        lines = []
        lines.append("=== Plugin: HTTP Fingerprint ===")
        lines.append(f"Target: {target}")
        if label:
            lines.append(f"Label: {label}")
        lines.append(f"Started: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime())}")
        lines.append("=" * 60)
        lines.append("")

        # A transport-level error short-circuits the report.
        if result.get('error'):
            lines.append(f"ERROR: {result['error']}")
            return '\n'.join(lines)

        # Status
        lines.append(f"HTTP Status: {result.get('status_code', 'N/A')}")

        if result.get('redirect_url'):
            lines.append(f"Redirected to: {result['redirect_url']}")

        # Server
        if result.get('server'):
            lines.append(f"Server: {result['server']}")

        # TLS
        if result.get('tls'):
            tls = result['tls']
            lines.append(f"TLS: {tls.get('version', 'Unknown')} ({tls.get('cipher', 'Unknown')})")

        lines.append("")

        # Managed hosting is highlighted first — it drives downstream tool choices.
        if result.get('managed_hosting'):
            lines.append("-" * 40)
            lines.append(f"MANAGED HOSTING DETECTED: {result['managed_hosting']}")
            lines.append(" -> CGI enumeration will be skipped")
            lines.append(" -> Limited vulnerability surface expected")
            lines.append("-" * 40)
            lines.append("")

        # WAF
        if result.get('waf'):
            lines.append("WAF/Protection Detected:")
            for waf in result['waf']:
                lines.append(f" - {waf}")
            lines.append("")

        # CDN
        if result.get('cdn'):
            lines.append("CDN Detected:")
            for cdn in result['cdn']:
                lines.append(f" - {cdn}")
            lines.append("")

        # Technologies
        if result.get('technologies'):
            lines.append("Technologies:")
            for tech in result['technologies']:
                lines.append(f" - {tech}")
            lines.append("")

        lines.append(f"\n=== Completed: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime())} ===")

        return '\n'.join(lines)
|
|
590
|
+
|
|
591
|
+
|
|
592
|
+
# Module-level plugin instance (presumably discovered by the plugin
# registry via this conventional name — verify against the loader).
plugin = HttpFingerprintPlugin()
|
souleyez/plugins/nuclei.py
CHANGED
|
@@ -207,19 +207,51 @@ class NucleiPlugin(PluginBase):
|
|
|
207
207
|
return True
|
|
208
208
|
return False
|
|
209
209
|
|
|
210
|
-
def
|
|
211
|
-
"""
|
|
212
|
-
|
|
210
|
+
def _normalize_target(self, target: str, args: List[str] = None, log_path: str = None) -> str:
|
|
211
|
+
"""
|
|
212
|
+
Normalize target for Nuclei scanning.
|
|
213
|
+
|
|
214
|
+
- URLs are validated and passed through
|
|
215
|
+
- Bare IPs/domains get http:// prepended for web scanning
|
|
216
|
+
|
|
217
|
+
This fixes the issue where nmap chains pass bare IPs but Nuclei
|
|
218
|
+
needs URLs to properly scan web services.
|
|
219
|
+
"""
|
|
220
|
+
import re
|
|
221
|
+
|
|
222
|
+
# Already a URL - validate and return
|
|
213
223
|
if target.startswith(('http://', 'https://')):
|
|
214
224
|
try:
|
|
215
|
-
|
|
225
|
+
return validate_url(target)
|
|
216
226
|
except ValidationError as e:
|
|
217
227
|
if log_path:
|
|
218
228
|
with open(log_path, 'w') as f:
|
|
219
229
|
f.write(f"ERROR: Invalid URL: {e}\n")
|
|
220
230
|
return None
|
|
221
231
|
|
|
232
|
+
# Bare IP or domain - prepend http:// for web scanning
|
|
233
|
+
# This is needed because Nuclei web templates require a URL
|
|
234
|
+
ip_pattern = r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?$'
|
|
235
|
+
domain_pattern = r'^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?)*$'
|
|
236
|
+
|
|
237
|
+
if re.match(ip_pattern, target) or re.match(domain_pattern, target):
|
|
238
|
+
# Log the conversion
|
|
239
|
+
if log_path:
|
|
240
|
+
with open(log_path, 'a') as f:
|
|
241
|
+
f.write(f"NOTE: Converting bare target '{target}' to 'http://{target}' for web scanning\n")
|
|
242
|
+
return f"http://{target}"
|
|
243
|
+
|
|
244
|
+
# Unknown format - return as-is
|
|
245
|
+
return target
|
|
246
|
+
|
|
247
|
+
def build_command(self, target: str, args: List[str] = None, label: str = "", log_path: str = None):
|
|
248
|
+
"""Build nuclei command for background execution with PID tracking."""
|
|
222
249
|
args = args or []
|
|
250
|
+
|
|
251
|
+
# Normalize target (convert bare IPs to URLs)
|
|
252
|
+
target = self._normalize_target(target, args, log_path)
|
|
253
|
+
if target is None:
|
|
254
|
+
return None
|
|
223
255
|
args = [arg.replace("<target>", target) for arg in args]
|
|
224
256
|
|
|
225
257
|
cmd = ["nuclei", "-target", target]
|
|
@@ -252,21 +284,13 @@ class NucleiPlugin(PluginBase):
|
|
|
252
284
|
|
|
253
285
|
def run(self, target: str, args: List[str] = None, label: str = "", log_path: str = None) -> int:
|
|
254
286
|
"""Execute nuclei scan and write JSON output to log_path."""
|
|
255
|
-
|
|
256
|
-
# For URLs, validate them. For bare IPs/domains, let Nuclei auto-detect protocols
|
|
257
|
-
if target.startswith(('http://', 'https://')):
|
|
258
|
-
try:
|
|
259
|
-
target = validate_url(target)
|
|
260
|
-
except ValidationError as e:
|
|
261
|
-
if log_path:
|
|
262
|
-
with open(log_path, 'w') as f:
|
|
263
|
-
f.write(f"ERROR: Invalid URL: {e}\n")
|
|
264
|
-
return 1
|
|
265
|
-
raise ValueError(f"Invalid URL: {e}")
|
|
266
|
-
# Otherwise keep target as-is (IP or domain) for Nuclei auto-detect protocols
|
|
267
|
-
|
|
268
287
|
args = args or []
|
|
269
288
|
|
|
289
|
+
# Normalize target (convert bare IPs to URLs)
|
|
290
|
+
target = self._normalize_target(target, args, log_path)
|
|
291
|
+
if target is None:
|
|
292
|
+
return 1
|
|
293
|
+
|
|
270
294
|
# Replace <target> placeholder
|
|
271
295
|
args = [arg.replace("<target>", target) for arg in args]
|
|
272
296
|
|