changedetection.io-osint-processor 0.0.1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- changedetection_io_osint_processor-0.0.1.dist-info/METADATA +274 -0
- changedetection_io_osint_processor-0.0.1.dist-info/RECORD +29 -0
- changedetection_io_osint_processor-0.0.1.dist-info/WHEEL +5 -0
- changedetection_io_osint_processor-0.0.1.dist-info/entry_points.txt +2 -0
- changedetection_io_osint_processor-0.0.1.dist-info/licenses/LICENSE +661 -0
- changedetection_io_osint_processor-0.0.1.dist-info/top_level.txt +1 -0
- changedetectionio_osint/__init__.py +22 -0
- changedetectionio_osint/forms.py +289 -0
- changedetectionio_osint/plugin.py +37 -0
- changedetectionio_osint/processor.py +655 -0
- changedetectionio_osint/steps/__init__.py +4 -0
- changedetectionio_osint/steps/base.py +76 -0
- changedetectionio_osint/steps/bgp.py +88 -0
- changedetectionio_osint/steps/dns.py +147 -0
- changedetectionio_osint/steps/dns_scan.py +88 -0
- changedetectionio_osint/steps/dnssec.py +260 -0
- changedetectionio_osint/steps/email_security.py +236 -0
- changedetectionio_osint/steps/http_fingerprint.py +359 -0
- changedetectionio_osint/steps/http_scan.py +31 -0
- changedetectionio_osint/steps/mac_lookup.py +209 -0
- changedetectionio_osint/steps/os_detection.py +245 -0
- changedetectionio_osint/steps/portscan.py +113 -0
- changedetectionio_osint/steps/registry.py +49 -0
- changedetectionio_osint/steps/smtp_fingerprint.py +517 -0
- changedetectionio_osint/steps/ssh_fingerprint.py +310 -0
- changedetectionio_osint/steps/tls_analysis.py +332 -0
- changedetectionio_osint/steps/traceroute.py +127 -0
- changedetectionio_osint/steps/whois_lookup.py +125 -0
- changedetectionio_osint/steps/whois_scan.py +123 -0
@@ -0,0 +1,236 @@
+"""
+Email Security Reconnaissance Step
+Analyzes SPF, DMARC, and DKIM records for email authentication and anti-spoofing
+"""
+
+import asyncio
+# SOCKS5 proxy support: Requires DNS-over-TCP implementation (TODO: use dns.query.tcp with SOCKS5 socket)
+supports_socks5 = False
+import re
+from loguru import logger
+
+
+async def scan_email_security(hostname, dns_resolver, watch_uuid=None, update_signal=None):
+    """
+    Perform email security reconnaissance (SPF, DMARC, DKIM)
+
+    Args:
+        hostname: Target hostname to query
+        dns_resolver: Configured dns.resolver.Resolver instance
+        watch_uuid: Optional watch UUID for status updates
+        update_signal: Optional blinker signal for status updates
+
+    Returns:
+        dict: Email security results with SPF, DMARC, DKIM data
+    """
+    if update_signal and watch_uuid:
+        update_signal.send(watch_uuid=watch_uuid, status="Email Security")
+
+    def query_email_security():
+        results = {
+            'spf': None,
+            'dmarc': None,
+            'dkim': [],
+            'spf_valid': False,
+            'dmarc_valid': False,
+            'spf_policy': None,
+            'dmarc_policy': None,
+            'dmarc_pct': None,
+            'dmarc_rua': [],
+            'dmarc_ruf': [],
+        }
+
+        # === SPF (Sender Policy Framework) ===
+        # SPF records are TXT records on the domain itself
+        try:
+            answers = dns_resolver.resolve(hostname, 'TXT')
+            for rdata in answers:
+                txt_value = str(rdata).strip('"')
+                if txt_value.startswith('v=spf1'):
+                    results['spf'] = txt_value
+                    results['spf_valid'] = True
+
+                    # Parse SPF policy (last mechanism: all)
+                    # Common endings: -all (fail), ~all (softfail), +all (pass), ?all (neutral)
+                    if '-all' in txt_value:
+                        results['spf_policy'] = 'strict (-all)'
+                    elif '~all' in txt_value:
+                        results['spf_policy'] = 'softfail (~all)'
+                    elif '+all' in txt_value:
+                        results['spf_policy'] = 'permissive (+all)'
+                    elif '?all' in txt_value:
+                        results['spf_policy'] = 'neutral (?all)'
+                    else:
+                        results['spf_policy'] = 'unknown'
+                    break
+        except Exception as e:
+            logger.debug(f"SPF query failed: {e}")
+
+        # === DMARC (Domain-based Message Authentication) ===
+        # DMARC records are TXT records on _dmarc.domain.com
+        try:
+            dmarc_domain = f"_dmarc.{hostname}"
+            answers = dns_resolver.resolve(dmarc_domain, 'TXT')
+            for rdata in answers:
+                txt_value = str(rdata).strip('"')
+                if txt_value.startswith('v=DMARC1'):
+                    results['dmarc'] = txt_value
+                    results['dmarc_valid'] = True
+
+                    # Parse DMARC policy (p=none/quarantine/reject)
+                    policy_match = re.search(r'p=(\w+)', txt_value)
+                    if policy_match:
+                        results['dmarc_policy'] = policy_match.group(1)
+
+                    # Parse DMARC percentage (pct=0-100)
+                    pct_match = re.search(r'pct=(\d+)', txt_value)
+                    if pct_match:
+                        results['dmarc_pct'] = int(pct_match.group(1))
+                    else:
+                        results['dmarc_pct'] = 100  # Default is 100%
+
+                    # Parse aggregate reporting URIs (rua)
+                    rua_match = re.search(r'rua=([^;]+)', txt_value)
+                    if rua_match:
+                        results['dmarc_rua'] = [uri.strip() for uri in rua_match.group(1).split(',')]
+
+                    # Parse forensic reporting URIs (ruf)
+                    ruf_match = re.search(r'ruf=([^;]+)', txt_value)
+                    if ruf_match:
+                        results['dmarc_ruf'] = [uri.strip() for uri in ruf_match.group(1).split(',')]
+
+                    break
+        except Exception as e:
+            logger.debug(f"DMARC query failed: {e}")
+
+        # === DKIM (DomainKeys Identified Mail) ===
+        # DKIM records are TXT records on <selector>._domainkey.domain.com
+        # Common selectors to check (brute force approach since selector is arbitrary)
+        common_selectors = [
+            'default', 'google', 'k1', 'k2', 'k3', 'dkim', 'selector1', 'selector2',
+            's1', 's2', 'mail', 'email', 'mx', 'smtp', 'mta', 'key1', 'key2'
+        ]
+
+        for selector in common_selectors:
+            try:
+                dkim_domain = f"{selector}._domainkey.{hostname}"
+                answers = dns_resolver.resolve(dkim_domain, 'TXT')
+                for rdata in answers:
+                    txt_value = str(rdata).strip('"')
+                    if 'v=DKIM1' in txt_value or 'p=' in txt_value:
+                        # Extract key type if present
+                        key_type = 'RSA'  # Default
+                        k_match = re.search(r'k=(\w+)', txt_value)
+                        if k_match:
+                            key_type = k_match.group(1).upper()
+
+                        results['dkim'].append({
+                            'selector': selector,
+                            'record': txt_value[:100] + '...' if len(txt_value) > 100 else txt_value,
+                            'key_type': key_type
+                        })
+                        break
+            except Exception:
+                continue
+
+        return results
+
+    return await asyncio.to_thread(query_email_security)
+
+
+def format_email_security_results(email_results):
+    """Format email security results for output"""
+    lines = []
+    lines.append("=== Email Security (SPF/DMARC/DKIM) ===")
+
+    if not email_results:
+        lines.append("No email security records found")
+        lines.append("")
+        return '\n'.join(lines)
+
+    # SPF Section
+    lines.append("")
+    lines.append("SPF (Sender Policy Framework):")
+    if email_results.get('spf_valid'):
+        lines.append(f" Status: ✓ SPF record found")
+        lines.append(f" Policy: {email_results.get('spf_policy', 'unknown')}")
+        lines.append(f" Record: {email_results.get('spf')}")
+
+        # Security assessment
+        policy = email_results.get('spf_policy', '')
+        if 'strict' in policy:
+            lines.append(" Security: ✓ Strong (rejects unauthorized senders)")
+        elif 'softfail' in policy:
+            lines.append(" Security: ⚠ Moderate (marks unauthorized senders as suspicious)")
+        elif 'permissive' in policy:
+            lines.append(" Security: ✗ Weak (allows all senders)")
+        else:
+            lines.append(" Security: ? Unknown policy")
+    else:
+        lines.append(" Status: ✗ No SPF record found")
+        lines.append(" Security: ✗ Domain is vulnerable to email spoofing")
+
+    # DMARC Section
+    lines.append("")
+    lines.append("DMARC (Domain-based Message Authentication):")
+    if email_results.get('dmarc_valid'):
+        lines.append(f" Status: ✓ DMARC record found")
+        lines.append(f" Policy: {email_results.get('dmarc_policy', 'unknown')}")
+        lines.append(f" Enforcement: {email_results.get('dmarc_pct', 100)}% of messages")
+
+        if email_results.get('dmarc_rua'):
+            lines.append(f" Aggregate Reports: {', '.join(email_results['dmarc_rua'])}")
+        if email_results.get('dmarc_ruf'):
+            lines.append(f" Forensic Reports: {', '.join(email_results['dmarc_ruf'])}")
+
+        lines.append(f" Record: {email_results.get('dmarc')}")
+
+        # Security assessment
+        policy = email_results.get('dmarc_policy', '')
+        pct = email_results.get('dmarc_pct', 100)
+        if policy == 'reject' and pct == 100:
+            lines.append(" Security: ✓ Strong (rejects failed authentication)")
+        elif policy == 'quarantine':
+            lines.append(" Security: ⚠ Moderate (quarantines failed authentication)")
+        elif policy == 'none':
+            lines.append(" Security: ⚠ Monitor-only (no enforcement)")
+        else:
+            lines.append(" Security: ? Unknown policy")
+    else:
+        lines.append(" Status: ✗ No DMARC record found")
+        lines.append(" Security: ⚠ No DMARC policy enforcement")
+
+    # DKIM Section
+    lines.append("")
+    lines.append("DKIM (DomainKeys Identified Mail):")
+    if email_results.get('dkim'):
+        lines.append(f" Status: ✓ Found {len(email_results['dkim'])} DKIM selector(s)")
+        for dkim_entry in email_results['dkim']:
+            lines.append(f" Selector: {dkim_entry['selector']}")
+            lines.append(f" Key Type: {dkim_entry['key_type']}")
+            lines.append(f" Record: {dkim_entry['record']}")
+        lines.append(" Security: ✓ Email signing enabled")
+    else:
+        lines.append(" Status: ⚠ No DKIM records found (checked common selectors)")
+        lines.append(" Note: DKIM may be present with a custom selector")
+
+    # Overall Security Summary
+    lines.append("")
+    lines.append("Overall Email Security Posture:")
+    spf_ok = email_results.get('spf_valid', False)
+    dmarc_ok = email_results.get('dmarc_valid', False)
+    dkim_ok = len(email_results.get('dkim', [])) > 0
+
+    score = sum([spf_ok, dmarc_ok, dkim_ok])
+    if score == 3:
+        lines.append(" ✓ Excellent: SPF, DMARC, and DKIM all configured")
+    elif score == 2:
+        lines.append(" ⚠ Good: 2 out of 3 email security standards configured")
+    elif score == 1:
+        lines.append(" ⚠ Weak: Only 1 out of 3 email security standards configured")
+    else:
+        lines.append(" ✗ Poor: No email security standards configured")
+        lines.append(" Recommendation: Configure SPF, DMARC, and DKIM to prevent email spoofing")
+
+    lines.append("")
+    return '\n'.join(lines)
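The step module exposes the scan coroutine and formatter as plain functions, so they can also be exercised outside changedetection.io. A minimal driver sketch, assuming dnspython is installed; the domain and the resolver settings below are placeholders, not values taken from the package:

# Minimal standalone sketch of driving scan_email_security outside the plugin.
# Assumes dnspython is installed; "example.com" and the resolver tuning are placeholders.
import asyncio
import dns.resolver

from changedetectionio_osint.steps.email_security import (
    scan_email_security,
    format_email_security_results,
)


async def main():
    resolver = dns.resolver.Resolver()  # uses the system nameservers by default
    resolver.timeout = 5                # per-query timeout in seconds
    resolver.lifetime = 10              # total time allowed per resolution

    results = await scan_email_security("example.com", resolver)
    print(format_email_security_results(results))


if __name__ == "__main__":
    asyncio.run(main())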
@@ -0,0 +1,359 @@
+"""
+HTTP Fingerprinting Step
+Captures server-side HTTP/HTTPS fingerprints including redirect chains and CDN detection
+"""
+
+import asyncio
+# SOCKS5 proxy support: HTTP requests support SOCKS5 via requests library
+supports_socks5 = True
+import socket
+import time
+import hashlib
+from urllib.parse import urlparse, urljoin, urlunparse
+from loguru import logger
+
+
+async def scan_http(url, dns_resolver, proxy_url=None, watch_uuid=None, update_signal=None):
+    """
+    Perform HTTP fingerprinting on target URL
+
+    Args:
+        url: Target URL
+        dns_resolver: Configured dns.resolver.Resolver instance
+        proxy_url: Optional proxy URL
+        watch_uuid: Optional watch UUID for status updates
+        update_signal: Optional blinker signal for status updates
+
+    Returns:
+        dict: HTTP fingerprint data
+    """
+    if update_signal and watch_uuid:
+        update_signal.send(watch_uuid=watch_uuid, status="HTTP")
+
+    def fetch_http_fingerprint():
+        """Synchronous HTTP fingerprinting - captures server TLS configuration"""
+        import requests
+
+        # Monkey-patch socket.getaddrinfo to use our custom DNS server
+        # CRITICAL: Skip DNS monkey-patching when using SOCKS5 proxy to prevent DNS leaks
+        # SOCKS5 proxy should handle DNS resolution (use socks5h:// for remote DNS)
+        original_getaddrinfo = socket.getaddrinfo
+
+        if not proxy_url or not proxy_url.strip():
+            # Only monkey-patch DNS when NOT using proxy
+            def custom_getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
+                """Custom getaddrinfo that uses our DNS_SERVER"""
+                try:
+                    # Use our dns_resolver to resolve the hostname
+                    try:
+                        answers = dns_resolver.resolve(host, 'A')
+                        resolved_ip = str(answers[0])
+                    except:
+                        # Fallback to AAAA
+                        try:
+                            answers = dns_resolver.resolve(host, 'AAAA')
+                            resolved_ip = str(answers[0])
+                        except:
+                            # If our DNS fails, fall back to original
+                            return original_getaddrinfo(host, port, family, type, proto, flags)
+
+                    # Return address info with our resolved IP
+                    if ':' in resolved_ip:
+                        # IPv6
+                        return [(socket.AF_INET6, socket.SOCK_STREAM, proto, '', (resolved_ip, port, 0, 0))]
+                    else:
+                        # IPv4
+                        return [(socket.AF_INET, socket.SOCK_STREAM, proto, '', (resolved_ip, port))]
+                except:
+                    return original_getaddrinfo(host, port, family, type, proto, flags)
+
+            # Apply the monkey-patch (only when not using proxy)
+            socket.getaddrinfo = custom_getaddrinfo
+        else:
+            # When using SOCKS5 proxy: let proxy handle DNS resolution
+            # User should use socks5h:// (not socks5://) for remote DNS resolution
+            logger.debug("SOCKS5 proxy configured - skipping DNS monkey-patch to prevent leaks")
+
+        parsed = urlparse(url)
+        session = requests.Session()
+
+        # Configure proxy if provided
+        proxies = None
+        if proxy_url:
+            proxies = {'http': proxy_url, 'https': proxy_url}
+
+        # Set realistic headers
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+            'Accept-Language': 'en-US,en;q=0.5',
+            'Accept-Encoding': 'gzip, deflate',
+            'Connection': 'keep-alive',
+        }
+
+        fingerprint = {}
+        start_time = time.time()
+
+        # Track redirect chain
+        redirect_chain = []
+
+        # CDN/WAF detection patterns
+        cdn_waf_indicators = {
+            'Cloudflare': ['CF-Ray', 'cf-request-id', '__cfduid', 'cloudflare'],
+            'Akamai': ['X-Akamai-', 'akamai'],
+            'AWS CloudFront': ['X-Amz-Cf-', 'cloudfront'],
+            'Fastly': ['Fastly-', 'X-Fastly-'],
+            'Incapsula': ['X-CDN: Incapsula', 'incap_ses', 'visid_incap'],
+            'Sucuri': ['X-Sucuri-', 'sucuri'],
+            'StackPath': ['X-Stackpath-'],
+            'KeyCDN': ['X-Edge-', 'Server: keycdn'],
+            'Imperva': ['X-Iinfo', 'imperva'],
+            'F5 BIG-IP': ['BigIP', 'F5-', 'X-WA-Info'],
+            'Nginx': ['Server: nginx', 'X-Nginx-'],
+            'Varnish': ['Via: varnish', 'X-Varnish'],
+            'Squid': ['Via: squid', 'X-Squid-'],
+        }
+
+        try:
+            # Follow redirects manually to capture chain
+            current_url = url
+            max_redirects = 5
+            redirect_count = 0
+
+            while redirect_count < max_redirects:
+                response = session.get(
+                    current_url,
+                    headers=headers,
+                    proxies=proxies,
+                    timeout=10,
+                    allow_redirects=False,
+                    verify=True
+                )
+
+                # Record this hop in redirect chain
+                redirect_chain.append({
+                    'url': current_url,
+                    'status': response.status_code,
+                    'location': response.headers.get('Location', '')
+                })
+
+                # Check if it's a redirect
+                if response.status_code in [301, 302, 303, 307, 308]:
+                    location = response.headers.get('Location')
+                    if not location:
+                        break
+
+                    # Handle relative URLs
+                    if location.startswith('/'):
+                        parsed_current = urlparse(current_url)
+                        location = urlunparse((
+                            parsed_current.scheme,
+                            parsed_current.netloc,
+                            location,
+                            '', '', ''
+                        ))
+                    elif not location.startswith(('http://', 'https://')):
+                        location = urljoin(current_url, location)
+
+                    current_url = location
+                    redirect_count += 1
+                else:
+                    # Final response
+                    break
+
+            elapsed_time = time.time() - start_time
+
+            # Basic response info
+            fingerprint['status_code'] = response.status_code
+            fingerprint['reason'] = response.reason
+            fingerprint['elapsed_ms'] = int(elapsed_time * 1000)
+            fingerprint['content_length'] = len(response.content)
+            fingerprint['http_version'] = f"HTTP/{response.raw.version // 10}.{response.raw.version % 10}"
+
+            # All response headers
+            fingerprint['headers'] = dict(response.headers)
+
+            # Server fingerprinting
+            fingerprint['server'] = response.headers.get('Server', 'Not disclosed')
+            fingerprint['powered_by'] = response.headers.get('X-Powered-By', 'Not disclosed')
+
+            # Security headers
+            security_headers = {
+                'Strict-Transport-Security': response.headers.get('Strict-Transport-Security'),
+                'Content-Security-Policy': response.headers.get('Content-Security-Policy'),
+                'X-Frame-Options': response.headers.get('X-Frame-Options'),
+                'X-Content-Type-Options': response.headers.get('X-Content-Type-Options'),
+                'X-XSS-Protection': response.headers.get('X-XSS-Protection'),
+                'Referrer-Policy': response.headers.get('Referrer-Policy'),
+                'Permissions-Policy': response.headers.get('Permissions-Policy'),
+            }
+            fingerprint['security_headers'] = {k: v for k, v in security_headers.items() if v}
+
+            # SERVER TLS Configuration (what the server chose/prefers)
+            if parsed.scheme == 'https':
+                try:
+                    # Get what the SERVER negotiated/chose
+                    if hasattr(response.raw, 'connection') and response.raw.connection:
+                        sock = getattr(response.raw.connection, 'sock', None)
+                        if sock:
+                            # Server's certificate info
+                            if hasattr(sock, 'getpeercert'):
+                                cert = sock.getpeercert()
+                                if cert:
+                                    fingerprint['server_cert_subject'] = dict(x[0] for x in cert.get('subject', []))
+                                    fingerprint['server_cert_issuer'] = dict(x[0] for x in cert.get('issuer', []))
+
+                            # What the SERVER chose/negotiated with us
+                            if hasattr(sock, 'version'):
+                                fingerprint['server_tls_version'] = sock.version()
+                            if hasattr(sock, 'cipher'):
+                                cipher_info = sock.cipher()
+                                fingerprint['server_cipher'] = cipher_info
+                                # JA3S-like: The server's preferred cipher tells us about the server
+                                if cipher_info:
+                                    # Create simple server fingerprint from what it chose
+                                    server_fp_string = f"{cipher_info[0]}:{cipher_info[1]}:{cipher_info[2]}"
+                                    fingerprint['server_cipher_fingerprint'] = hashlib.md5(server_fp_string.encode()).hexdigest()
+                except Exception as e:
+                    fingerprint['ssl_error'] = str(e)
+
+            # Cookies
+            if response.cookies:
+                fingerprint['cookies'] = [
+                    f"{cookie.name}={'[HttpOnly]' if cookie.has_nonstandard_attr('HttpOnly') else ''}"
+                    f"{'[Secure]' if cookie.secure else ''}"
+                    for cookie in response.cookies
+                ]
+
+            # CDN/WAF Detection
+            detected_cdns = []
+            all_headers_lower = {k.lower(): v.lower() for k, v in response.headers.items()}
+            all_cookies_lower = ' '.join([c.name.lower() for c in response.cookies])
+
+            for cdn_name, indicators in cdn_waf_indicators.items():
+                for indicator in indicators:
+                    indicator_lower = indicator.lower()
+                    # Check headers (both key and value)
+                    header_match = any(
+                        indicator_lower in header_key or indicator_lower in header_value
+                        for header_key, header_value in all_headers_lower.items()
+                    )
+                    # Check cookies
+                    cookie_match = indicator_lower in all_cookies_lower
+
+                    if header_match or cookie_match:
+                        if cdn_name not in detected_cdns:
+                            detected_cdns.append(cdn_name)
+                        break
+
+            if detected_cdns:
+                fingerprint['cdn_waf'] = detected_cdns
+
+            # Store redirect chain in fingerprint
+            fingerprint['redirect_chain'] = redirect_chain
+
+        except Exception as e:
+            fingerprint['error'] = str(e)
+            logger.error(f"HTTP fingerprinting failed: {e}")
+        finally:
+            # Restore original getaddrinfo
+            socket.getaddrinfo = original_getaddrinfo
+
+        return fingerprint
+
+    return await asyncio.to_thread(fetch_http_fingerprint)
+
+
+def format_http_results(http_fingerprint, parsed_url):
+    """Format HTTP results for output"""
+    lines = []
+    lines.append("=== HTTP Response Fingerprint ===")
+
+    if 'error' in http_fingerprint:
+        lines.append(f"Error: {http_fingerprint['error']}")
+    else:
+        # Response basics
+        lines.append(f"Status: {http_fingerprint.get('status_code')} {http_fingerprint.get('reason')}")
+        lines.append(f"HTTP Version: {http_fingerprint.get('http_version')}")
+        lines.append(f"Content Length: {http_fingerprint.get('content_length')} bytes")
+        lines.append("")
+
+        # Server identification
+        lines.append("Server Identification:")
+        lines.append(f" Server: {http_fingerprint.get('server')}")
+        lines.append(f" X-Powered-By: {http_fingerprint.get('powered_by')}")
+        lines.append("")
+
+        # Security headers
+        if http_fingerprint.get('security_headers'):
+            lines.append("Security Headers:")
+            for header, value in http_fingerprint['security_headers'].items():
+                # Truncate long CSP headers
+                if len(str(value)) > 100:
+                    value = str(value)[:100] + "..."
+                lines.append(f" {header}: {value}")
+            lines.append("")
+
+        # SERVER TLS Configuration (what the server chose)
+        if http_fingerprint.get('server_tls_version'):
+            lines.append("Server TLS Configuration:")
+            lines.append(f" TLS Version: {http_fingerprint.get('server_tls_version')}")
+
+            if http_fingerprint.get('server_cipher'):
+                cipher = http_fingerprint['server_cipher']
+                lines.append(f" Server Chose Cipher: {cipher[0]}")
+                lines.append(f" Cipher Protocol: {cipher[1]}")
+                lines.append(f" Cipher Bits: {cipher[2]}")
+
+            # Server cipher fingerprint (JA3S-like)
+            if http_fingerprint.get('server_cipher_fingerprint'):
+                lines.append(f" Server Cipher Fingerprint: {http_fingerprint['server_cipher_fingerprint']}")
+
+            lines.append("")
+            lines.append(" Note: The cipher the server chose can indicate")
+            lines.append(" server software (nginx, Apache, IIS, Cloudflare, etc.)")
+            lines.append(" See 'SSL/TLS Analysis' section for full server capabilities.")
+            lines.append("")
+
+        # CDN/WAF Detection
+        if http_fingerprint.get('cdn_waf'):
+            lines.append("CDN/WAF/Proxy Detection:")
+            for cdn in http_fingerprint['cdn_waf']:
+                lines.append(f" - {cdn}")
+            lines.append("")
+
+        # Redirect Chain
+        redirect_chain = http_fingerprint.get('redirect_chain', [])
+        if redirect_chain and len(redirect_chain) > 1:
+            lines.append("Redirect Chain:")
+            for i, hop in enumerate(redirect_chain, 1):
+                lines.append(f" {i}. [{hop['status']}] {hop['url']}")
+                if hop.get('location'):
+                    lines.append(f" → {hop['location']}")
+            lines.append("")
+
+        # Interesting headers
+        headers = http_fingerprint.get('headers', {})
+        interesting_headers = [
+            'Content-Type', 'Content-Encoding', 'Transfer-Encoding',
+            'Cache-Control', 'Pragma', 'Expires', 'ETag', 'Last-Modified',
+            'Access-Control-Allow-Origin', 'Vary', 'X-Request-ID', 'X-Runtime'
+        ]
+
+        found_headers = {h: headers[h] for h in interesting_headers if h in headers}
+        if found_headers:
+            lines.append("Notable Headers:")
+            for header, value in found_headers.items():
+                if len(str(value)) > 100:
+                    value = str(value)[:100] + "..."
+                lines.append(f" {header}: {value}")
+            lines.append("")
+
+        # Cookies
+        if http_fingerprint.get('cookies'):
+            lines.append("Cookies Set:")
+            for cookie in http_fingerprint['cookies']:
+                lines.append(f" {cookie}")
+
+    lines.append("")
+    return '\n'.join(lines)
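As the comments in scan_http note, the getaddrinfo monkey-patch is skipped whenever a proxy is configured, and DNS is only resolved remotely if the proxy URL uses the socks5h:// scheme. A minimal driver sketch of both modes, assuming dnspython and requests[socks] (PySocks) are installed; the URL and proxy address are placeholders:

# Sketch of calling scan_http directly and via a SOCKS5 proxy.
# Assumes dnspython and requests[socks]; "example.com" and 127.0.0.1:9050 are placeholders.
import asyncio
from urllib.parse import urlparse

import dns.resolver

from changedetectionio_osint.steps.http_fingerprint import scan_http, format_http_results


async def main():
    resolver = dns.resolver.Resolver()
    url = "https://example.com/"

    # Direct scan: the step patches socket.getaddrinfo so the request resolves via `resolver`.
    direct = await scan_http(url, resolver)

    # Proxied scan: no DNS patching; socks5h:// pushes name resolution to the proxy itself.
    proxied = await scan_http(url, resolver, proxy_url="socks5h://127.0.0.1:9050")

    print(format_http_results(direct, urlparse(url)))
    print(format_http_results(proxied, urlparse(url)))


if __name__ == "__main__":
    asyncio.run(main())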
@@ -0,0 +1,31 @@
+"""
+HTTP Fingerprinting Step
+"""
+
+from .base import ScanStep
+from .registry import register_step
+from . import http_fingerprint
+
+
+@register_step
+class HTTPScanStep(ScanStep):
+    """HTTP response fingerprinting, CDN/WAF detection, redirect chains"""
+
+    name = "HTTP Response Fingerprint"
+    order = 30
+
+    async def scan(self, context: dict):
+        """Perform HTTP fingerprinting"""
+        return await http_fingerprint.scan_http(
+            context['url'],
+            context['dns_resolver'],
+            context.get('proxy_url'),
+            context.get('watch_uuid'),
+            context.get('update_signal')
+        )
+
+    def format_results(self, results):
+        """Format HTTP results"""
+        if results and not isinstance(results, Exception):
+            return http_fingerprint.format_http_results(results, None)  # parsed_url is unused by the formatter; no `context` is in scope here
+        return "=== HTTP Response Fingerprint ===\nNo data available\n"
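base.py and registry.py are not among the hunks shown above, so the exact step contract is not visible here. The sketch below only illustrates the decorator-plus-registry pattern that HTTPScanStep appears to rely on; apart from ScanStep, register_step, name, order, scan and format_results, every name is hypothetical:

# Illustrative guess at the minimal contract behind ScanStep/@register_step.
# Not the package's actual base.py/registry.py, which this diff does not show.
from abc import ABC, abstractmethod

_STEPS = []  # populated by @register_step, consumed by the processor


class ScanStep(ABC):
    """Inferred base contract: a display name, an ordering key, an async scan(context),
    and a format_results(results) that returns text for the watch output."""
    name = "Unnamed step"
    order = 100

    @abstractmethod
    async def scan(self, context: dict):
        ...

    def format_results(self, results) -> str:
        return str(results)


def register_step(cls):
    """Class decorator that records a step so all registered steps can be run by `order`."""
    _STEPS.append(cls)
    _STEPS.sort(key=lambda c: c.order)
    return cls


def all_steps():
    """Return the registered step classes sorted by their `order` attribute."""
    return list(_STEPS)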