souleyez 2.16.0__py3-none-any.whl → 2.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of souleyez might be problematic.
- souleyez/__init__.py +1 -1
- souleyez/assets/__init__.py +1 -0
- souleyez/assets/souleyez-icon.png +0 -0
- souleyez/core/msf_sync_manager.py +15 -5
- souleyez/core/tool_chaining.py +221 -29
- souleyez/detection/validator.py +4 -2
- souleyez/docs/README.md +2 -2
- souleyez/docs/user-guide/installation.md +14 -1
- souleyez/engine/background.py +25 -1
- souleyez/engine/result_handler.py +129 -0
- souleyez/integrations/siem/splunk.py +58 -11
- souleyez/main.py +103 -4
- souleyez/parsers/crackmapexec_parser.py +101 -43
- souleyez/parsers/dnsrecon_parser.py +50 -35
- souleyez/parsers/enum4linux_parser.py +101 -21
- souleyez/parsers/http_fingerprint_parser.py +319 -0
- souleyez/parsers/hydra_parser.py +56 -5
- souleyez/parsers/impacket_parser.py +123 -44
- souleyez/parsers/john_parser.py +47 -14
- souleyez/parsers/msf_parser.py +20 -5
- souleyez/parsers/nmap_parser.py +145 -28
- souleyez/parsers/smbmap_parser.py +69 -25
- souleyez/parsers/sqlmap_parser.py +72 -26
- souleyez/parsers/theharvester_parser.py +21 -13
- souleyez/plugins/gobuster.py +96 -3
- souleyez/plugins/http_fingerprint.py +592 -0
- souleyez/plugins/msf_exploit.py +6 -3
- souleyez/plugins/nuclei.py +41 -17
- souleyez/ui/interactive.py +130 -20
- souleyez/ui/setup_wizard.py +424 -58
- souleyez/ui/tool_setup.py +52 -52
- souleyez/utils/tool_checker.py +75 -13
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/METADATA +16 -3
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/RECORD +38 -34
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/WHEEL +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/entry_points.txt +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/licenses/LICENSE +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/top_level.txt +0 -0
souleyez/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = '2.16.0'
+__version__ = '2.26.0'

souleyez/assets/__init__.py
ADDED
@@ -0,0 +1 @@
+# SoulEyez assets package

souleyez/assets/souleyez-icon.png
ADDED
Binary file

souleyez/core/msf_sync_manager.py
CHANGED
@@ -29,15 +29,25 @@ logger = logging.getLogger(__name__)
 
 def get_msf_database_config() -> Optional[Dict[str, Any]]:
     """
-    Get MSF database configuration from ~/.msf4/database.yml
+    Get MSF database configuration from ~/.msf4/database.yml or system-wide config.
+
+    Checks user config first, then falls back to system-wide config (Kali Linux).
 
     Returns:
         Dictionary with database config or None if not found/parseable
     """
-
-
-
-
+    # Check user config first, then system-wide config (Kali uses system-wide)
+    user_db_path = Path.home() / ".msf4" / "database.yml"
+    system_db_path = Path('/usr/share/metasploit-framework/config/database.yml')
+
+    db_yml_path = None
+    if user_db_path.exists():
+        db_yml_path = user_db_path
+    elif system_db_path.exists():
+        db_yml_path = system_db_path
+
+    if not db_yml_path:
+        logger.debug("MSF database.yml not found in user or system config")
         return None
 
     try:
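For context, a minimal sketch of how the resolved path might be consumed once found; the YAML parsing and the `production` key are assumptions based on the standard Metasploit database.yml layout, not code from this package:

```python
from pathlib import Path
from typing import Any, Dict, Optional

import yaml  # assumption: PyYAML is available for parsing database.yml


def load_msf_db_config(db_yml_path: Path) -> Optional[Dict[str, Any]]:
    """Hypothetical continuation of get_msf_database_config() once a path is found."""
    try:
        with db_yml_path.open() as fh:
            config = yaml.safe_load(fh) or {}
    except Exception:
        return None
    # Metasploit keeps its connection settings under the 'production' environment.
    return config.get('production')
```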

souleyez/core/tool_chaining.py
CHANGED
@@ -15,6 +15,17 @@ CATEGORY_CTF = "ctf" # Lab/learning scenarios - vulnerable by design
 CATEGORY_ENTERPRISE = "enterprise" # Real-world enterprise testing
 CATEGORY_GENERAL = "general" # Standard recon that applies everywhere
 
+# Managed hosting platforms - skip CGI enumeration (pointless on these)
+# These are detected from server headers/banners and product names
+MANAGED_HOSTING_PLATFORMS = {
+    'squarespace', 'wix', 'shopify', 'webflow', 'weebly',
+    'wordpress.com', 'ghost.io', 'medium', 'tumblr', 'blogger',
+    'netlify', 'vercel', 'github.io', 'pages.dev', 'cloudflare',
+    'heroku', 'railway', 'render.com', 'fly.io',
+    'aws cloudfront', 'akamai', 'fastly', 'cloudflare',
+    'azure', 'google cloud', 'firebase',
+}
+
 # Category display icons
 CATEGORY_ICONS = {
     CATEGORY_CTF: "🎯",
@@ -140,6 +151,75 @@ def classify_os_device(os_string: str, services: list) -> dict:
     return {'os_family': 'unknown', 'device_type': 'unknown', 'vendor': None}
 
 
+def is_managed_hosting(services: List[Dict[str, Any]], http_fingerprint: Dict[str, Any] = None) -> bool:
+    """
+    Detect if target is a managed hosting platform.
+
+    These platforms don't have CGI directories, so tools like nikto
+    should skip CGI enumeration to avoid long, pointless scans.
+
+    Args:
+        services: List of service dicts from nmap parser
+        http_fingerprint: Optional fingerprint data from http_fingerprint plugin
+
+    Returns:
+        True if managed hosting detected, False otherwise
+    """
+    # Check fingerprint data first (most reliable, comes from actual HTTP headers)
+    if http_fingerprint:
+        managed = http_fingerprint.get('managed_hosting')
+        if managed:
+            return True
+
+    # Fall back to checking services data (less reliable, from nmap banners)
+    for service in services:
+        # Check product field
+        product = (service.get('product') or '').lower()
+        raw_version = (service.get('raw_version') or '').lower()
+        service_name = (service.get('service') or '').lower()
+
+        # Combine all fields for matching
+        combined = f"{product} {raw_version} {service_name}"
+
+        # Check against known managed hosting platforms
+        for platform in MANAGED_HOSTING_PLATFORMS:
+            if platform in combined:
+                return True
+
+    return False
+
+
+def get_managed_hosting_platform(services: List[Dict[str, Any]], http_fingerprint: Dict[str, Any] = None) -> Optional[str]:
+    """
+    Get the name of the managed hosting platform if detected.
+
+    Args:
+        services: List of service dicts from nmap parser
+        http_fingerprint: Optional fingerprint data from http_fingerprint plugin
+
+    Returns:
+        Platform name or None
+    """
+    # Check fingerprint data first
+    if http_fingerprint:
+        managed = http_fingerprint.get('managed_hosting')
+        if managed:
+            return managed
+
+    # Fall back to services check
+    for service in services:
+        product = (service.get('product') or '').lower()
+        raw_version = (service.get('raw_version') or '').lower()
+        service_name = (service.get('service') or '').lower()
+        combined = f"{product} {raw_version} {service_name}"
+
+        for platform in MANAGED_HOSTING_PLATFORMS:
+            if platform in combined:
+                return platform.title()
+
+    return None
+
+
 # Technology to Nuclei tags mapping
 # Maps detected products/technologies to relevant nuclei template tags
 TECH_TO_NUCLEI_TAGS = {
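For orientation, a small usage sketch of the two helpers above; the service dicts are hypothetical stand-ins for nmap parser output, not real scan data:

```python
# Hypothetical parsed services: field names mirror those read by is_managed_hosting().
services = [
    {'service': 'http', 'product': 'cloudflare', 'raw_version': ''},
    {'service': 'https', 'product': 'nginx', 'raw_version': '1.18.0'},
]

if is_managed_hosting(services):
    platform = get_managed_hosting_platform(services)
    print(f"Managed hosting detected: {platform} - CGI enumeration will be skipped")
else:
    print("No managed hosting platform detected - full web scans proceed")
```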
@@ -423,6 +503,21 @@ class ChainRule:
                     result = True
                     break
 
+        elif cond_type == 'svc_version':
+            # Simple version string match (e.g., 'svc_version:2.3.4')
+            # Matches if any service has this exact version string
+            # Useful when nmap doesn't detect product name
+            services = context.get('services', [])
+            for service in services:
+                svc_version = (
+                    service.get('version', '') or
+                    service.get('service_version', '') or
+                    ''
+                )
+                if svc_version and cond_value.lower() in svc_version.lower():
+                    result = True
+                    break
+
         # Apply negation if needed
         return not result if negated else result
 
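To make the new condition type concrete, here is a standalone restatement of the svc_version check; it is illustrative only and not how ChainRule wires conditions internally:

```python
def svc_version_matches(services, wanted: str) -> bool:
    """Restates the substring match above: any service whose version contains `wanted`."""
    for service in services:
        svc_version = service.get('version', '') or service.get('service_version', '') or ''
        if svc_version and wanted.lower() in svc_version.lower():
            return True
    return False


services = [{'service': 'ftp', 'version': '2.3.4'}]
assert svc_version_matches(services, '2.3.4')    # 'service:ftp & svc_version:2.3.4' would fire
assert not svc_version_matches(services, '1.3')  # the ProFTPD 1.3.x rule would not
```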
@@ -560,6 +655,25 @@ class ChainRule:
                 new_args.append(arg)
             args = new_args
 
+        # For Nikto: Skip CGI enumeration on managed hosting platforms
+        # This prevents long, pointless scans on Squarespace, Wix, etc.
+        if self.target_tool == 'nikto':
+            services = context.get('services', [])
+            http_fingerprint = context.get('http_fingerprint', {})
+            if is_managed_hosting(services, http_fingerprint):
+                # Add -C none to skip CGI dirs (pointless on managed hosting)
+                if '-C' not in str(args):
+                    args.extend(['-C', 'none'])
+                # Add -Tuning x6 to skip remote file inclusion tests
+                if '-Tuning' not in str(args):
+                    args.extend(['-Tuning', 'x6'])
+                # Log which platform was detected
+                platform = get_managed_hosting_platform(services, http_fingerprint)
+                if platform:
+                    from souleyez.log_config import get_logger
+                    logger = get_logger(__name__)
+                    logger.info(f"[FINGERPRINT] Managed hosting detected ({platform}) - nikto using optimized scan config")
+
         # For SQLMap with POST injections, add --data if we have POST data
         if self.target_tool == 'sqlmap' and post_data and '--data' not in str(args):
             # Insert --data after -u argument
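A before/after sketch of the argument adjustment; the target URL is hypothetical and the base arguments are taken from the nikto chain rule further down in this diff:

```python
# Base nikto arguments from the chain rule, plus a hypothetical target.
args = ['-h', 'http://203.0.113.10', '-nointeractive', '-timeout', '10']

# On a managed-hosting target the block above appends:
args.extend(['-C', 'none'])      # skip CGI directory checks
args.extend(['-Tuning', 'x6'])   # 'x' excludes the listed tuning class from the scan
# Resulting invocation:
#   nikto -h http://203.0.113.10 -nointeractive -timeout 10 -C none -Tuning x6
print(' '.join(['nikto'] + args))
```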
@@ -627,32 +741,42 @@ class ToolChaining:
 
         # Web service discovered → run web scanners
         self.rules.extend([
-            #
-            #
+            # HTTP Fingerprinting - runs FIRST to detect WAF/CDN/managed hosting
+            # This enables smarter tool configuration for downstream scanners
             ChainRule(
                 trigger_tool='nmap',
                 trigger_condition='service:http',
+                target_tool='http_fingerprint',
+                priority=11,  # Highest priority - runs before all other web tools
+                args_template=[],
+                description='Web server detected, fingerprinting for WAF/CDN/platform detection'
+            ),
+            # Nikto triggered by http_fingerprint (uses fingerprint data for smart config)
+            ChainRule(
+                trigger_tool='http_fingerprint',
+                trigger_condition='has:services',
+                target_tool='nikto',
+                priority=8,
+                args_template=['-nointeractive', '-timeout', '10'],
+                description='Fingerprinting complete, scanning for server misconfigurations with Nikto'
+            ),
+            # Nuclei triggered by http_fingerprint
+            ChainRule(
+                trigger_tool='http_fingerprint',
+                trigger_condition='has:services',
                 target_tool='nuclei',
                 priority=9,
                 args_template=['-tags', '{nuclei_tags}', '-severity', 'critical,high', '-rate-limit', '50', '-c', '10', '-timeout', '10'],
-                description='
+                description='Fingerprinting complete, scanning with Nuclei'
             ),
+            # Gobuster triggered by http_fingerprint
             ChainRule(
-                trigger_tool='
-                trigger_condition='
+                trigger_tool='http_fingerprint',
+                trigger_condition='has:services',
                 target_tool='gobuster',
                 priority=7,
                 args_template=['dir', '-u', 'http://{target}:{port}', '-w', 'data/wordlists/web_dirs_common.txt', '-x', 'js,json,php,asp,aspx,html,txt,bak,old,zip', '--no-error', '--timeout', '30s', '-t', '5', '--delay', '20ms'],
-                description='
-            ),
-            # Nikto - web server vulnerability scanner (complements nuclei)
-            ChainRule(
-                trigger_tool='nmap',
-                trigger_condition='service:http',
-                target_tool='nikto',
-                priority=8,
-                args_template=['-nointeractive', '-timeout', '10'],
-                description='Web server detected, scanning for server misconfigurations with Nikto'
+                description='Fingerprinting complete, discovering directories and files'
             ),
             # Dalfox - XSS scanner triggered after gobuster finds pages
             ChainRule(
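A small sketch of the resulting execution order for one discovered web service, using the priorities from the rules above; it assumes higher priority values are dispatched first, as the "runs before all other web tools" comment suggests:

```python
# (tool, priority, trigger) tuples copied from the rules above.
web_chain = [
    ('http_fingerprint', 11, 'nmap service:http'),
    ('nuclei', 9, 'http_fingerprint has:services'),
    ('nikto', 8, 'http_fingerprint has:services'),
    ('gobuster', 7, 'http_fingerprint has:services'),
]

for tool, priority, trigger in sorted(web_chain, key=lambda r: r[1], reverse=True):
    print(f"priority {priority:>2}: {tool:<16} (triggered by {trigger})")
```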
@@ -731,14 +855,8 @@ class ToolChaining:
                 args_template=['-a', '{target}'],
                 description='SMB service detected, enumerating shares and users (runs after CrackMapExec)'
             ),
-
-
-                trigger_condition='service:smb',
-                target_tool='smbmap',
-                priority=7,
-                args_template=['-H', '{target}'],
-                description='SMB service detected, mapping shares'
-            ),
+            # NOTE: smbmap removed - has upstream impacket pickling bug on Python 3.13+
+            # Use crackmapexec/netexec --shares instead (enum4linux rule above)
         ])
 
         # Active Directory attacks - smart chaining workflow
@@ -1143,13 +1261,16 @@ class ToolChaining:
         # )
         # )
 
+        # DISABLED: smbmap has upstream pickling bug - won't produce results
         # Writable SMB shares found → check for exploitability
+        # TODO: Add rule triggering from crackmapexec writable shares detection
         self.rules.append(
             ChainRule(
                 trigger_tool='smbmap',
                 trigger_condition='has:writable_shares',
                 target_tool='msf_auxiliary',
                 priority=10,
+                enabled=False,  # Disabled - smbmap broken
                 args_template=['auxiliary/scanner/smb/smb_version'],
                 description='Writable SMB shares found, checking for vulnerabilities'
             )
@@ -1908,28 +2029,42 @@ class ToolChaining:
 
         # vsftpd 2.3.4 backdoor (CVE-2011-2523)
         # Triggers backdoor shell on port 6200 when username contains :)
+        # Match FTP service with version 2.3.4 (nmap often shows just "ftp" + "2.3.4")
         self.rules.append(
             ChainRule(
                 trigger_tool='nmap',
-                trigger_condition='
+                trigger_condition='service:ftp & svc_version:2.3.4',
                 target_tool='msf_exploit',
                 priority=10,
                 args_template=['exploit/unix/ftp/vsftpd_234_backdoor'],
-                description='
+                description='FTP 2.3.4 detected - checking for vsftpd backdoor (CVE-2011-2523)',
                 category=CATEGORY_CTF
             )
         )
 
         # Samba 3.0.x usermap_script RCE (CVE-2007-2447)
         # Command injection in username field
+        # Match SMB service with version starting with 3 (nmap shows "3.X" or "3.0.x")
+        self.rules.append(
+            ChainRule(
+                trigger_tool='nmap',
+                trigger_condition='service:smb & svc_version:3.',
+                target_tool='msf_exploit',
+                priority=10,
+                args_template=['exploit/multi/samba/usermap_script'],
+                description='Samba 3.x detected - checking for usermap_script RCE (CVE-2007-2447)',
+                category=CATEGORY_CTF
+            )
+        )
+        # Also match netbios-ssn service (common nmap detection for SMB)
         self.rules.append(
             ChainRule(
                 trigger_tool='nmap',
-                trigger_condition='
+                trigger_condition='service:netbios-ssn & svc_version:3.',
                 target_tool='msf_exploit',
                 priority=10,
                 args_template=['exploit/multi/samba/usermap_script'],
-                description='Samba 3.
+                description='Samba 3.x detected (netbios-ssn) - checking for usermap_script RCE (CVE-2007-2447)',
                 category=CATEGORY_CTF
             )
         )
@@ -2132,14 +2267,15 @@ class ToolChaining:
         )
 
         # ProFTPD mod_copy (CVE-2015-3306) - file copy without auth
+        # Match FTP service with version 1.3.x (common ProFTPD versions)
         self.rules.append(
             ChainRule(
                 trigger_tool='nmap',
-                trigger_condition='
+                trigger_condition='service:ftp & svc_version:1.3',
                 target_tool='msf_exploit',
                 priority=8,
                 args_template=['exploit/unix/ftp/proftpd_modcopy_exec'],
-                description='
+                description='FTP 1.3.x detected - checking for ProFTPD mod_copy RCE (CVE-2015-3306)',
                 category=CATEGORY_CTF
             )
         )
@@ -4160,6 +4296,40 @@ class ToolChaining:
         if len(app_databases) > db_limit:
             logger.info(f"SQLMap auto-chaining limited to first {db_limit} of {len(app_databases)} application databases")
 
+        # === Post-exploitation chain rules (is_dba, file_read, os_cmd) ===
+        # Check for post-exploitation flags and fire appropriate chain rules
+        is_dba = parse_results.get('is_dba', False)
+        file_read_success = parse_results.get('file_read_success', False)
+        os_command_success = parse_results.get('os_command_success', False)
+
+        if is_dba or file_read_success or os_command_success:
+            from souleyez.log_config import get_logger
+            logger = get_logger(__name__)
+
+            # Build context with post-exploitation flags using injectable_url
+            post_exploit_context = {
+                'target': injectable_url,  # Use the correct injectable URL
+                'tool': tool,
+                'is_dba': is_dba,
+                'file_read_success': file_read_success,
+                'os_command_success': os_command_success,
+                'post_data': post_data,  # Preserve POST data for subsequent commands
+            }
+
+            if is_dba:
+                logger.info(f"SQLMap: DBA access confirmed! Evaluating post-exploitation chains...")
+            if file_read_success:
+                logger.info(f"SQLMap: File read successful! Evaluating file read chains...")
+            if os_command_success:
+                logger.info(f"SQLMap: OS command execution successful!")
+
+            # Evaluate chain rules - this will fire rules like has:is_dba
+            commands = self.evaluate_chains(tool, post_exploit_context)
+            if commands:
+                logger.info(f"SQLMap: Matched {len(commands)} post-exploitation chain rule(s)")
+                job_ids.extend(self._enqueue_commands(commands, tool, engagement_id, injectable_url, parent_job_id=job.get('id')))
+        # === END Post-exploitation chain rules ===
+
         return job_ids
         # === END SQLMap special handling ===
 
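A condensed illustration of what the new branch reacts to; the parser output below is hypothetical and only the gating logic is restated:

```python
# Hypothetical sqlmap parser output for one injectable endpoint.
parse_results = {
    'is_dba': True,                # current database user has DBA privileges
    'file_read_success': False,
    'os_command_success': False,
}

# The block above only runs when at least one flag is set; it then builds a context
# dict (target, tool, flags, post_data) and passes it to evaluate_chains(), which can
# match follow-up rules with conditions such as 'has:is_dba'.
flags = ('is_dba', 'file_read_success', 'os_command_success')
if any(parse_results.get(flag) for flag in flags):
    print("post-exploitation chain rules would be evaluated for this job")
```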
@@ -4877,6 +5047,28 @@ class ToolChaining:
             if not endpoint_url:
                 continue
 
+            # === Filter out non-injectable files ===
+            path_lower = endpoint_url.lower()
+            filename = path_lower.split('/')[-1] if '/' in path_lower else path_lower
+
+            # Skip Apache/nginx config files
+            if filename.startswith('.ht') or filename.startswith('.nginx'):
+                logger.debug(f"Skipping config file: {endpoint_url}")
+                continue
+
+            # Skip static files that can't have SQL injection
+            static_extensions = (
+                '.html', '.htm', '.txt', '.css', '.js', '.json',
+                '.xml', '.svg', '.png', '.jpg', '.jpeg', '.gif',
+                '.ico', '.woff', '.woff2', '.ttf', '.eot',
+                '.pdf', '.doc', '.docx', '.xls', '.xlsx',
+                '.bak', '.old', '.backup', '.swp', '.orig',
+                '.map', '.md', '.rst', '.log'
+            )
+            if any(filename.endswith(ext) for ext in static_extensions):
+                logger.debug(f"Skipping static file: {endpoint_url}")
+                continue
+
             # === SQLMap for testable endpoints ===
             if status_code in testable_statuses and created_sqlmap_jobs < max_sqlmap_jobs:
                 # For API endpoints without parameters, add test parameters
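The same filter restated as a standalone helper for illustration (abridged extension list; URLs are hypothetical):

```python
def looks_injectable(endpoint_url: str) -> bool:
    """Drop web-server config files and obviously static assets before queueing sqlmap."""
    static_extensions = ('.html', '.css', '.js', '.png', '.pdf', '.bak', '.log')  # abridged
    path_lower = endpoint_url.lower()
    filename = path_lower.split('/')[-1] if '/' in path_lower else path_lower
    if filename.startswith('.ht') or filename.startswith('.nginx'):
        return False
    return not any(filename.endswith(ext) for ext in static_extensions)


assert not looks_injectable('http://203.0.113.10/.htaccess')
assert not looks_injectable('http://203.0.113.10/assets/site.css')
assert looks_injectable('http://203.0.113.10/products.php?id=1')
```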
souleyez/detection/validator.py
CHANGED
@@ -156,8 +156,10 @@ class DetectionValidator:
             job_command = _reconstruct_command(job)
             # Use started_at or finished_at for execution time
             executed_at = job.get('started_at') or job.get('finished_at') or job.get('created_at')
-            # Job
-
+            # Job ran successfully if status is done, no_results, or warning
+            # (all of these sent network traffic that should be detectable by SIEM)
+            job_status = job.get('status', '')
+            success = job_status in ('done', 'no_results', 'warning')
 
             # Extract target IP from command (common patterns)
             target_ip = None

souleyez/docs/README.md
CHANGED
@@ -1,7 +1,7 @@
 # SoulEyez Documentation
 
-**Version:** 2.
-**Last Updated:** January
+**Version:** 2.26.0
+**Last Updated:** January 8, 2026
 **Organization:** CyberSoul Security
 
 Welcome to the SoulEyez documentation! This documentation covers architecture, development, user guides, and operational information for the SoulEyez penetration testing platform.

souleyez/docs/user-guide/installation.md
CHANGED
@@ -22,6 +22,17 @@ This guide walks you through installing souleyez on your system. The process tak
 - **RAM Usage**: Running multiple heavy tools (Metasploit, SQLMap, Hashcat) simultaneously requires additional RAM
 - **Disk I/O**: SSD recommended for database operations and log processing
 
+> **🐉 Kali Linux Recommended**
+>
+> SoulEyez performs significantly better on **Kali Linux** than other distributions:
+> - All pentesting tools pre-installed and optimized
+> - Metasploit database and RPC already configured
+> - Security-focused kernel and networking stack
+> - No dependency hunting or version conflicts
+> - Wordlists, databases, and tool configs ready to go
+>
+> While Ubuntu and other Debian-based distros are supported, you may experience slower setup times and occasional tool compatibility issues.
+
 ### Software Requirements
 
 - **Operating System**: Linux (Kali Linux recommended, any Debian-based distro supported)
@@ -40,12 +51,14 @@ pipx is the Python community's recommended way to install CLI applications. It h
 # One-time setup
 sudo apt install pipx
 pipx ensurepath
-source ~/.bashrc
+source ~/.bashrc  # Kali Linux: use 'source ~/.zshrc' instead
 
 # Install SoulEyez
 pipx install souleyez
 ```
 
+> **Kali Linux users:** Kali uses zsh by default. Use `source ~/.zshrc` instead of `source ~/.bashrc`
+
 On first run, SoulEyez will prompt you to install pentesting tools (nmap, sqlmap, gobuster, etc.).
 
 ```bash

souleyez/engine/background.py
CHANGED
@@ -711,7 +711,14 @@ def _run_rpc_exploit(cmd_spec: Dict[str, Any], log_path: str, jid: int = None, p
         )
 
         return 0
+    elif result.get('no_session'):
+        # Exploit ran but no session opened - this is "no results", not an error
+        # Return 1 but let parser set status to no_results
+        reason = result.get('reason', 'No session opened')
+        _append_worker_log(f"job {jid}: exploit completed - {reason}")
+        return 1
     else:
+        # True error (connection failed, RPC error, etc.)
         error = result.get('error', 'Unknown error')
         _append_worker_log(f"job {jid}: RPC exploit failed - {error}")
         return 1
@@ -1031,7 +1038,9 @@ def _is_true_error_exit_code(rc: int, tool: str) -> bool:
 
     # Tools that use non-zero exit codes for non-error conditions
     # Parser will determine the actual status based on output
-
+    # msf_exploit returns 1 when no session opened (exploit ran but target not vulnerable)
+    # nikto returns non-zero when it finds vulnerabilities (not an error!)
+    tools_with_nonzero_success = ['gobuster', 'hydra', 'medusa', 'msf_exploit', 'nikto']
 
     if tool.lower() in tools_with_nonzero_success:
         # Let parser determine status
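A paraphrase of how the expanded list changes exit-code handling; the fallback for other tools is an assumption, since the rest of the function is not shown in this hunk:

```python
def is_true_error_exit_code_sketch(rc: int, tool: str) -> bool:
    """Illustrative only: non-zero exit codes from these tools are not treated as errors."""
    tools_with_nonzero_success = ['gobuster', 'hydra', 'medusa', 'msf_exploit', 'nikto']
    if tool.lower() in tools_with_nonzero_success:
        return False  # the output parser decides the final job status
    return rc != 0    # assumption: other tools fall back to the usual rc check


assert is_true_error_exit_code_sketch(1, 'nikto') is False        # findings, not a failure
assert is_true_error_exit_code_sketch(1, 'msf_exploit') is False  # no session, not a failure
assert is_true_error_exit_code_sketch(1, 'nmap') is True
```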
@@ -1425,6 +1434,21 @@ def _detect_and_recover_stale_jobs() -> int:
                            "status": status,
                            "parse_result": parse_result
                        })
+
+                        # Mark for auto-chaining if conditions are met
+                        try:
+                            from souleyez.core.tool_chaining import ToolChaining
+                            chaining = ToolChaining()
+                            if chaining.is_enabled() and is_chainable(status):
+                                _update_job(jid, chainable=True)
+                                _append_worker_log(f"job {jid} stale recovery marked as chainable")
+                                logger.info("Stale job marked as chainable", extra={
+                                    "job_id": jid,
+                                    "tool": tool,
+                                    "status": status
+                                })
+                        except Exception as chain_err:
+                            _append_worker_log(f"job {jid} stale recovery chainable error: {chain_err}")
                     except Exception as parse_err:
                         _append_worker_log(f"job {jid} stale recovery parse exception: {parse_err}")
 