souleyez 2.17.0-py3-none-any.whl → 2.23.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- souleyez/__init__.py +1 -1
- souleyez/core/tool_chaining.py +99 -7
- souleyez/detection/validator.py +4 -2
- souleyez/docs/README.md +2 -2
- souleyez/docs/user-guide/installation.md +3 -1
- souleyez/engine/background.py +9 -1
- souleyez/engine/result_handler.py +4 -0
- souleyez/integrations/siem/splunk.py +58 -11
- souleyez/main.py +1 -1
- souleyez/parsers/smbmap_parser.py +30 -2
- souleyez/parsers/sqlmap_parser.py +54 -17
- souleyez/plugins/gobuster.py +96 -3
- souleyez/plugins/msf_exploit.py +6 -3
- souleyez/ui/interactive.py +34 -16
- souleyez/ui/setup_wizard.py +353 -58
- souleyez/ui/tool_setup.py +49 -52
- souleyez/utils/tool_checker.py +33 -11
- {souleyez-2.17.0.dist-info → souleyez-2.23.0.dist-info}/METADATA +5 -3
- {souleyez-2.17.0.dist-info → souleyez-2.23.0.dist-info}/RECORD +23 -23
- {souleyez-2.17.0.dist-info → souleyez-2.23.0.dist-info}/WHEEL +0 -0
- {souleyez-2.17.0.dist-info → souleyez-2.23.0.dist-info}/entry_points.txt +0 -0
- {souleyez-2.17.0.dist-info → souleyez-2.23.0.dist-info}/licenses/LICENSE +0 -0
- {souleyez-2.17.0.dist-info → souleyez-2.23.0.dist-info}/top_level.txt +0 -0
souleyez/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = '2.17.0'
+__version__ = '2.23.0'
souleyez/core/tool_chaining.py
CHANGED
@@ -423,6 +423,21 @@ class ChainRule:
                     result = True
                     break

+        elif cond_type == 'svc_version':
+            # Simple version string match (e.g., 'svc_version:2.3.4')
+            # Matches if any service has this exact version string
+            # Useful when nmap doesn't detect product name
+            services = context.get('services', [])
+            for service in services:
+                svc_version = (
+                    service.get('version', '') or
+                    service.get('service_version', '') or
+                    ''
+                )
+                if svc_version and cond_value.lower() in svc_version.lower():
+                    result = True
+                    break
+
         # Apply negation if needed
         return not result if negated else result

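A minimal standalone sketch of the `svc_version` matching added above, run against an illustrative nmap-style service list (the sample dictionaries are hypothetical; only the `version`/`service_version` keys that the condition reads are meaningful):

```python
# Standalone sketch of the svc_version condition added above (sample data is made up).
def matches_svc_version(cond_value, services):
    """Return True if any service's version string contains cond_value (case-insensitive)."""
    for service in services:
        svc_version = (
            service.get('version', '') or
            service.get('service_version', '') or
            ''
        )
        if svc_version and cond_value.lower() in svc_version.lower():
            return True
    return False

services = [
    {'port': 21, 'service': 'ftp', 'version': '2.3.4'},            # nmap found only a bare version
    {'port': 80, 'service': 'http', 'version': 'Apache httpd 2.4.7'},
]
print(matches_svc_version('2.3.4', services))  # True
print(matches_svc_version('5.0', services))    # False
```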
@@ -731,13 +746,16 @@ class ToolChaining:
                 args_template=['-a', '{target}'],
                 description='SMB service detected, enumerating shares and users (runs after CrackMapExec)'
             ),
+            # DISABLED: smbmap has upstream pickling bug with impacket (affects all versions)
+            # Use crackmapexec/netexec --shares instead (rule #10 above)
             ChainRule(
                 trigger_tool='nmap',
                 trigger_condition='service:smb',
                 target_tool='smbmap',
                 priority=7,
+                enabled=False,  # Disabled due to impacket pickling bug
                 args_template=['-H', '{target}'],
-                description='SMB service detected, mapping shares'
+                description='SMB service detected, mapping shares (DISABLED - use netexec)'
             ),
         ])

@@ -1143,13 +1161,16 @@ class ToolChaining:
         # )
         # )

+        # DISABLED: smbmap has upstream pickling bug - won't produce results
         # Writable SMB shares found → check for exploitability
+        # TODO: Add rule triggering from crackmapexec writable shares detection
         self.rules.append(
             ChainRule(
                 trigger_tool='smbmap',
                 trigger_condition='has:writable_shares',
                 target_tool='msf_auxiliary',
                 priority=10,
+                enabled=False,  # Disabled - smbmap broken
                 args_template=['auxiliary/scanner/smb/smb_version'],
                 description='Writable SMB shares found, checking for vulnerabilities'
             )
@@ -1908,28 +1929,42 @@ class ToolChaining:

         # vsftpd 2.3.4 backdoor (CVE-2011-2523)
         # Triggers backdoor shell on port 6200 when username contains :)
+        # Match FTP service with version 2.3.4 (nmap often shows just "ftp" + "2.3.4")
         self.rules.append(
             ChainRule(
                 trigger_tool='nmap',
-                trigger_condition='
+                trigger_condition='service:ftp & svc_version:2.3.4',
                 target_tool='msf_exploit',
                 priority=10,
                 args_template=['exploit/unix/ftp/vsftpd_234_backdoor'],
-                description='
+                description='FTP 2.3.4 detected - checking for vsftpd backdoor (CVE-2011-2523)',
                 category=CATEGORY_CTF
             )
         )

         # Samba 3.0.x usermap_script RCE (CVE-2007-2447)
         # Command injection in username field
+        # Match SMB service with version starting with 3 (nmap shows "3.X" or "3.0.x")
+        self.rules.append(
+            ChainRule(
+                trigger_tool='nmap',
+                trigger_condition='service:smb & svc_version:3.',
+                target_tool='msf_exploit',
+                priority=10,
+                args_template=['exploit/multi/samba/usermap_script'],
+                description='Samba 3.x detected - checking for usermap_script RCE (CVE-2007-2447)',
+                category=CATEGORY_CTF
+            )
+        )
+        # Also match netbios-ssn service (common nmap detection for SMB)
         self.rules.append(
             ChainRule(
                 trigger_tool='nmap',
-                trigger_condition='
+                trigger_condition='service:netbios-ssn & svc_version:3.',
                 target_tool='msf_exploit',
                 priority=10,
                 args_template=['exploit/multi/samba/usermap_script'],
-                description='Samba 3.
+                description='Samba 3.x detected (netbios-ssn) - checking for usermap_script RCE (CVE-2007-2447)',
                 category=CATEGORY_CTF
             )
         )
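The new CTF rules above combine two conditions with `&` (for example `service:ftp & svc_version:2.3.4`). Assuming the evaluator simply ANDs the sub-conditions, a rough sketch of how such a trigger could be checked against parsed nmap services (helper names and sample data are hypothetical; the real condition grammar is not shown in this diff):

```python
# Rough sketch only: assumes '&' joins sub-conditions with AND semantics.
def service_matches(cond, services):
    cond_type, _, value = cond.strip().partition(':')
    for svc in services:
        if cond_type == 'service' and value.lower() in svc.get('service', '').lower():
            return True
        if cond_type == 'svc_version' and value.lower() in svc.get('version', '').lower():
            return True
    return False

def trigger_matches(condition, services):
    # Every '&'-separated sub-condition must match at least one service.
    return all(service_matches(part, services) for part in condition.split('&'))

services = [{'port': 21, 'service': 'ftp', 'version': '2.3.4'}]  # hypothetical scan result
print(trigger_matches('service:ftp & svc_version:2.3.4', services))  # True
print(trigger_matches('service:ftp & svc_version:1.3', services))    # False
```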
@@ -2132,14 +2167,15 @@ class ToolChaining:
         )

         # ProFTPD mod_copy (CVE-2015-3306) - file copy without auth
+        # Match FTP service with version 1.3.x (common ProFTPD versions)
         self.rules.append(
             ChainRule(
                 trigger_tool='nmap',
-                trigger_condition='
+                trigger_condition='service:ftp & svc_version:1.3',
                 target_tool='msf_exploit',
                 priority=8,
                 args_template=['exploit/unix/ftp/proftpd_modcopy_exec'],
-                description='
+                description='FTP 1.3.x detected - checking for ProFTPD mod_copy RCE (CVE-2015-3306)',
                 category=CATEGORY_CTF
             )
         )
@@ -4160,6 +4196,40 @@ class ToolChaining:
            if len(app_databases) > db_limit:
                logger.info(f"SQLMap auto-chaining limited to first {db_limit} of {len(app_databases)} application databases")

+            # === Post-exploitation chain rules (is_dba, file_read, os_cmd) ===
+            # Check for post-exploitation flags and fire appropriate chain rules
+            is_dba = parse_results.get('is_dba', False)
+            file_read_success = parse_results.get('file_read_success', False)
+            os_command_success = parse_results.get('os_command_success', False)
+
+            if is_dba or file_read_success or os_command_success:
+                from souleyez.log_config import get_logger
+                logger = get_logger(__name__)
+
+                # Build context with post-exploitation flags using injectable_url
+                post_exploit_context = {
+                    'target': injectable_url,  # Use the correct injectable URL
+                    'tool': tool,
+                    'is_dba': is_dba,
+                    'file_read_success': file_read_success,
+                    'os_command_success': os_command_success,
+                    'post_data': post_data,  # Preserve POST data for subsequent commands
+                }
+
+                if is_dba:
+                    logger.info(f"SQLMap: DBA access confirmed! Evaluating post-exploitation chains...")
+                if file_read_success:
+                    logger.info(f"SQLMap: File read successful! Evaluating file read chains...")
+                if os_command_success:
+                    logger.info(f"SQLMap: OS command execution successful!")
+
+                # Evaluate chain rules - this will fire rules like has:is_dba
+                commands = self.evaluate_chains(tool, post_exploit_context)
+                if commands:
+                    logger.info(f"SQLMap: Matched {len(commands)} post-exploitation chain rule(s)")
+                    job_ids.extend(self._enqueue_commands(commands, tool, engagement_id, injectable_url, parent_job_id=job.get('id')))
+            # === END Post-exploitation chain rules ===
+
            return job_ids
            # === END SQLMap special handling ===

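The block above builds a context dictionary and relies on rules elsewhere that trigger on flags such as `has:is_dba`. A small sketch of how a `has:<flag>` condition could resolve against that context (the `has:` semantics are assumed here, and the URL and POST body are hypothetical):

```python
# Sketch of a "has:<flag>" style check against the post-exploitation context built above.
# The 'has:' condition semantics are an assumption; they are not part of this diff.
def condition_matches(condition, context):
    cond_type, _, cond_value = condition.partition(':')
    if cond_type == 'has':
        return bool(context.get(cond_value))
    return False

post_exploit_context = {
    'target': 'http://10.0.0.5/login.php',            # hypothetical injectable URL
    'tool': 'sqlmap',
    'is_dba': True,
    'file_read_success': False,
    'os_command_success': False,
    'post_data': 'username=&password=&submit=Login',  # hypothetical POST body
}

print(condition_matches('has:is_dba', post_exploit_context))              # True
print(condition_matches('has:os_command_success', post_exploit_context))  # False
```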
@@ -4877,6 +4947,28 @@ class ToolChaining:
                if not endpoint_url:
                    continue

+                # === Filter out non-injectable files ===
+                path_lower = endpoint_url.lower()
+                filename = path_lower.split('/')[-1] if '/' in path_lower else path_lower
+
+                # Skip Apache/nginx config files
+                if filename.startswith('.ht') or filename.startswith('.nginx'):
+                    logger.debug(f"Skipping config file: {endpoint_url}")
+                    continue
+
+                # Skip static files that can't have SQL injection
+                static_extensions = (
+                    '.html', '.htm', '.txt', '.css', '.js', '.json',
+                    '.xml', '.svg', '.png', '.jpg', '.jpeg', '.gif',
+                    '.ico', '.woff', '.woff2', '.ttf', '.eot',
+                    '.pdf', '.doc', '.docx', '.xls', '.xlsx',
+                    '.bak', '.old', '.backup', '.swp', '.orig',
+                    '.map', '.md', '.rst', '.log'
+                )
+                if any(filename.endswith(ext) for ext in static_extensions):
+                    logger.debug(f"Skipping static file: {endpoint_url}")
+                    continue
+
                # === SQLMap for testable endpoints ===
                if status_code in testable_statuses and created_sqlmap_jobs < max_sqlmap_jobs:
                    # For API endpoints without parameters, add test parameters
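The filter above can be exercised on its own; this sketch applies the same checks to a few hypothetical gobuster-discovered URLs:

```python
# Standalone version of the non-injectable-file filter added above (URLs are made up).
STATIC_EXTENSIONS = (
    '.html', '.htm', '.txt', '.css', '.js', '.json',
    '.xml', '.svg', '.png', '.jpg', '.jpeg', '.gif',
    '.ico', '.woff', '.woff2', '.ttf', '.eot',
    '.pdf', '.doc', '.docx', '.xls', '.xlsx',
    '.bak', '.old', '.backup', '.swp', '.orig',
    '.map', '.md', '.rst', '.log',
)

def is_sqlmap_candidate(endpoint_url):
    """Return False for web server config files and static assets."""
    path_lower = endpoint_url.lower()
    filename = path_lower.split('/')[-1] if '/' in path_lower else path_lower
    if filename.startswith('.ht') or filename.startswith('.nginx'):
        return False
    return not any(filename.endswith(ext) for ext in STATIC_EXTENSIONS)

urls = [
    'http://10.0.0.5/login.php',
    'http://10.0.0.5/.htaccess',
    'http://10.0.0.5/assets/app.js',
    'http://10.0.0.5/backup.old',
]
print([u for u in urls if is_sqlmap_candidate(u)])  # only login.php survives
```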
souleyez/detection/validator.py
CHANGED
@@ -156,8 +156,10 @@ class DetectionValidator:
            job_command = _reconstruct_command(job)
            # Use started_at or finished_at for execution time
            executed_at = job.get('started_at') or job.get('finished_at') or job.get('created_at')
-            # Job
-
+            # Job ran successfully if status is done, no_results, or warning
+            # (all of these sent network traffic that should be detectable by SIEM)
+            job_status = job.get('status', '')
+            success = job_status in ('done', 'no_results', 'warning')

            # Extract target IP from command (common patterns)
            target_ip = None
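A quick illustration of the widened success check, filtering hypothetical job records down to the ones whose traffic a SIEM should have had a chance to see:

```python
# Sketch of the new status check: done, no_results and warning all count as "ran".
jobs = [
    {'id': 101, 'tool': 'nmap',     'status': 'done'},
    {'id': 102, 'tool': 'hydra',    'status': 'no_results'},
    {'id': 103, 'tool': 'sqlmap',   'status': 'warning'},
    {'id': 104, 'tool': 'gobuster', 'status': 'error'},
]
detectable = [j for j in jobs if j.get('status', '') in ('done', 'no_results', 'warning')]
print([j['id'] for j in detectable])  # [101, 102, 103] - the errored job is excluded
```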
souleyez/docs/README.md
CHANGED
@@ -1,7 +1,7 @@
 # SoulEyez Documentation

-**Version:** 2.
-**Last Updated:** January
+**Version:** 2.23.0
+**Last Updated:** January 7, 2026
 **Organization:** CyberSoul Security

 Welcome to the SoulEyez documentation! This documentation covers architecture, development, user guides, and operational information for the SoulEyez penetration testing platform.
souleyez/docs/user-guide/installation.md
CHANGED
@@ -40,12 +40,14 @@ pipx is the Python community's recommended way to install CLI applications. It h
 # One-time setup
 sudo apt install pipx
 pipx ensurepath
-source ~/.bashrc
+source ~/.bashrc  # Kali Linux: use 'source ~/.zshrc' instead

 # Install SoulEyez
 pipx install souleyez
 ```

+> **Kali Linux users:** Kali uses zsh by default. Use `source ~/.zshrc` instead of `source ~/.bashrc`
+
 On first run, SoulEyez will prompt you to install pentesting tools (nmap, sqlmap, gobuster, etc.).

 ```bash
souleyez/engine/background.py
CHANGED
@@ -711,7 +711,14 @@ def _run_rpc_exploit(cmd_spec: Dict[str, Any], log_path: str, jid: int = None, p
            )

            return 0
+        elif result.get('no_session'):
+            # Exploit ran but no session opened - this is "no results", not an error
+            # Return 1 but let parser set status to no_results
+            reason = result.get('reason', 'No session opened')
+            _append_worker_log(f"job {jid}: exploit completed - {reason}")
+            return 1
        else:
+            # True error (connection failed, RPC error, etc.)
            error = result.get('error', 'Unknown error')
            _append_worker_log(f"job {jid}: RPC exploit failed - {error}")
            return 1
@@ -1031,7 +1038,8 @@ def _is_true_error_exit_code(rc: int, tool: str) -> bool:

    # Tools that use non-zero exit codes for non-error conditions
    # Parser will determine the actual status based on output
-
+    # msf_exploit returns 1 when no session opened (exploit ran but target not vulnerable)
+    tools_with_nonzero_success = ['gobuster', 'hydra', 'medusa', 'msf_exploit']

    if tool.lower() in tools_with_nonzero_success:
        # Let parser determine status
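Taken together, the two hunks above mean a Metasploit exploit that runs cleanly but opens no session exits with code 1 and is then handed to the output parser instead of being marked as a hard error. A condensed sketch of that decision (the function below mirrors only the part of the logic visible in this diff; the real implementation may check more):

```python
# Condensed sketch of the exit-code routing shown above (surrounding plumbing omitted).
TOOLS_WITH_NONZERO_SUCCESS = ['gobuster', 'hydra', 'medusa', 'msf_exploit']

def is_true_error_exit_code(rc, tool):
    """Non-zero exit codes from these tools are not treated as hard errors."""
    if rc == 0:
        return False
    if tool.lower() in TOOLS_WITH_NONZERO_SUCCESS:
        return False  # let the output parser decide (e.g. no_results for msf_exploit)
    return True

print(is_true_error_exit_code(1, 'msf_exploit'))  # False - parser decides (no session opened)
print(is_true_error_exit_code(1, 'nmap'))         # True  - genuine failure
```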
souleyez/engine/result_handler.py
CHANGED
@@ -305,6 +305,8 @@ def parse_nmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Di
    # Import into database
    hm = HostManager()
    result = hm.import_nmap_results(engagement_id, parsed)
+    logger.info(f"Nmap import: {result['hosts_added']} hosts, {result['services_added']} services in engagement {engagement_id}")
+    logger.debug(f"Info scripts to process: {len(parsed.get('info_scripts', []))}")

    # Check for CVEs and common issues
    fm = FindingsManager()
@@ -436,11 +438,13 @@ def parse_nmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Di
    for info in parsed.get('info_scripts', []):
        host_ip = info.get('host_ip')
        if not host_ip:
+            logger.warning(f"Info script missing host_ip: {info.get('script')}")
            continue

        # Find host ID
        host = hm.get_host_by_ip(engagement_id, host_ip)
        if not host:
+            logger.warning(f"Host not found for info script: {host_ip} in engagement {engagement_id}")
            continue

        host_id = host['id']
souleyez/integrations/siem/splunk.py
CHANGED
@@ -348,7 +348,15 @@ class SplunkSIEMClient(SIEMClient):
                # HEC wraps in 'event' key, or data might be at top level
                event_data = parsed.get('event', parsed) if isinstance(parsed, dict) else {}
            except (json_lib.JSONDecodeError, TypeError):
-
+                # Try to extract embedded JSON from syslog lines
+                # Format: "Jan 7 14:23:38 host program {json...}"
+                import re
+                json_match = re.search(r'\{.*\}', raw_str)
+                if json_match:
+                    try:
+                        event_data = json_lib.loads(json_match.group())
+                    except (json_lib.JSONDecodeError, TypeError):
+                        pass

            # Helper to get field from event_data first, then raw_result
            def get_field(*keys, default=''):
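The fallback above pulls an embedded JSON payload out of a syslog-wrapped line with a greedy brace regex. A runnable illustration on a made-up Suricata-style event (stdlib `json` stands in for the module's `json_lib` alias):

```python
import json
import re

# Illustrative syslog line wrapping a JSON event (hypothetical content).
raw_str = 'Jan  7 14:23:38 sensor01 suricata {"event_type": "alert", "src_ip": "10.0.0.9"}'

event_data = {}
json_match = re.search(r'\{.*\}', raw_str)   # greedy: grabs the outermost braces
if json_match:
    try:
        event_data = json.loads(json_match.group())
    except (json.JSONDecodeError, TypeError):
        pass

print(event_data.get('event_type'))  # alert
```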
@@ -370,10 +378,15 @@ class SplunkSIEMClient(SIEMClient):
            rule_id = get_field('rule_id', 'rule_name', 'savedsearch_name', 'alert')
            rule_name = get_field('rule_name', 'search_name') or rule_id

-            # For plain log events (no alert fields), use
+            # For plain log events (no alert fields), use event_type or sourcetype
            if not rule_id:
+                # Prefer Suricata event_type over generic sourcetype
+                event_type = get_field('event_type')
                sourcetype = raw_result.get('sourcetype', '')
-                if
+                if event_type:
+                    rule_id = event_type
+                    rule_name = f"Suricata: {event_type}"
+                elif sourcetype:
                    rule_id = sourcetype
                    rule_name = f"Log: {sourcetype}"

@@ -389,19 +402,53 @@ class SplunkSIEMClient(SIEMClient):
            if not source_ip:
                source_ip = raw_result.get('host', '')

-            # Extract description
+            # Extract description - try multiple sources
            description = get_field('description', 'signature', 'message')
+
+            # Suricata-specific: check nested alert object
+            if not description and event_data.get('alert'):
+                alert_obj = event_data['alert']
+                if isinstance(alert_obj, dict):
+                    description = alert_obj.get('signature', alert_obj.get('category', ''))
+
+            # Suricata event_type with context
            if not description:
-                # Fallback: use event_type as description
                event_type = get_field('event_type', 'category')
                if event_type:
-
-
-
+                    # Add context based on event type
+                    if event_type == 'dns' and event_data.get('dns'):
+                        dns = event_data['dns']
+                        rrname = dns.get('rrname', '') if isinstance(dns, dict) else ''
+                        description = f"DNS: {rrname}" if rrname else f"DNS query"
+                    elif event_type == 'http' and event_data.get('http'):
+                        http = event_data['http']
+                        hostname = http.get('hostname', '') if isinstance(http, dict) else ''
+                        description = f"HTTP: {hostname}" if hostname else "HTTP request"
+                    elif event_type == 'flow':
+                        app_proto = get_field('app_proto', default='')
+                        description = f"Flow: {app_proto}" if app_proto else "Network flow"
+                    elif event_type == 'alert':
+                        description = "Suricata alert"
+                    else:
+                        description = f"{event_type}: {get_field('action', default='detected')}"
+
+            # For plain log events, try to extract something useful from _raw
            if not description and raw_str:
-                #
-
-
+                # Skip syslog header to get actual message
+                # Format: "Mon DD HH:MM:SS hostname program: message"
+                import re
+                # Try to extract message after "program:" or "program["
+                msg_match = re.search(r'^\w+\s+\d+\s+[\d:]+\s+\S+\s+\S+[:\[]\s*(.+)', raw_str)
+                if msg_match:
+                    description = msg_match.group(1).strip()[:150]
+                else:
+                    # Fallback: clean up raw log
+                    clean_raw = raw_str.replace('\n', ' ').strip()
+                    # Skip if it's just timestamps/IPs with no real content
+                    if len(clean_raw) > 50:
+                        description = clean_raw[:150] + ('...' if len(clean_raw) > 150 else '')
+                    else:
+                        description = clean_raw if clean_raw else 'No details available'

            # Extract MITRE info - check event_data first
            mitre_tactics = []
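A trimmed, standalone version of the description fallback above, fed two made-up Suricata events, shows the kind of strings that end up in the finding description (the helper name and sample events are illustrative only):

```python
# Trimmed sketch of the Suricata description fallback added above (sample events are made up).
def describe(event_data):
    alert_obj = event_data.get('alert')
    if isinstance(alert_obj, dict):
        sig = alert_obj.get('signature', alert_obj.get('category', ''))
        if sig:
            return sig
    event_type = event_data.get('event_type', '')
    if event_type == 'dns' and isinstance(event_data.get('dns'), dict):
        rrname = event_data['dns'].get('rrname', '')
        return f"DNS: {rrname}" if rrname else "DNS query"
    if event_type == 'http' and isinstance(event_data.get('http'), dict):
        hostname = event_data['http'].get('hostname', '')
        return f"HTTP: {hostname}" if hostname else "HTTP request"
    return f"{event_type}: detected" if event_type else 'No details available'

print(describe({'event_type': 'alert', 'alert': {'signature': 'ET SCAN Nmap Scripting Engine'}}))
print(describe({'event_type': 'dns', 'dns': {'rrname': 'target.example.com'}}))
```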
souleyez/main.py
CHANGED
@@ -173,7 +173,7 @@ def _check_privileged_tools():


 @click.group()
-@click.version_option(version='2.17.0')
+@click.version_option(version='2.23.0')
 def cli():
     """SoulEyez - AI-Powered Pentesting Platform by CyberSoul Security"""
     from souleyez.log_config import init_logging
souleyez/parsers/smbmap_parser.py
CHANGED
@@ -49,16 +49,44 @@ def parse_smbmap_output(output: str, target: str = "") -> Dict[str, Any]:
                'timestamp': str
            },
            ...
-        ]
+        ],
+        'smb_detected': bool,  # True if SMB service was detected
+        'hosts_count': int,    # Number of hosts serving SMB
+        'error': str           # Error message if tool crashed
    }
    """
    result = {
        'target': target,
        'status': None,
        'shares': [],
-        'files': []
+        'files': [],
+        'smb_detected': False,
+        'hosts_count': 0,
+        'error': None
    }

+    # Check for SMB detection (even if tool crashes later)
+    # [*] Detected 1 hosts serving SMB
+    smb_detected_match = re.search(r'\[\*\]\s*Detected\s+(\d+)\s+hosts?\s+serving\s+SMB', output)
+    if smb_detected_match:
+        result['smb_detected'] = True
+        result['hosts_count'] = int(smb_detected_match.group(1))
+
+    # Check for Python traceback (tool crash)
+    if 'Traceback (most recent call last):' in output:
+        # Extract error message from traceback
+        error_match = re.search(r'(?:Error|Exception).*?[\'"]([^\'"]+)[\'"]', output, re.DOTALL)
+        if error_match:
+            result['error'] = error_match.group(1)
+        else:
+            # Try to get the last line of the traceback
+            traceback_lines = output.split('Traceback (most recent call last):')[-1].strip().split('\n')
+            for line in reversed(traceback_lines):
+                line = line.strip()
+                if line and not line.startswith('File') and not line.startswith('raise'):
+                    result['error'] = line[:200]  # Limit length
+                    break
+
    lines = output.split('\n')
    in_share_table = False
    current_share = None
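The parser changes above can be exercised against a captured smbmap crash; this sketch runs the SMB-detection check and the last-line traceback fallback on a shortened, hypothetical output sample:

```python
import re

# Shortened, hypothetical smbmap output: SMB detected, then the tool crashes.
output = """[*] Detected 1 hosts serving SMB
Traceback (most recent call last):
  File "/usr/bin/smbmap", line 42, in <module>
    main()
TypeError: cannot pickle '_thread.lock' object"""

result = {'smb_detected': False, 'hosts_count': 0, 'error': None}

m = re.search(r'\[\*\]\s*Detected\s+(\d+)\s+hosts?\s+serving\s+SMB', output)
if m:
    result['smb_detected'] = True
    result['hosts_count'] = int(m.group(1))

# Fallback path only: walk the traceback backwards to the final error line.
if 'Traceback (most recent call last):' in output:
    tb = output.split('Traceback (most recent call last):')[-1].strip().split('\n')
    for line in reversed(tb):
        line = line.strip()
        if line and not line.startswith('File') and not line.startswith('raise'):
            result['error'] = line[:200]
            break

print(result)
# {'smb_detected': True, 'hosts_count': 1, 'error': "TypeError: cannot pickle '_thread.lock' object"}
```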
souleyez/parsers/sqlmap_parser.py
CHANGED
@@ -80,10 +80,15 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
    current_method = 'GET'
    current_post_data = None

+    # Track POST form URLs separately to prevent GET URL testing from overwriting them
+    # This fixes bug where chain rules get wrong URL when SQLMap tests multiple URLs
+    last_post_form_url = None
+    last_post_form_data = None
+
    for i, line in enumerate(lines):
        line = line.strip()

-        # Extract URL being tested
+        # Extract URL being tested (GET requests typically)
        if 'testing URL' in line:
            url_match = re.search(r"testing URL '([^']+)'", line)
            if url_match:
@@ -100,6 +105,9 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
                current_url = url_match.group(2)
                if current_url not in result['urls_tested']:
                    result['urls_tested'].append(current_url)
+                # Save POST form URL separately for later use
+                if current_method == 'POST':
+                    last_post_form_url = current_url

        # Extract POST data (appears after "POST http://..." line)
        # Format: "POST data: username=&password=&submit=Login"
@@ -107,6 +115,8 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
            post_data_match = re.search(r'^POST data:\s*(.+)$', line)
            if post_data_match:
                current_post_data = post_data_match.group(1).strip()
+                # Associate POST data with the POST form URL
+                last_post_form_data = current_post_data

        # Handle resumed injection points from stored session
        # Pattern: "sqlmap resumed the following injection point(s) from stored session:"
@@ -129,15 +139,23 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
            else:
                method = current_method  # Use current context

+            # For POST parameters, use the saved POST form URL instead of current_url
+            if method == 'POST' and last_post_form_url:
+                effective_url = last_post_form_url
+                effective_post_data = last_post_form_data or current_post_data
+            else:
+                effective_url = current_url or target
+                effective_post_data = current_post_data if method == 'POST' else None
+
            # Mark as confirmed injection
            result['sql_injection_confirmed'] = True
            result['injectable_parameter'] = param
-            result['injectable_url'] =
+            result['injectable_url'] = effective_url
            result['injectable_method'] = method

            # Add vulnerability entry
            result['vulnerabilities'].append({
-                'url':
+                'url': effective_url,
                'parameter': param,
                'vuln_type': 'sqli',
                'injectable': True,
@@ -147,10 +165,10 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:

            # Collect injection point
            injection_point = {
-                'url':
+                'url': effective_url,
                'parameter': param,
                'method': method,
-                'post_data':
+                'post_data': effective_post_data,
                'techniques': []
            }
            if not any(
@@ -318,8 +336,17 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
            )

            if not already_added:
+                # For POST parameters, use the saved POST form URL instead of current_url
+                # This prevents bug where GET URL testing overwrites the correct POST form URL
+                if param_method == 'POST' and last_post_form_url:
+                    effective_url = last_post_form_url
+                    effective_post_data = last_post_form_data or current_post_data
+                else:
+                    effective_url = current_url or target
+                    effective_post_data = current_post_data if param_method == 'POST' else None
+
                result['vulnerabilities'].append({
-                    'url':
+                    'url': effective_url,
                    'parameter': param,
                    'vuln_type': 'sqli',
                    'injectable': True,
@@ -332,17 +359,17 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
                # Set confirmation flags
                result['sql_injection_confirmed'] = True
                result['injectable_parameter'] = param
-                result['injectable_url'] =
+                result['injectable_url'] = effective_url
                result['injectable_method'] = param_method  # GET, POST, etc.
-                if param_method == 'POST' and
-                    result['injectable_post_data'] =
+                if param_method == 'POST' and effective_post_data:
+                    result['injectable_post_data'] = effective_post_data

                # Collect ALL injection points for fallback
                injection_point = {
-                    'url':
+                    'url': effective_url,
                    'parameter': param,
                    'method': param_method,
-                    'post_data':
+                    'post_data': effective_post_data,
                    'techniques': techniques
                }
                # Avoid duplicates
@@ -364,8 +391,18 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
        if param_match:
            method = param_match.group(1) or current_method
            param = param_match.group(2)
+
+            # For POST parameters, use the saved POST form URL instead of current_url
+            # This prevents bug where GET URL testing overwrites the correct POST form URL
+            if method == 'POST' and last_post_form_url:
+                effective_url = last_post_form_url
+                effective_post_data = last_post_form_data or current_post_data
+            else:
+                effective_url = current_url or target
+                effective_post_data = current_post_data if method == 'POST' else None
+
            result['vulnerabilities'].append({
-                'url':
+                'url': effective_url,
                'parameter': param,
                'vuln_type': 'sqli',
                'injectable': True,
@@ -376,17 +413,17 @@ def parse_sqlmap_output(output: str, target: str = "") -> Dict[str, Any]:
            # Set confirmation flags
            result['sql_injection_confirmed'] = True
            result['injectable_parameter'] = param
-            result['injectable_url'] =
+            result['injectable_url'] = effective_url
            result['injectable_method'] = method
-            if method == 'POST' and
-                result['injectable_post_data'] =
+            if method == 'POST' and effective_post_data:
+                result['injectable_post_data'] = effective_post_data

            # Collect ALL injection points for fallback
            injection_point = {
-                'url':
+                'url': effective_url,
                'parameter': param,
                'method': method,
-                'post_data':
+                'post_data': effective_post_data,
                'techniques': []  # Technique details not available at this detection point
            }
            # Avoid duplicates