souleyez-2.16.0-py3-none-any.whl → souleyez-2.26.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- souleyez/__init__.py +1 -1
- souleyez/assets/__init__.py +1 -0
- souleyez/assets/souleyez-icon.png +0 -0
- souleyez/core/msf_sync_manager.py +15 -5
- souleyez/core/tool_chaining.py +221 -29
- souleyez/detection/validator.py +4 -2
- souleyez/docs/README.md +2 -2
- souleyez/docs/user-guide/installation.md +14 -1
- souleyez/engine/background.py +25 -1
- souleyez/engine/result_handler.py +129 -0
- souleyez/integrations/siem/splunk.py +58 -11
- souleyez/main.py +103 -4
- souleyez/parsers/crackmapexec_parser.py +101 -43
- souleyez/parsers/dnsrecon_parser.py +50 -35
- souleyez/parsers/enum4linux_parser.py +101 -21
- souleyez/parsers/http_fingerprint_parser.py +319 -0
- souleyez/parsers/hydra_parser.py +56 -5
- souleyez/parsers/impacket_parser.py +123 -44
- souleyez/parsers/john_parser.py +47 -14
- souleyez/parsers/msf_parser.py +20 -5
- souleyez/parsers/nmap_parser.py +145 -28
- souleyez/parsers/smbmap_parser.py +69 -25
- souleyez/parsers/sqlmap_parser.py +72 -26
- souleyez/parsers/theharvester_parser.py +21 -13
- souleyez/plugins/gobuster.py +96 -3
- souleyez/plugins/http_fingerprint.py +592 -0
- souleyez/plugins/msf_exploit.py +6 -3
- souleyez/plugins/nuclei.py +41 -17
- souleyez/ui/interactive.py +130 -20
- souleyez/ui/setup_wizard.py +424 -58
- souleyez/ui/tool_setup.py +52 -52
- souleyez/utils/tool_checker.py +75 -13
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/METADATA +16 -3
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/RECORD +38 -34
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/WHEEL +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/entry_points.txt +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/licenses/LICENSE +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/top_level.txt +0 -0
souleyez/engine/result_handler.py CHANGED

```diff
@@ -108,6 +108,8 @@ def handle_job_result(job: Dict[str, Any]) -> Optional[Dict[str, Any]]:
         parse_result = parse_nikto_job(engagement_id, log_path, job)
     elif tool == 'dalfox':
         parse_result = parse_dalfox_job(engagement_id, log_path, job)
+    elif tool == 'http_fingerprint':
+        parse_result = parse_http_fingerprint_job(engagement_id, log_path, job)

     # NOTE: Auto-chaining is now handled in background.py after parsing completes
     # This avoids duplicate job creation and gives better control over timing
```
```diff
@@ -204,6 +206,8 @@ def reparse_job(job_id: int) -> Dict[str, Any]:
         parse_result = parse_nikto_job(engagement_id, log_path, job)
     elif tool == 'dalfox':
         parse_result = parse_dalfox_job(engagement_id, log_path, job)
+    elif tool == 'http_fingerprint':
+        parse_result = parse_http_fingerprint_job(engagement_id, log_path, job)
     else:
         return {'success': False, 'message': f'No parser available for tool: {tool}'}

```
```diff
@@ -305,6 +309,8 @@ def parse_nmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Dict[str, Any]:
     # Import into database
     hm = HostManager()
     result = hm.import_nmap_results(engagement_id, parsed)
+    logger.info(f"Nmap import: {result['hosts_added']} hosts, {result['services_added']} services in engagement {engagement_id}")
+    logger.debug(f"Info scripts to process: {len(parsed.get('info_scripts', []))}")

     # Check for CVEs and common issues
     fm = FindingsManager()
```
```diff
@@ -432,6 +438,44 @@ def parse_nmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Dict[str, Any]:
         )
         findings_added += 1

+    # Store info script findings (vnc-info, ssh-hostkey, etc.)
+    for info in parsed.get('info_scripts', []):
+        host_ip = info.get('host_ip')
+        if not host_ip:
+            logger.warning(f"Info script missing host_ip: {info.get('script')}")
+            continue
+
+        # Find host ID
+        host = hm.get_host_by_ip(engagement_id, host_ip)
+        if not host:
+            logger.warning(f"Host not found for info script: {host_ip} in engagement {engagement_id}")
+            continue
+
+        host_id = host['id']
+
+        # Build finding title and description
+        script_name = info.get('script', 'unknown')
+        title = info.get('title', script_name)
+        description = info.get('description', '')
+
+        # Add port to title if available
+        port = info.get('port')
+        if port:
+            title = f"{title} (port {port})"
+
+        fm.add_finding(
+            engagement_id=engagement_id,
+            host_id=host_id,
+            title=title,
+            finding_type='info',
+            severity='info',
+            description=description,
+            port=port,
+            tool='nmap',
+            evidence=f"Host: {host_ip}:{port if port else 'N/A'}\nScript: {script_name}"
+        )
+        findings_added += 1
+
     # Build host details list for summary
     host_details = []
     for host_data in parsed.get('hosts', []):
```
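For reference, the new loop assumes `parsed['info_scripts']` entries shaped roughly as below; the key names come from the `.get()` calls in the added block, while the values are invented for illustration:

```python
# Hypothetical example of one parsed['info_scripts'] entry; the keys mirror
# the .get() calls in the new block, the values are made up.
info_script_entry = {
    'host_ip': '10.0.0.5',        # required: entries without it are skipped
    'script': 'ssh-hostkey',      # NSE script name, used as the fallback title
    'title': 'SSH Host Keys',     # optional human-readable title
    'description': '2048 SHA256:... (RSA)',
    'port': 22,                   # optional: appended to the title as "(port 22)"
}
```

Each such entry is stored as an informational finding against the matching host, so output from scripts like vnc-info or ssh-hostkey is no longer dropped.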
```diff
@@ -3028,6 +3072,91 @@ def parse_nikto_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Dict[str, Any]:
         return {'error': str(e)}


+def parse_http_fingerprint_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Parse HTTP fingerprint results.
+
+    Returns fingerprint data for use in auto-chaining context.
+    This enables downstream tools (nikto, nuclei, etc.) to make smarter decisions
+    based on detected WAF, CDN, or managed hosting platform.
+    """
+    try:
+        from souleyez.parsers.http_fingerprint_parser import (
+            parse_http_fingerprint_output,
+            build_fingerprint_context,
+            get_tool_recommendations
+        )
+        from souleyez.storage.hosts import HostManager
+        from urllib.parse import urlparse
+
+        target = job.get('target', '')
+
+        # Read log file
+        with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
+            output = f.read()
+
+        parsed = parse_http_fingerprint_output(output, target)
+
+        # Extract host from target URL
+        parsed_url = urlparse(target)
+        target_host = parsed_url.hostname or target
+
+        # Update host with fingerprint info if we have useful data
+        if target_host and (parsed.get('server') or parsed.get('managed_hosting')):
+            hm = HostManager()
+            host_data = {
+                'ip': target_host,
+                'status': 'up'
+            }
+            # Store server info in notes or a dedicated field
+            # For now, we just ensure the host exists
+            hm.add_or_update_host(engagement_id, host_data)
+
+        # Build fingerprint context for chaining
+        fingerprint_context = build_fingerprint_context(parsed)
+
+        # Get tool recommendations
+        recommendations = get_tool_recommendations(parsed)
+
+        # Determine status
+        if parsed.get('error'):
+            status = STATUS_ERROR
+        elif parsed.get('managed_hosting') or parsed.get('waf') or parsed.get('cdn'):
+            status = STATUS_DONE  # Found useful info
+        elif parsed.get('server'):
+            status = STATUS_DONE
+        else:
+            status = STATUS_NO_RESULTS
+
+        return {
+            'tool': 'http_fingerprint',
+            'status': status,
+            'target': target,
+            'target_host': target_host,
+            # Core fingerprint data
+            'server': parsed.get('server'),
+            'managed_hosting': parsed.get('managed_hosting'),
+            'waf': parsed.get('waf', []),
+            'cdn': parsed.get('cdn', []),
+            'technologies': parsed.get('technologies', []),
+            'status_code': parsed.get('status_code'),
+            # For auto-chaining context
+            'http_fingerprint': fingerprint_context.get('http_fingerprint', {}),
+            'recommendations': recommendations,
+            # Pass through for downstream chains
+            'services': [{
+                'ip': target_host,
+                'port': parsed_url.port or (443 if parsed_url.scheme == 'https' else 80),
+                'service_name': 'https' if parsed_url.scheme == 'https' else 'http',
+                'product': parsed.get('server', ''),
+            }],
+        }
+
+    except Exception as e:
+        logger.error(f"Error parsing http_fingerprint job: {e}")
+        return {'error': str(e)}
+
+
 def parse_dalfox_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Dict[str, Any]:
     """Parse Dalfox XSS scanner results."""
     try:
```
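A rough sketch of how a chaining layer might consume the new parser's return value; only the key names come from the function above, while the values and the decisions are hypothetical:

```python
# Illustrative only: in practice `result` would come from parse_http_fingerprint_job(...)
result = {
    'tool': 'http_fingerprint',
    'waf': ['Cloudflare'],          # hypothetical detection
    'cdn': ['Cloudflare'],
    'services': [{'ip': 'shop.example.com', 'port': 443,
                  'service_name': 'https', 'product': 'cloudflare'}],
}

if result.get('waf') or result.get('cdn'):
    # A chaining layer could throttle or skip noisy scanners behind a WAF/CDN.
    print("WAF/CDN in front of target:", result['waf'] + result['cdn'])

for svc in result.get('services', []):
    print(f"queue web tools against {svc['ip']}:{svc['port']} ({svc['service_name']})")
```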
souleyez/integrations/siem/splunk.py CHANGED

```diff
@@ -348,7 +348,15 @@ class SplunkSIEMClient(SIEMClient):
                 # HEC wraps in 'event' key, or data might be at top level
                 event_data = parsed.get('event', parsed) if isinstance(parsed, dict) else {}
             except (json_lib.JSONDecodeError, TypeError):
-
+                # Try to extract embedded JSON from syslog lines
+                # Format: "Jan 7 14:23:38 host program {json...}"
+                import re
+                json_match = re.search(r'\{.*\}', raw_str)
+                if json_match:
+                    try:
+                        event_data = json_lib.loads(json_match.group())
+                    except (json_lib.JSONDecodeError, TypeError):
+                        pass

             # Helper to get field from event_data first, then raw_result
             def get_field(*keys, default=''):
```
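The new fallback follows a common pattern for syslog-wrapped HEC events: if the raw string is not pure JSON, pull out the first `{...}` span and parse that instead. A standalone sketch of the same idea (the sample log line is invented; the stdlib `json` module stands in for the module aliased as `json_lib` in the file):

```python
import json
import re

raw_str = 'Jan  7 14:23:38 sensor01 suricata: {"event_type": "alert", "src_ip": "10.0.0.5"}'

try:
    event_data = json.loads(raw_str)
except (json.JSONDecodeError, TypeError):
    event_data = {}
    match = re.search(r'\{.*\}', raw_str)   # greedy: grabs the outermost brace pair
    if match:
        try:
            event_data = json.loads(match.group())
        except (json.JSONDecodeError, TypeError):
            pass

print(event_data.get('event_type'))  # -> alert
```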
```diff
@@ -370,10 +378,15 @@ class SplunkSIEMClient(SIEMClient):
             rule_id = get_field('rule_id', 'rule_name', 'savedsearch_name', 'alert')
             rule_name = get_field('rule_name', 'search_name') or rule_id

-            # For plain log events (no alert fields), use
+            # For plain log events (no alert fields), use event_type or sourcetype
             if not rule_id:
+                # Prefer Suricata event_type over generic sourcetype
+                event_type = get_field('event_type')
                 sourcetype = raw_result.get('sourcetype', '')
-                if
+                if event_type:
+                    rule_id = event_type
+                    rule_name = f"Suricata: {event_type}"
+                elif sourcetype:
                     rule_id = sourcetype
                     rule_name = f"Log: {sourcetype}"

```
```diff
@@ -389,19 +402,53 @@ class SplunkSIEMClient(SIEMClient):
             if not source_ip:
                 source_ip = raw_result.get('host', '')

-            # Extract description
+            # Extract description - try multiple sources
             description = get_field('description', 'signature', 'message')
+
+            # Suricata-specific: check nested alert object
+            if not description and event_data.get('alert'):
+                alert_obj = event_data['alert']
+                if isinstance(alert_obj, dict):
+                    description = alert_obj.get('signature', alert_obj.get('category', ''))
+
+            # Suricata event_type with context
             if not description:
-                # Fallback: use event_type as description
                 event_type = get_field('event_type', 'category')
                 if event_type:
-
-
-
+                    # Add context based on event type
+                    if event_type == 'dns' and event_data.get('dns'):
+                        dns = event_data['dns']
+                        rrname = dns.get('rrname', '') if isinstance(dns, dict) else ''
+                        description = f"DNS: {rrname}" if rrname else f"DNS query"
+                    elif event_type == 'http' and event_data.get('http'):
+                        http = event_data['http']
+                        hostname = http.get('hostname', '') if isinstance(http, dict) else ''
+                        description = f"HTTP: {hostname}" if hostname else "HTTP request"
+                    elif event_type == 'flow':
+                        app_proto = get_field('app_proto', default='')
+                        description = f"Flow: {app_proto}" if app_proto else "Network flow"
+                    elif event_type == 'alert':
+                        description = "Suricata alert"
+                    else:
+                        description = f"{event_type}: {get_field('action', default='detected')}"
+
+            # For plain log events, try to extract something useful from _raw
             if not description and raw_str:
-                #
-
-
+                # Skip syslog header to get actual message
+                # Format: "Mon DD HH:MM:SS hostname program: message"
+                import re
+                # Try to extract message after "program:" or "program["
+                msg_match = re.search(r'^\w+\s+\d+\s+[\d:]+\s+\S+\s+\S+[:\[]\s*(.+)', raw_str)
+                if msg_match:
+                    description = msg_match.group(1).strip()[:150]
+                else:
+                    # Fallback: clean up raw log
+                    clean_raw = raw_str.replace('\n', ' ').strip()
+                    # Skip if it's just timestamps/IPs with no real content
+                    if len(clean_raw) > 50:
+                        description = clean_raw[:150] + ('...' if len(clean_raw) > 150 else '')
+                    else:
+                        description = clean_raw if clean_raw else 'No details available'

             # Extract MITRE info - check event_data first
             mitre_tactics = []
```
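The `event_type` branches give plain Suricata eve.json records a readable description even when there is no alert signature. A condensed, self-contained mirror of that mapping (not the package's actual helper; the sample events are invented):

```python
def describe(event_data: dict) -> str:
    # Simplified version of the branches in the hunk above.
    event_type = event_data.get('event_type', '')
    if event_type == 'dns' and isinstance(event_data.get('dns'), dict):
        rrname = event_data['dns'].get('rrname', '')
        return f"DNS: {rrname}" if rrname else "DNS query"
    if event_type == 'http' and isinstance(event_data.get('http'), dict):
        hostname = event_data['http'].get('hostname', '')
        return f"HTTP: {hostname}" if hostname else "HTTP request"
    if event_type == 'flow':
        app_proto = event_data.get('app_proto', '')
        return f"Flow: {app_proto}" if app_proto else "Network flow"
    if event_type == 'alert':
        return "Suricata alert"
    return f"{event_type}: detected"

print(describe({'event_type': 'dns', 'dns': {'rrname': 'example.com'}}))  # DNS: example.com
print(describe({'event_type': 'flow', 'app_proto': 'tls'}))               # Flow: tls
```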
souleyez/main.py CHANGED
```diff
@@ -173,7 +173,7 @@ def _check_privileged_tools():


 @click.group()
-@click.version_option(version='2.16.0')
+@click.version_option(version='2.26.0')
 def cli():
     """SoulEyez - AI-Powered Pentesting Platform by CyberSoul Security"""
     from souleyez.log_config import init_logging
```
```diff
@@ -1388,19 +1388,24 @@ def _run_doctor(fix=False, verbose=False):
     path_dirs = os.environ.get('PATH', '').split(':')
     pipx_bin = str(Path.home() / '.local' / 'bin')
     go_bin = str(Path.home() / 'go' / 'bin')
+
+    # Detect shell config file (zsh for Kali, bash for others)
+    shell = os.environ.get('SHELL', '/bin/bash')
+    shell_rc = '~/.zshrc' if 'zsh' in shell else '~/.bashrc'
+
     if pipx_bin in path_dirs:
         if verbose:
             check_pass("PATH includes ~/.local/bin (pipx)")
     else:
         if Path(pipx_bin).exists() and any(Path(pipx_bin).iterdir()):
-            check_warn("~/.local/bin not in PATH", "Add to
+            check_warn("~/.local/bin not in PATH", f"Add to {shell_rc}: export PATH=\"$HOME/.local/bin:$PATH\"")

     if go_bin in path_dirs:
         if verbose:
             check_pass("PATH includes ~/go/bin")
     else:
         if Path(go_bin).exists() and any(Path(go_bin).iterdir()):
-            check_warn("~/go/bin not in PATH", "Add to
+            check_warn("~/go/bin not in PATH", f"Add to {shell_rc}: export PATH=\"$HOME/go/bin:$PATH\"")

     # Check database is writable
     if db_path.exists():
```
```diff
@@ -1430,8 +1435,10 @@
     # Section 7: MSF Database (if msfconsole available)
     if shutil.which('msfconsole'):
         click.echo(click.style("Metasploit", bold=True))
+        # Check user config first, then system-wide config (Kali uses system-wide)
         msf_db = Path.home() / '.msf4' / 'database.yml'
-
+        system_msf_db = Path('/usr/share/metasploit-framework/config/database.yml')
+        if msf_db.exists() or system_msf_db.exists():
             check_pass("MSF database configured")
         else:
             check_fail("MSF database not initialized", "msfdb init")
```
```diff
@@ -1513,6 +1520,98 @@ def tutorial():
     run_tutorial()


+@cli.command('install-desktop')
+@click.option('--remove', is_flag=True, help='Remove the desktop shortcut')
+def install_desktop(remove):
+    """Install SoulEyez desktop shortcut in Applications menu.
+
+    Creates a .desktop file so SoulEyez appears in your
+    Applications > Security menu with its icon.
+    """
+    import shutil
+    from importlib import resources
+
+    applications_dir = Path.home() / '.local' / 'share' / 'applications'
+    icons_dir = Path.home() / '.local' / 'share' / 'icons'
+    desktop_file = applications_dir / 'souleyez.desktop'
+    icon_dest = icons_dir / 'souleyez.png'
+
+    if remove:
+        # Remove desktop shortcut
+        removed = False
+        if desktop_file.exists():
+            desktop_file.unlink()
+            click.echo(click.style(" Removed desktop shortcut", fg='green'))
+            removed = True
+        if icon_dest.exists():
+            icon_dest.unlink()
+            click.echo(click.style(" Removed icon", fg='green'))
+            removed = True
+        if removed:
+            click.echo(click.style("\nSoulEyez removed from Applications menu.", fg='cyan'))
+        else:
+            click.echo(click.style("No desktop shortcut found.", fg='yellow'))
+        return
+
+    click.echo(click.style("\nInstalling SoulEyez desktop shortcut...\n", fg='cyan', bold=True))
+
+    # Create directories
+    applications_dir.mkdir(parents=True, exist_ok=True)
+    icons_dir.mkdir(parents=True, exist_ok=True)
+
+    # Find and copy icon
+    try:
+        # Try importlib.resources first (Python 3.9+)
+        try:
+            from importlib.resources import files
+            icon_source = files('souleyez.assets').joinpath('souleyez-icon.png')
+            with open(icon_source, 'rb') as src:
+                icon_data = src.read()
+        except (ImportError, TypeError, FileNotFoundError):
+            # Fallback: find icon relative to this file
+            icon_source = Path(__file__).parent / 'assets' / 'souleyez-icon.png'
+            with open(icon_source, 'rb') as src:
+                icon_data = src.read()
+
+        with open(icon_dest, 'wb') as dst:
+            dst.write(icon_data)
+        click.echo(click.style(" Installed icon", fg='green'))
+    except Exception as e:
+        click.echo(click.style(f" Warning: Could not copy icon: {e}", fg='yellow'))
+        icon_dest = "utilities-terminal"  # Fallback to system icon
+
+    # Create .desktop file
+    desktop_content = f"""[Desktop Entry]
+Name=SoulEyez
+Comment=AI-Powered Penetration Testing Platform
+Exec=souleyez interactive
+Icon={icon_dest}
+Terminal=true
+Type=Application
+Categories=Security;System;Network;
+Keywords=pentest;security;hacking;nmap;metasploit;
+"""
+
+    desktop_file.write_text(desktop_content)
+    click.echo(click.style(" Created desktop entry", fg='green'))
+
+    # Update desktop database (optional, may not be available)
+    try:
+        import subprocess
+        subprocess.run(['update-desktop-database', str(applications_dir)],
+                       capture_output=True, check=False)
+    except Exception:
+        pass  # Not critical if this fails
+
+    click.echo()
+    click.echo(click.style("SoulEyez added to Applications menu!", fg='green', bold=True))
+    click.echo()
+    click.echo("You can find it under:")
+    click.echo(click.style(" Applications > Security > SoulEyez", fg='cyan'))
+    click.echo()
+    click.echo("To remove: souleyez install-desktop --remove")
+
+
 def main():
     """Main entry point."""
     cli()
```
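After `souleyez install-desktop` runs, the generated entry can be sanity-checked from Python; the snippet below is purely illustrative and assumes the default paths used by the new command (undo with `souleyez install-desktop --remove`):

```python
from configparser import ConfigParser
from pathlib import Path

desktop_file = Path.home() / '.local' / 'share' / 'applications' / 'souleyez.desktop'

if desktop_file.exists():
    cp = ConfigParser()
    cp.optionxform = str                 # .desktop keys are case-sensitive
    cp.read(desktop_file)
    entry = cp['Desktop Entry']
    print(entry['Name'], '->', entry['Exec'])   # SoulEyez -> souleyez interactive
    print('Categories:', entry['Categories'])   # Security;System;Network;
else:
    print('shortcut not installed')
```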
souleyez/parsers/crackmapexec_parser.py CHANGED

```diff
@@ -72,42 +72,61 @@ def _parse_content(content: str, target: str) -> Dict[str, Any]:
         'auth_info': {}
     }

+    # Remove ANSI color codes first
+    content = re.sub(r'\x1b\[[0-9;]*m', '', content)
+
     for line in content.split('\n'):
         # Parse host information (Windows OR Unix/Samba)
-        # Format
-
-
+        # Format variations:
+        # SMB 10.0.0.88 445 HOSTNAME [*] Windows/Unix ... (name:HOSTNAME) (domain:DOMAIN) ...
+        # SMB 10.0.0.88 445 HOSTNAME [*] Windows Server 2016 ...
+        # WINRM 10.0.0.88 5985 HOSTNAME [*] http://10.0.0.88:5985/wsman
+
+        os_keywords = ['Windows', 'Unix', 'Samba', 'Linux', 'Server', 'Microsoft']
+        if any(proto in line for proto in ['SMB', 'WINRM', 'SSH', 'RDP']) and '[*]' in line:
+            # Try multiple patterns for host info
+            host_match = None
+
+            # Pattern 1: Standard format with flexible whitespace
+            host_match = re.search(r'(\d+\.\d+\.\d+\.\d+)\s+(\d+)\s+(\S+)\s+\[\*\]\s*(.+)', line)
+
+            # Pattern 2: Protocol prefix format
+            if not host_match:
+                host_match = re.search(r'(?:SMB|WINRM|SSH|RDP)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+)\s+(\S+)\s+\[\*\]\s*(.+)', line)
+
             if host_match:
                 ip = host_match.group(1)
                 port = int(host_match.group(2))
                 hostname = host_match.group(3)
                 details = host_match.group(4).strip()

-                #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                '
-
-
-
-
-
-
+                # Only process as host info if it looks like OS/version info
+                if any(kw in details for kw in os_keywords) or '(domain:' in details:
+                    # Extract domain from (domain:DOMAIN) or domain: pattern
+                    domain_match = re.search(r'\(?domain:?\s*([^)\s]+)\)?', details, re.IGNORECASE)
+                    domain = domain_match.group(1) if domain_match else None
+
+                    # Extract OS info (everything before the first parenthesis)
+                    os_match = re.match(r'([^(]+)', details)
+                    os_info = os_match.group(1).strip() if os_match else details
+
+                    # Extract SMB signing status (multiple formats)
+                    signing_match = re.search(r'\(?signing:?\s*(\w+)\)?', details, re.IGNORECASE)
+                    signing = signing_match.group(1) if signing_match else None
+
+                    # Extract SMBv1 status
+                    smbv1_match = re.search(r'\(?SMBv1:?\s*(\w+)\)?', details, re.IGNORECASE)
+                    smbv1 = smbv1_match.group(1) if smbv1_match else None
+
+                    findings['hosts'].append({
+                        'ip': ip,
+                        'port': port,
+                        'hostname': hostname,
+                        'domain': domain,
+                        'os': os_info,
+                        'signing': signing,
+                        'smbv1': smbv1
+                    })

         # Parse authentication status
         # Format: SMB 10.0.0.14 445 HOSTNAME [+] \: (Guest)
```
```diff
@@ -122,13 +141,23 @@ def _parse_content(content: str, target: str) -> Dict[str, Any]:
             }

         # Parse share enumeration (shares WITH permissions)
-
-
-
-
-
-
-
+        # Format variations:
+        # SMB ... ADMIN$ READ,WRITE Remote Admin
+        # SMB ... ADMIN$ READ, WRITE Remote Admin (with space)
+        # SMB ... C$ READ ONLY Default share
+        share_perm_match = re.search(
+            r'SMB.*\s+(\S+\$?)\s+(READ,?\s*WRITE|READ\s*ONLY|WRITE\s*ONLY|READ|WRITE|NO\s*ACCESS)\s*(.*)$',
+            line, re.IGNORECASE
+        )
+        if share_perm_match:
+            share_name = share_perm_match.group(1)
+            # Skip if it looks like a header or status line
+            if share_name not in ['Share', 'Permissions', 'shares']:
+                findings['shares'].append({
+                    'name': share_name,
+                    'permissions': share_perm_match.group(2).upper().replace(' ', ''),
+                    'comment': share_perm_match.group(3).strip() if share_perm_match.group(3) else ''
+                })
         # Parse share enumeration (shares WITHOUT explicit permissions - just listed)
         elif 'SMB' in line and not ('Share' in line and 'Permissions' in line) and not '-----' in line:
             # Look for lines with share names (ending with $, or common names like print$, public, IPC$)
```
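The new share-permission pattern can be exercised directly; the sample line below is invented, while the regex is the one added above:

```python
import re

line = "SMB    10.0.0.88   445    DC01    ADMIN$    READ,WRITE    Remote Admin"

m = re.search(
    r'SMB.*\s+(\S+\$?)\s+(READ,?\s*WRITE|READ\s*ONLY|WRITE\s*ONLY|READ|WRITE|NO\s*ACCESS)\s*(.*)$',
    line, re.IGNORECASE
)
if m:
    # Same normalization as in the parser: uppercase and strip spaces from the permission.
    print(m.group(1), m.group(2).upper().replace(' ', ''), m.group(3).strip())
    # -> ADMIN$ READ,WRITE Remote Admin
```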
```diff
@@ -146,9 +175,16 @@ def _parse_content(content: str, target: str) -> Dict[str, Any]:
                     'comment': remark
                 })

-        # Parse user enumeration
-
-
+        # Parse user enumeration with flexible format
+        # Format variations:
+        # username badpwdcount: 0 desc: Description
+        # username badpwdcount:0 desc:Description
+        # username baddpwdcount: 0 description: Description
+        if 'badpwdcount' in line.lower() or 'baddpwdcount' in line.lower():
+            user_match = re.search(
+                r'(\S+)\s+bad+pwdcount:?\s*(\d+)\s+(?:desc(?:ription)?:?\s*)?(.+)?',
+                line, re.IGNORECASE
+            )
         if user_match:
             findings['users'].append({
                 'username': user_match.group(1),
```
```diff
@@ -165,15 +201,37 @@ def _parse_content(content: str, target: str) -> Dict[str, Any]:
             })

         # Parse valid credentials (but not Guest authentication)
-
-
+        # Format variations:
+        # [+] DOMAIN\username:password (Pwn3d!)
+        # [+] DOMAIN\\username:password (Pwn3d!)
+        # [+] username:password (Pwn3d!)
+        # [+] DOMAIN/username:password (Pwn3d!)
+        if '[+]' in line and ('Pwn3d' in line or ':' in line):
+            # Try domain\user:pass format first
+            cred_match = re.search(
+                r'\[\+\]\s*([^\\/:]+)[\\\/]+([^:]+):([^\s(]+)\s*(\(Pwn3d!?\))?',
+                line, re.IGNORECASE
+            )
             if cred_match:
                 findings['credentials'].append({
-                    'domain': cred_match.group(1),
-                    'username': cred_match.group(2),
-                    'password': cred_match.group(3),
+                    'domain': cred_match.group(1).strip(),
+                    'username': cred_match.group(2).strip(),
+                    'password': cred_match.group(3).strip(),
                     'admin': bool(cred_match.group(4))
                 })
+            else:
+                # Try user:pass format (no domain)
+                cred_match = re.search(
+                    r'\[\+\]\s*([^:@\s]+):([^\s(]+)\s*(\(Pwn3d!?\))?',
+                    line, re.IGNORECASE
+                )
+                if cred_match and '@' not in cred_match.group(1):
+                    findings['credentials'].append({
+                        'domain': '',
+                        'username': cred_match.group(1).strip(),
+                        'password': cred_match.group(2).strip(),
+                        'admin': bool(cred_match.group(3))
+                    })

         # Extract admin credentials for auto-chaining
         admin_creds = [c for c in findings['credentials'] if c.get('admin')]
```
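Likewise, the tightened credential pattern can be checked against a typical `(Pwn3d!)` line; the domain, user, and password in the sample are invented, the regex is the one added above:

```python
import re

line = r"SMB   10.0.0.88  445  DC01  [+] CORP\administrator:P@ssw0rd! (Pwn3d!)"

m = re.search(
    r'\[\+\]\s*([^\\/:]+)[\\\/]+([^:]+):([^\s(]+)\s*(\(Pwn3d!?\))?',
    line, re.IGNORECASE
)
if m:
    cred = {
        'domain': m.group(1).strip(),
        'username': m.group(2).strip(),
        'password': m.group(3).strip(),
        'admin': bool(m.group(4)),   # True only when "(Pwn3d!)" is present
    }
    print(cred)
    # {'domain': 'CORP', 'username': 'administrator', 'password': 'P@ssw0rd!', 'admin': True}
```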
|