souleyez 2.27.0__py3-none-any.whl → 2.32.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of souleyez might be problematic.

@@ -321,7 +321,7 @@ def _next_job_id(jobs: List[Dict[str, Any]]) -> int:
     return maxid + 1


-def enqueue_job(tool: str, target: str, args: List[str], label: str = "", engagement_id: int = None, metadata: Dict[str, Any] = None, parent_id: int = None, reason: str = None, rule_id: int = None) -> int:
+def enqueue_job(tool: str, target: str, args: List[str], label: str = "", engagement_id: int = None, metadata: Dict[str, Any] = None, parent_id: int = None, reason: str = None, rule_id: int = None, skip_scope_check: bool = False) -> int:
     with _lock:
         jobs = _read_jobs()
         jid = _next_job_id(jobs)
@@ -339,6 +339,43 @@ def enqueue_job(tool: str, target: str, args: List[str], label: str = "", engage

         # Merge parent_id, reason, and rule_id into metadata
         job_metadata = metadata or {}
+
+        # Scope validation - check if target is within engagement scope
+        if not skip_scope_check and engagement_id:
+            try:
+                from souleyez.security.scope_validator import ScopeValidator, ScopeViolationError
+                validator = ScopeValidator(engagement_id)
+                result = validator.validate_target(target)
+                enforcement = validator.get_enforcement_mode()
+
+                if not result.is_in_scope and validator.has_scope_defined():
+                    if enforcement == 'block':
+                        validator.log_validation(target, result, 'blocked', job_id=jid)
+                        raise ScopeViolationError(
+                            f"Target '{target}' is out of scope. {result.reason}"
+                        )
+                    elif enforcement == 'warn':
+                        validator.log_validation(target, result, 'warned', job_id=jid)
+                        if 'warnings' not in job_metadata:
+                            job_metadata['warnings'] = []
+                        job_metadata['warnings'].append(
+                            f"SCOPE WARNING: {target} may be out of scope. {result.reason}"
+                        )
+                        logger.warning("Out-of-scope target allowed (warn mode)", extra={
+                            "target": target,
+                            "engagement_id": engagement_id,
+                            "reason": result.reason
+                        })
+                else:
+                    validator.log_validation(target, result, 'allowed', job_id=jid)
+            except ScopeViolationError:
+                raise  # Re-raise scope violations
+            except Exception as e:
+                # Don't block jobs if scope validation fails unexpectedly
+                logger.warning("Scope validation error (allowing job)", extra={
+                    "target": target,
+                    "error": str(e)
+                })
         if parent_id is not None:
             job_metadata['parent_id'] = parent_id
         if reason:
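
A hedged sketch of how callers might use the new skip_scope_check parameter and the block/warn enforcement paths shown above. The engagement ID and targets are placeholders, and the import path for enqueue_job is an assumption (its module name is not visible in this diff); ScopeViolationError comes from souleyez.security.scope_validator as imported in the hunk.

    # Hypothetical import path for enqueue_job; only the scope_validator path appears in the diff.
    from souleyez.jobs import enqueue_job
    from souleyez.security.scope_validator import ScopeViolationError

    try:
        # Default path: the target is validated against engagement 7's scope.
        job_id = enqueue_job('nmap', '10.0.0.5', ['-sV'], engagement_id=7)
    except ScopeViolationError as exc:
        # Raised only when enforcement mode is 'block' and the target is out of scope.
        print(f"Job refused: {exc}")

    # Explicit opt-out: the ScopeValidator is never consulted for this job.
    job_id = enqueue_job('nmap', '203.0.113.9', ['-sV'], engagement_id=7, skip_scope_check=True)
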
@@ -10,6 +10,95 @@ from .job_status import STATUS_DONE, STATUS_NO_RESULTS, STATUS_WARNING, STATUS_E
 logger = logging.getLogger(__name__)


+# Common error patterns that indicate tool failure (not "no results")
+TOOL_ERROR_PATTERNS = {
+    'common': [
+        'connection refused',
+        'connection timed out',
+        'no route to host',
+        'network is unreachable',
+        'name or service not known',
+        'temporary failure in name resolution',
+        'host is down',
+        'connection reset by peer',
+    ],
+    'nmap': [
+        'host seems down',
+        'note: host seems down',
+        'failed to resolve',
+    ],
+    'gobuster': [
+        'timeout occurred during the request',
+        'error on running gobuster',
+        'unable to connect',
+        'context deadline exceeded',
+    ],
+    'hydra': [
+        'can not connect',
+        'could not connect',
+        'error connecting',
+        'target does not support',
+    ],
+    'nikto': [
+        'error connecting to host',
+        'unable to connect',
+        'no web server found',
+    ],
+    'nuclei': [
+        'could not connect',
+        'context deadline exceeded',
+        'no address found',
+    ],
+    'ffuf': [
+        'error making request',
+        'context deadline exceeded',
+    ],
+    'sqlmap': [
+        'connection timed out',
+        'unable to connect',
+        'target url content is not stable',
+    ],
+    'enum4linux': [
+        'could not initialise',
+        'nt_status_connection_refused',
+        'nt_status_host_unreachable',
+        'nt_status_io_timeout',
+    ],
+    'smbmap': [
+        'could not connect',
+        'connection error',
+        'nt_status_connection_refused',
+    ],
+}
+
+
+def detect_tool_error(log_content: str, tool: str) -> Optional[str]:
+    """
+    Check log content for tool errors that indicate failure (not just "no results").
+
+    Args:
+        log_content: The log file content
+        tool: Tool name (lowercase)
+
+    Returns:
+        Error pattern found, or None if no error detected
+    """
+    log_lower = log_content.lower()
+
+    # Check common patterns
+    for pattern in TOOL_ERROR_PATTERNS['common']:
+        if pattern in log_lower:
+            return pattern
+
+    # Check tool-specific patterns
+    tool_patterns = TOOL_ERROR_PATTERNS.get(tool, [])
+    for pattern in tool_patterns:
+        if pattern in log_lower:
+            return pattern
+
+    return None
+
+
 def handle_job_result(job: Dict[str, Any]) -> Optional[Dict[str, Any]]:
     """
     Process completed job and parse results into database.
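
A small illustration of detect_tool_error as defined above, meant to run inside the same module; the log excerpts are invented. Matching is case-insensitive (the content is lowercased first), the 'common' patterns apply to every tool, and the parsers in the hunks below all use the same precedence: error pattern found, then results, then no-results.

    nmap_log = "Note: Host seems down. If it is really up, try -Pn"
    gobuster_log = "Found: /admin (Status: 200)"

    print(detect_tool_error(nmap_log, 'nmap'))          # 'host seems down' (nmap-specific pattern)
    print(detect_tool_error(gobuster_log, 'gobuster'))  # None - results, not a failure
    print(detect_tool_error("connect: Connection refused", 'ffuf'))  # 'connection refused' (common pattern)
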
@@ -527,9 +616,16 @@ def parse_nmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Di
                 'version': svc.get('version', '')
             })

+    # Check for nmap errors before determining status
+    with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
+        log_content = f.read()
+    nmap_error = detect_tool_error(log_content, 'nmap')
+
     # Determine status based on results
     hosts_up = len([h for h in parsed.get('hosts', []) if h.get('status') == 'up'])
-    if hosts_up > 0:
+    if nmap_error:
+        status = STATUS_ERROR  # Tool failed to run properly
+    elif hosts_up > 0:
         status = STATUS_DONE  # Found hosts
     else:
         status = STATUS_NO_RESULTS  # No hosts up
@@ -1122,8 +1218,13 @@ def parse_gobuster_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -
         exclude_length = length_match.group(1)
         logger.info(f"Gobuster wildcard detected: Length {exclude_length}b")

+    # Check for gobuster errors
+    gobuster_error = detect_tool_error(log_content, 'gobuster')
+
     # Determine status based on results
-    if wildcard_detected:
+    if gobuster_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif wildcard_detected:
         # Wildcard detected - warning status (triggers auto-retry)
         status = STATUS_WARNING
     elif stats['total'] > 0:
@@ -1504,8 +1605,13 @@ def parse_sqlmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) ->

     stats = get_sqli_stats(parsed)

+    # Check for sqlmap errors
+    sqlmap_error = detect_tool_error(log_content, 'sqlmap')
+
     # Determine status based on results
-    if stats['sqli_confirmed'] or stats['xss_possible'] or stats['fi_possible']:
+    if sqlmap_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif stats['sqli_confirmed'] or stats['xss_possible'] or stats['fi_possible']:
         status = STATUS_DONE  # Found injection vulnerabilities
     else:
         status = STATUS_NO_RESULTS  # No injections found
@@ -2011,11 +2117,22 @@ def parse_smbmap_job(engagement_id: int, log_path: str, job: Dict[str, Any]) ->
            )
            findings_added += 1

+    # Check for smbmap errors
+    smbmap_error = detect_tool_error(log_content, 'smbmap')
+
+    # Determine status
+    if smbmap_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif shares_added > 0 or findings_added > 0:
+        status = STATUS_DONE
+    else:
+        status = STATUS_NO_RESULTS
+
     return {
         'tool': 'smbmap',
         'host': parsed['target'],
         'connection_status': parsed.get('status', 'Unknown'),  # SMB connection status
-        'status': STATUS_DONE if (shares_added > 0 or findings_added > 0) else STATUS_NO_RESULTS,  # Job status
+        'status': status,  # Job status
         'shares_added': shares_added,
         'files_added': files_added,
         'findings_added': findings_added
@@ -2382,8 +2499,13 @@ def parse_hydra_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> D
            )
            findings_added += 1

+    # Check for hydra errors
+    hydra_error = detect_tool_error(log_content, 'hydra')
+
     # Determine status based on results
-    if len(parsed.get('credentials', [])) > 0:
+    if hydra_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif len(parsed.get('credentials', [])) > 0:
         status = STATUS_DONE  # Found valid credentials
     elif len(parsed.get('usernames', [])) > 0:
         status = STATUS_DONE  # Found valid usernames (partial success is still a result)
@@ -2495,8 +2617,15 @@ def parse_nuclei_job(engagement_id: int, log_path: str, job: Dict[str, Any]) ->
            )
            findings_added += 1

+    # Check for nuclei errors
+    with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
+        log_content = f.read()
+    nuclei_error = detect_tool_error(log_content, 'nuclei')
+
     # Determine status based on results
-    if parsed.get('findings_count', 0) > 0:
+    if nuclei_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif parsed.get('findings_count', 0) > 0:
         status = STATUS_DONE  # Found vulnerabilities
     else:
         status = STATUS_NO_RESULTS  # No vulnerabilities found
@@ -2618,6 +2747,9 @@ def parse_enum4linux_job(engagement_id: int, log_path: str, job: Dict[str, Any])
            'ip': parsed['target']
        })

+    # Check for enum4linux errors
+    enum4linux_error = detect_tool_error(log_content, 'enum4linux')
+
     # Determine status: done if we found any results (shares, users, or findings)
     has_results = (
         findings_added > 0 or
@@ -2626,9 +2758,16 @@ def parse_enum4linux_job(engagement_id: int, log_path: str, job: Dict[str, Any])
         stats['total_shares'] > 0
     )

+    if enum4linux_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif has_results:
+        status = STATUS_DONE
+    else:
+        status = STATUS_NO_RESULTS
+
     return {
         'tool': 'enum4linux',
-        'status': STATUS_DONE if has_results else STATUS_NO_RESULTS,
+        'status': status,
         'findings_added': findings_added,
         'credentials_added': credentials_added,
         'users_found': len(parsed['users']),
@@ -2735,13 +2874,26 @@ def parse_ffuf_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> Di

     if host_id and parsed.get('paths'):
         paths_added = wpm.bulk_add_web_paths(host_id, parsed['paths'])
-
+
         # Check for sensitive paths and create findings (same as gobuster)
         created_findings = _create_findings_for_sensitive_paths(engagement_id, host_id, parsed['paths'], job)

+    # Check for ffuf errors
+    with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
+        log_content = f.read()
+    ffuf_error = detect_tool_error(log_content, 'ffuf')
+
+    # Determine status
+    if ffuf_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif parsed.get('results_found', 0) > 0:
+        status = STATUS_DONE
+    else:
+        status = STATUS_NO_RESULTS
+
     return {
         'tool': 'ffuf',
-        'status': STATUS_DONE if parsed.get('results_found', 0) > 0 else STATUS_NO_RESULTS,
+        'status': status,
         'target': target,
         'results_found': parsed.get('results_found', 0),
         'paths_added': paths_added,
@@ -3060,8 +3212,13 @@ def parse_nikto_job(engagement_id: int, log_path: str, job: Dict[str, Any]) -> D
            )
            findings_added += 1

+    # Check for nikto errors
+    nikto_error = detect_tool_error(output, 'nikto')
+
     # Determine status based on results
-    if findings_added > 0:
+    if nikto_error:
+        status = STATUS_ERROR  # Tool failed to connect
+    elif findings_added > 0:
         status = STATUS_DONE
     else:
         status = STATUS_NO_RESULTS
@@ -22,10 +22,14 @@ class WazuhConfig:
     """

     @staticmethod
-    def get_config(engagement_id: int) -> Optional[Dict[str, Any]]:
+    def get_config(engagement_id: int, siem_type: str = None) -> Optional[Dict[str, Any]]:
         """
         Get SIEM config for an engagement.

+        Args:
+            engagement_id: Engagement ID
+            siem_type: Optional SIEM type to filter by. If None, returns first/active config.
+
         Returns:
             Config dict or None if not configured
         """
@@ -33,19 +37,31 @@ class WazuhConfig:
         conn = db.get_connection()
         cursor = conn.cursor()

-        # Check if new columns exist (migration 025)
+        # Check if new columns exist (migration 025+)
         cursor.execute("PRAGMA table_info(wazuh_config)")
         columns = [col[1] for col in cursor.fetchall()]
         has_new_columns = 'siem_type' in columns

         # Query with or without new columns
         if has_new_columns:
-            cursor.execute("""
-                SELECT api_url, api_user, api_password, indexer_url, indexer_user,
-                       indexer_password, verify_ssl, enabled, siem_type, config_json
-                FROM wazuh_config
-                WHERE engagement_id = ?
-            """, (engagement_id,))
+            if siem_type:
+                cursor.execute("""
+                    SELECT api_url, api_user, api_password, indexer_url, indexer_user,
+                           indexer_password, verify_ssl, enabled, siem_type, config_json
+                    FROM wazuh_config
+                    WHERE engagement_id = ? AND siem_type = ?
+                """, (engagement_id, siem_type))
+            else:
+                # Get most recently updated config (the "current" selected SIEM)
+                # Not filtering by enabled - user may have selected but not configured yet
+                cursor.execute("""
+                    SELECT api_url, api_user, api_password, indexer_url, indexer_user,
+                           indexer_password, verify_ssl, enabled, siem_type, config_json
+                    FROM wazuh_config
+                    WHERE engagement_id = ?
+                    ORDER BY updated_at DESC
+                    LIMIT 1
+                """, (engagement_id,))
         else:
             cursor.execute("""
                 SELECT api_url, api_user, api_password, indexer_url, indexer_user,
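
Assuming WazuhConfig is imported from its module (the path is not shown here) and using a placeholder engagement ID, the extended lookup above might be exercised like this:

    # Fetch the stored config for one specific backend, if any.
    splunk_cfg = WazuhConfig.get_config(engagement_id=3, siem_type='splunk')

    # With siem_type omitted, the most recently updated row wins
    # (ORDER BY updated_at DESC LIMIT 1), i.e. the currently selected SIEM.
    current_cfg = WazuhConfig.get_config(engagement_id=3)
    if current_cfg:
        print(current_cfg.get('siem_type'), current_cfg.get('enabled'))
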
@@ -190,7 +206,7 @@ class WazuhConfig:
                pass
        config_json_str = json.dumps(encrypted_config)

-        # Upsert config
+        # Upsert config - keyed by (engagement_id, siem_type) for multi-SIEM support
        cursor.execute("""
            INSERT INTO wazuh_config (
                engagement_id, api_url, api_user, api_password, indexer_url,
@@ -198,7 +214,7 @@ class WazuhConfig:
                siem_type, config_json
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-            ON CONFLICT(engagement_id) DO UPDATE SET
+            ON CONFLICT(engagement_id, siem_type) DO UPDATE SET
                api_url = excluded.api_url,
                api_user = excluded.api_user,
                api_password = excluded.api_password,
@@ -207,8 +223,8 @@ class WazuhConfig:
                indexer_password = excluded.indexer_password,
                verify_ssl = excluded.verify_ssl,
                enabled = excluded.enabled,
-                siem_type = excluded.siem_type,
-                config_json = excluded.config_json
+                config_json = excluded.config_json,
+                updated_at = CURRENT_TIMESTAMP
        """, (
            engagement_id, api_url, api_user, encrypted_api_password,
            indexer_url, indexer_user or 'admin', encrypted_indexer_password,
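
In SQLite, an ON CONFLICT(engagement_id, siem_type) upsert target only resolves against a matching UNIQUE constraint, so this change presumably pairs with a schema migration that is not part of this diff. A purely illustrative sketch of such an index, with a hypothetical database path and index name:

    import sqlite3

    conn = sqlite3.connect('souleyez.db')  # hypothetical path
    # Hypothetical migration step: a composite UNIQUE index to back the composite upsert target
    # in place of the old UNIQUE(engagement_id) constraint.
    conn.execute("""
        CREATE UNIQUE INDEX IF NOT EXISTS idx_wazuh_config_engagement_siem
        ON wazuh_config (engagement_id, siem_type)
    """)
    conn.commit()
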
@@ -262,17 +278,124 @@ class WazuhConfig:
        )

     @staticmethod
-    def delete_config(engagement_id: int) -> bool:
-        """Delete Wazuh config for an engagement."""
+    def delete_config(engagement_id: int, siem_type: str = None) -> bool:
+        """
+        Delete SIEM config for an engagement.
+
+        Args:
+            engagement_id: Engagement ID
+            siem_type: Optional SIEM type. If None, deletes ALL SIEM configs for engagement.
+        """
         db = get_db()
         conn = db.get_connection()
         cursor = conn.cursor()
-        cursor.execute("DELETE FROM wazuh_config WHERE engagement_id = ?", (engagement_id,))
+        if siem_type:
+            cursor.execute(
+                "DELETE FROM wazuh_config WHERE engagement_id = ? AND siem_type = ?",
+                (engagement_id, siem_type)
+            )
+        else:
+            cursor.execute("DELETE FROM wazuh_config WHERE engagement_id = ?", (engagement_id,))
         conn.commit()
         return cursor.rowcount > 0

     @staticmethod
-    def is_configured(engagement_id: int) -> bool:
-        """Check if Wazuh is configured for an engagement."""
-        config = WazuhConfig.get_config(engagement_id)
+    def is_configured(engagement_id: int, siem_type: str = None) -> bool:
+        """Check if SIEM is configured for an engagement."""
+        config = WazuhConfig.get_config(engagement_id, siem_type)
         return config is not None and config.get("enabled", False)
+
+    @staticmethod
+    def list_configured_siems(engagement_id: int) -> List[Dict[str, Any]]:
+        """
+        List all configured SIEMs for an engagement.
+
+        Returns:
+            List of dicts with siem_type, enabled, and api_url for each configured SIEM
+        """
+        db = get_db()
+        conn = db.get_connection()
+        cursor = conn.cursor()
+
+        cursor.execute("PRAGMA table_info(wazuh_config)")
+        columns = [col[1] for col in cursor.fetchall()]
+
+        if 'siem_type' not in columns:
+            # Old schema - only one config possible
+            cursor.execute("""
+                SELECT 'wazuh' as siem_type, enabled, api_url
+                FROM wazuh_config
+                WHERE engagement_id = ?
+            """, (engagement_id,))
+        else:
+            cursor.execute("""
+                SELECT siem_type, enabled, api_url, updated_at
+                FROM wazuh_config
+                WHERE engagement_id = ?
+                ORDER BY siem_type
+            """, (engagement_id,))
+
+        rows = cursor.fetchall()
+        return [
+            {
+                'siem_type': row[0],
+                'enabled': bool(row[1]),
+                'api_url': row[2],
+                'updated_at': row[3] if len(row) > 3 else None
+            }
+            for row in rows
+        ]
+
+    @staticmethod
+    def get_all_configs(engagement_id: int) -> Dict[str, Dict[str, Any]]:
+        """
+        Get all SIEM configs for an engagement, keyed by siem_type.
+
+        Returns:
+            Dict mapping siem_type to config dict
+        """
+        configs = {}
+        for siem in SIEM_TYPES:
+            config = WazuhConfig.get_config(engagement_id, siem)
+            if config:
+                configs[siem] = config
+        return configs
+
+    @staticmethod
+    def get_current_siem_type(engagement_id: int) -> str:
+        """
+        Get the currently selected SIEM type for an engagement.
+
+        Returns the most recently selected SIEM type, even if not fully configured.
+
+        Returns:
+            SIEM type string ('wazuh', 'splunk', etc.) or 'wazuh' as default
+        """
+        config = WazuhConfig.get_config(engagement_id)
+        if config:
+            return config.get('siem_type', 'wazuh')
+        return 'wazuh'
+
+    @staticmethod
+    def set_current_siem(engagement_id: int, siem_type: str) -> bool:
+        """
+        Set a SIEM type as current by updating its timestamp.
+
+        This makes the specified SIEM the "active" one without changing its config.
+
+        Args:
+            engagement_id: Engagement ID
+            siem_type: SIEM type to make current
+
+        Returns:
+            True if successful
+        """
+        db = get_db()
+        conn = db.get_connection()
+        cursor = conn.cursor()
+        cursor.execute(
+            "UPDATE wazuh_config SET updated_at = CURRENT_TIMESTAMP WHERE engagement_id = ? AND siem_type = ?",
+            (engagement_id, siem_type)
+        )
+        conn.commit()
+        return cursor.rowcount > 0
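
Taken together, the new helpers implement a "several stored configs, one active" model. A hedged usage sketch, assuming WazuhConfig is imported and using placeholder engagement and SIEM values ('splunk' and 'elastic' may or may not be members of SIEM_TYPES, which this diff does not show):

    eng_id = 3  # placeholder engagement ID

    # Enumerate every stored SIEM config for the engagement.
    for siem in WazuhConfig.list_configured_siems(eng_id):
        print(siem['siem_type'], siem['enabled'], siem['api_url'])

    # Mark Splunk as the active backend (bumps its updated_at), then read it back.
    if WazuhConfig.set_current_siem(eng_id, 'splunk'):
        print(WazuhConfig.get_current_siem_type(eng_id))  # expected: 'splunk'

    # Remove only one backend's config, leaving the others intact.
    WazuhConfig.delete_config(eng_id, siem_type='elastic')
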