regscale-cli 6.27.1.0__py3-none-any.whl → 6.27.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/_version.py +1 -1
- regscale/core/app/utils/app_utils.py +41 -7
- regscale/integrations/commercial/aws/scanner.py +3 -2
- regscale/integrations/commercial/microsoft_defender/defender_api.py +1 -1
- regscale/integrations/commercial/sicura/api.py +65 -29
- regscale/integrations/commercial/sicura/scanner.py +36 -7
- regscale/integrations/commercial/tenablev2/commands.py +4 -4
- regscale/integrations/commercial/tenablev2/scanner.py +1 -2
- regscale/integrations/commercial/wizv2/scanner.py +40 -16
- regscale/integrations/public/cci_importer.py +400 -9
- regscale/models/integration_models/aqua.py +2 -2
- regscale/models/integration_models/cisa_kev_data.json +76 -3
- regscale/models/integration_models/flat_file_importer/__init__.py +4 -6
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- {regscale_cli-6.27.1.0.dist-info → regscale_cli-6.27.2.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.27.1.0.dist-info → regscale_cli-6.27.2.0.dist-info}/RECORD +23 -23
- tests/regscale/integrations/commercial/test_sicura.py +0 -1
- tests/regscale/integrations/commercial/wizv2/test_wizv2.py +86 -0
- tests/regscale/integrations/public/test_cci.py +596 -1
- {regscale_cli-6.27.1.0.dist-info → regscale_cli-6.27.2.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.27.1.0.dist-info → regscale_cli-6.27.2.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.27.1.0.dist-info → regscale_cli-6.27.2.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.27.1.0.dist-info → regscale_cli-6.27.2.0.dist-info}/top_level.txt +0 -0
regscale/_version.py
CHANGED

regscale/core/app/utils/app_utils.py
CHANGED

@@ -584,6 +584,42 @@ def check_supported_file_type(file: Path) -> None:
     raise RuntimeError(f"Unsupported file type: {file.suffix}")


+def _remove_nested_dicts_before_saving(data: Any) -> "pd.DataFrame":
+    """
+    Remove nested dictionaries before saving the data to a file.
+
+    :param Any data: The data to remove nested dictionaries from.
+    :return: A pandas DataFrame with the nested dictionaries removed.
+    :rtype: "pd.DataFrame"
+    """
+    import pandas as pd  # Optimize import performance
+
+    # Handle case where data is a single dict (not a list)
+    # This occurs with endpoints that return a single object with nested structures
+    if isinstance(data, dict) and not isinstance(data, list):
+        # Check if the dict contains nested dicts or lists of dicts (not simple lists)
+        has_nested_dicts = any(
+            isinstance(v, dict) or (isinstance(v, list) and v and isinstance(v[0], dict)) for v in data.values()
+        )
+        if has_nested_dicts:
+            # Use json_normalize to flatten nested dict structures
+            d_frame = pd.json_normalize(data)
+        else:
+            # Simple dict or dict with simple lists
+            # Check if all values are scalars (not lists) - if so, wrap in list for DataFrame
+            has_any_lists = any(isinstance(v, list) for v in data.values())
+            if has_any_lists:
+                # Dict with simple lists - can use DataFrame directly
+                d_frame = pd.DataFrame(data)
+            else:
+                # All scalar values - must wrap in list for DataFrame
+                d_frame = pd.DataFrame([data])
+    else:
+        # Handle list of dicts or other data structures
+        d_frame = pd.DataFrame(data)
+    return d_frame
+
+
 def save_to_csv(file: Path, data: Any, output_log: bool, transpose: bool = True) -> None:
     """
     Save data to a CSV file.
@@ -594,13 +630,12 @@ def save_to_csv(file: Path, data: Any, output_log: bool, transpose: bool = True)
     :param bool transpose: Whether to transpose the data, defaults to True
     :rtype: None
     """
-
+    d_frame = _remove_nested_dicts_before_saving(data)

     if transpose:
-
-
-
-        data.to_csv(file)
+        d_frame = d_frame.transpose()
+
+    d_frame.to_csv(file)
     if output_log:
         logger.info("Data successfully saved to: %s", file.absolute())

@@ -615,9 +650,8 @@ def save_to_excel(file: Path, data: Any, output_log: bool, transpose: bool = True)
     :param bool transpose: Whether to transpose the data, defaults to True
     :rtype: None
     """
-
+    d_frame = _remove_nested_dicts_before_saving(data)

-    d_frame = pd.DataFrame(data)
     if transpose:
         d_frame = d_frame.transpose()

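Both save_to_csv and save_to_excel now delegate to the new _remove_nested_dicts_before_saving helper, whose core trick is pandas' json_normalize: nested dictionaries become dotted column names instead of serializing as raw dicts. A minimal standalone sketch of the behavior it relies on (illustrative data only, not RegScale output):

    import pandas as pd

    # A single API object with a nested structure, as some endpoints return
    record = {"id": 1, "name": "server-01", "os": {"family": "linux", "version": "9"}}

    # json_normalize flattens nested dicts into dotted column names
    flat = pd.json_normalize(record)
    print(list(flat.columns))  # ['id', 'name', 'os.family', 'os.version']

    # whereas a dict of all-scalar values must be wrapped in a list to form
    # a one-row frame, mirroring the pd.DataFrame([data]) branch of the helper
    row = pd.DataFrame([{"id": 1, "name": "server-01"}])
    print(row.shape)  # (1, 2)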
regscale/integrations/commercial/aws/scanner.py
CHANGED

@@ -706,7 +706,7 @@ Description: {description if isinstance(description, str) else ''}"""

         aws_secret_key_id = kwargs.get("aws_access_key_id") or os.getenv("AWS_ACCESS_KEY_ID")
         aws_secret_access_key = kwargs.get("aws_secret_access_key") or os.getenv("AWS_SECRET_ACCESS_KEY")
-        region = kwargs.get("region") or os.getenv("AWS_REGION"
+        region = kwargs.get("region") or os.getenv("AWS_REGION")
         if not aws_secret_key_id or not aws_secret_access_key:
             raise ValueError(
                 "AWS Access Key ID and Secret Access Key are required.\nPlease update in environment "
@@ -714,8 +714,9 @@ Description: {description if isinstance(description, str) else ''}"""
         )
         if not region:
             logger.warning("AWS region not provided. Defaulting to 'us-east-1'.")
+            region = "us-east-1"
         session = boto3.Session(
-            region_name=
+            region_name=region,
             aws_access_key_id=aws_secret_key_id,
             aws_secret_access_key=aws_secret_access_key,
             aws_session_token=kwargs.get("aws_session_token"),
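Two fixes land here: the unterminated os.getenv call is closed, and the 'us-east-1' fallback now actually assigns a region before boto3.Session is built (previously only the warning was logged). A hedged sketch of the resulting precedence, explicit kwarg over environment over default, using boto3's standard Session parameters:

    import os

    import boto3

    def build_session(**kwargs) -> boto3.Session:
        # Precedence: explicit kwarg -> environment variable -> hard default
        region = kwargs.get("region") or os.getenv("AWS_REGION") or "us-east-1"
        return boto3.Session(
            region_name=region,
            aws_access_key_id=kwargs.get("aws_access_key_id") or os.getenv("AWS_ACCESS_KEY_ID"),
            aws_secret_access_key=kwargs.get("aws_secret_access_key") or os.getenv("AWS_SECRET_ACCESS_KEY"),
            aws_session_token=kwargs.get("aws_session_token"),
        )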
regscale/integrations/commercial/microsoft_defender/defender_api.py
CHANGED

@@ -353,7 +353,7 @@ class DefenderApi:

         data = self.get_items_from_azure(url=url, parse_value=kwargs.get("parse_value", True))
         save_path = Path(
-            os.path.join(ENTRA_SAVE_DIR, f"azure_entra_{endpoint_key}_{get_current_datetime('%Y%m%d')}.
+            os.path.join(ENTRA_SAVE_DIR, f"azure_entra_{endpoint_key}_{get_current_datetime('%Y%m%d')}.xlsx")
         )
         save_data_to(file=save_path, data=data, transpose_data=False)
         return [save_path]
regscale/integrations/commercial/sicura/api.py
CHANGED

@@ -203,7 +203,6 @@ class SicuraAPI:
     FILTER_TYPE = "filter[type]"
     FILTER_REJECTED = "filter[rejected]"
     FILTER_TASK_ID = "filter[task_id]"
-    csrf_token: Optional[str] = None

     def __init__(self):
         """
@@ -246,12 +245,9 @@ class SicuraAPI:

         if data:
             self.session.headers["Content-Type"] = "application/json"
-
-
-
-        if self.csrf_token:
-            self.session.headers["X-CSRF-TOKEN"] = str(self.csrf_token)
-        self.session.headers["auth-token-signature"] = SicuraVariables.sicuraToken
+
+        # Always set the auth token signature for API authentication
+        self.session.headers["auth-token-signature"] = SicuraVariables.sicuraToken

         try:
             # Use the session object to maintain cookies between requests
@@ -313,22 +309,6 @@ class SicuraAPI:
             logging.error(f"Error validating response: {e}", exc_info=True)
             return None

-    def get_csrf_token(self) -> Optional[AuthResponse]:
-        """
-        Get authentication token from Sicura API.
-
-        :return: Authentication response
-        :rtype: Optional[AuthResponse]
-        :raises requests.exceptions.RequestException: If the request fails
-        """
-        try:
-            response = self._make_request("GET", "/auth/token")
-            self.csrf_token = response
-            return response
-        except requests.exceptions.RequestException as e:
-            logger.error(f"Error getting authentication token: {e}", exc_info=True)
-            raise
-
 class Device(SicuraModel):
     """Model for Sicura device information."""

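These three hunks retire the CSRF handshake entirely: the csrf_token class attribute, the conditional X-CSRF-TOKEN header, and the get_csrf_token method are all removed, and every request now carries the auth-token-signature header unconditionally. A minimal sketch of that session-level pattern with requests (ApiClient is a generic stand-in, not the SicuraAPI class):

    import requests

    class ApiClient:
        def __init__(self, base_url: str, token: str):
            self.base_url = base_url
            self.session = requests.Session()
            # Set once on the session: every request sends the auth header,
            # so no per-request CSRF round trip is needed
            self.session.headers["auth-token-signature"] = token

        def get(self, path: str, **params):
            return self.session.get(f"{self.base_url}{path}", params=params)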
@@ -384,11 +364,56 @@ class SicuraAPI:
             logger.error(f"Failed to get devices: {e}", exc_info=True)
             return []

+    def create_or_update_control_profile(self, profile_name: str, controls: list[dict]) -> Optional[dict]:
+        """
+        Create or update a control profile.
+
+        :param str profile_name: Name of the control profile
+        :param list[dict] controls: List of controls to add to the profile
+        :return: The control profile if successfully created or updated, None otherwise
+        :rtype: Optional[dict]
+        """
+        profile_id = None
+        profile_data = None
+        params = {"verbose": "true"}
+        try:
+            # see if the profile already exists
+            response = self._make_request("GET", "/api/jaeger/v1/control_profiles", params=params)
+            for profile in response:
+                if profile["name"] == profile_name:
+                    profile_id = profile["id"]
+                    break
+            payload = {
+                "name": profile_name,
+                "description": f"Profile for {profile_name} with {len(controls)} controls.",
+                "controls": controls,
+            }
+            if not profile_id:
+                crud_operation = "Created"
+                response = self._make_request("POST", "/api/jaeger/v1/control_profiles", data=payload, params=params)
+                profile_id = response
+                profile_data = self._make_request("GET", f"/api/jaeger/v1/control_profiles/{profile_id}", params=params)
+            else:
+                crud_operation = "Updated"
+                response = self._make_request(
+                    "PUT", f"/api/jaeger/v1/control_profiles/{profile_id}", data=payload, params=params
+                )
+                profile_id = response["id"]
+                profile_data = response
+            logger.info(f"{crud_operation} control profile #{profile_id} in Sicura with {len(controls)} controls.")
+            return profile_data
+
+            return profile_id
+        except Exception as e:
+            logger.error(f"Failed to create or update control profile: {e}", exc_info=True)
+            return None
+
     def create_scan_task(
         self,
         device_id: int,
         platform: str,
-        profile: SicuraProfile,
+        profile: Union[SicuraProfile, str],
+        author: Optional[str] = None,
         task_name: Optional[str] = None,
         scheduled_time: Optional[datetime.datetime] = None,
     ) -> Optional[str]:
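create_or_update_control_profile is a look-up-then-upsert: list the existing profiles, PUT when a name matches, otherwise POST and re-GET the created resource (note: the second return profile_id is unreachable in the released code, as shown above). The generic shape of the pattern, with client as a hypothetical wrapper around the HTTP calls:

    from typing import Optional

    def upsert_by_name(client, name: str, payload: dict) -> Optional[dict]:
        # Generic create-or-update keyed on a unique name field
        existing_id = None
        for item in client.list():  # GET the collection
            if item["name"] == name:
                existing_id = item["id"]
                break
        if existing_id is None:
            return client.create(payload)  # POST -> new resource
        return client.update(existing_id, payload)  # PUT -> replace in place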
@@ -398,6 +423,7 @@ class SicuraAPI:
         :param int device_id: ID of the device to scan
         :param str platform: Platform name (e.g., 'Red Hat Enterprise Linux 9')
         :param SicuraProfile profile: Scan profile name (e.g., 'I - Mission Critical Classified')
+        :param Optional[str] author: Author of the scan task (default: None)
         :param Optional[str] task_name: Name for the scan task (default: auto-generated)
         :param Optional[datetime.datetime] scheduled_time: When to run the scan (default: now)
         :return: Task ID if successful, None otherwise
@@ -425,6 +451,9 @@ class SicuraAPI:
             "scanAttributes": {"platform": platform, "profile": profile},
         }

+        if author:
+            payload["scanAttributes"]["author"] = author
+
         result = self._make_request("POST", "/api/jaeger/v1/tasks/", data=payload)

         if result:
@@ -483,14 +512,16 @@ class SicuraAPI:
         self,
         fqdn: str,
         platform: Optional[str] = None,
-        profile: SicuraProfile = SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED,
+        profile: Union[SicuraProfile, str] = SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED,
+        author: Optional[str] = None,
     ) -> Optional[ScanReport]:
         """
         Get scan results for a specific device.

         :param str fqdn: Fully qualified domain name of the device
         :param Optional[str] platform: Platform name to filter results (e.g., 'Red Hat Enterprise Linux 9')
-        :param SicuraProfile profile: Profile name to filter results
+        :param Union[SicuraProfile, str] profile: Profile name to filter results, defaults to I - Mission Critical Classified
+        :param Optional[str] author: Author of the scan task (default: None)
         :return: Scan report containing device info and scan results, or None if not found
         :rtype: Optional[ScanReport]
         """
@@ -503,6 +534,9 @@ class SicuraAPI:
         if profile:
             params["profile"] = profile

+        if author:
+            params["author"] = author
+
         response = self._make_request("GET", "/api/jaeger/v1/nodes", params=params)

         # Handle 404 or empty response
@@ -560,7 +594,8 @@ class SicuraAPI:
         task_id: Union[int, str],
         fqdn: str,
         platform: Optional[str] = None,
-        profile: SicuraProfile = SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED,
+        profile: Union[SicuraProfile, str] = SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED,
+        author: Optional[str] = None,
         max_wait_time: int = 600,
         poll_interval: int = 10,
     ) -> Optional[Union[ScanReport, Dict[str, Any]]]:
@@ -570,7 +605,8 @@ class SicuraAPI:
         :param Union[int, str] task_id: ID of the scan task to monitor
         :param str fqdn: Fully qualified domain name of the device
         :param Optional[str] platform: Platform name to filter results
-        :param SicuraProfile profile: Profile name to filter results
+        :param Union[SicuraProfile, str] profile: Profile name to filter results, defaults to I - Mission Critical Classified
+        :param Optional[str] author: Author of the scan task (default: None)
         :param int max_wait_time: Maximum time to wait in seconds (default: 10 minutes)
         :param int poll_interval: Time between status checks in seconds (default: 10 seconds)
         :return: Scan results once the task is complete, or None if timeout or error
@@ -602,7 +638,7 @@ class SicuraAPI:
             logger.info(f"Scan task {task_id} completed successfully, fetching results...")
             # Wait a moment for results to be processed
             time.sleep(2)
-            return self.get_scan_results(fqdn, platform, profile)
+            return self.get_scan_results(fqdn=fqdn, platform=platform, profile=profile, author=author)
         else:
             logger.error(f"Scan task {task_id} ended with status {latest_status}")
             return None
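wait_for_scan_results gains the same profile/author plumbing and now calls get_scan_results with keyword arguments so the new author filter reaches the results query. Its poll loop is the standard bounded-wait shape; a sketch under assumed names (check_status and fetch_results are placeholders, not Sicura API calls):

    import time
    from typing import Callable, Optional

    def wait_for(check_status: Callable[[], str], fetch_results: Callable[[], dict],
                 max_wait_time: int = 600, poll_interval: int = 10) -> Optional[dict]:
        deadline = time.monotonic() + max_wait_time
        while time.monotonic() < deadline:
            status = check_status()
            if status == "completed":
                time.sleep(2)  # give the server a moment to persist results
                return fetch_results()
            if status in ("failed", "cancelled"):
                return None
            time.sleep(poll_interval)  # bounded pause between checks
        return None  # timed out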
regscale/integrations/commercial/sicura/scanner.py
CHANGED

@@ -4,7 +4,7 @@
 RegScale Sicura Integration
 """
 import datetime
-from typing import Generator, Iterator,
+from typing import Any, Generator, Iterator, Union

 from regscale.core.utils.date import date_str
 from regscale.integrations.commercial.sicura.api import SicuraAPI, ScanReport, SicuraProfile, Device, ScanResult
@@ -55,6 +55,8 @@ class SicuraIntegration(ScannerIntegration):
         """
         super().__init__(*args, **kwargs)
         self.api = SicuraAPI()
+        self.control_scan = False
+        self.control_scan_profile = None

     def fetch_findings(self, **kwargs) -> Generator[IntegrationFinding, None, None]:
         """
@@ -103,23 +105,28 @@ class SicuraIntegration(ScannerIntegration):
             return

         # Profiles to scan
-        profiles = [SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED
+        profiles = [SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED]
+        if self.control_scan:
+            profiles = [self.control_scan_profile]

         for profile in profiles:
             yield from self._process_profile_findings(device, profile)

     def _process_profile_findings(
-        self, device: Device, profile: SicuraProfile
+        self, device: Device, profile: Union[SicuraProfile, str]
     ) -> Generator[IntegrationFinding, None, None]:
         """
         Process findings for a device with a specific profile

         :param Device device: The device to process
-        :param SicuraProfile profile: The profile to process
+        :param Union[SicuraProfile, str] profile: The profile to process
         :yield: IntegrationFinding objects
         :rtype: Generator[IntegrationFinding, None, None]
         """
-
+        if self.control_scan and profile == self.control_scan_profile:
+            scan_report = self.api.get_scan_results(fqdn=device.fqdn, profile=profile, author="control")
+        else:
+            scan_report = self.api.get_scan_results(fqdn=device.fqdn, profile=profile)

         if not scan_report:
             logger.warning(f"No scan results found for device: {device.fqdn} with profile: {profile}")
@@ -458,17 +465,21 @@ class SicuraIntegration(ScannerIntegration):
         :param Device device: The device to trigger a scan for
         :return: None
         """
+        profile = self.control_scan_profile if self.control_scan else SicuraProfile.I_MISSION_CRITICAL_CLASSIFIED
+        author = "control" if self.control_scan else None
         task_id = self.api.create_scan_task(
             device_id=device.id,
             platform=device.platforms,
-            profile=
+            profile=profile,
+            author=author,
         )
         if task_id:
             self.api.wait_for_scan_results(
                 task_id=task_id,
                 fqdn=device.fqdn,
                 platform=device.platforms,
-                profile=
+                profile=profile,
+                author=author,
             )
         else:
             logger.warning(f"Failed to create scan task for device {device.fqdn}")
@@ -480,6 +491,24 @@ class SicuraIntegration(ScannerIntegration):
         :param list[Device] devices: The devices to trigger scans for
         :return: None
         """
+        # get the SSP's controlImplementations
+        if control_imps := regscale_models.ControlImplementation.get_list_by_parent(
+            regscale_id=self.plan_id, regscale_module=regscale_models.SecurityPlan.get_module_slug()
+        ):
+            if profile := self.api.create_or_update_control_profile(
+                profile_name=f"regscale_ssp_id_{self.plan_id}",
+                controls=control_imps,
+            ):
+                self.control_scan = True
+                self.control_scan_profile = profile["name"]
+            else:
+                logger.warning("Failed to create or update control profile")
+                self.control_scan = False
+                self.control_scan_profile = None
+        else:
+            self.control_scan = False
+            self.control_scan_profile = None
+
         if len(devices) > 1:
             from regscale.utils.threading import ThreadManager

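Before fanning out scans, trigger_scans now builds (or refreshes) a per-SSP control profile named regscale_ssp_id_<plan_id> and flips the control_scan/control_scan_profile flags that fetch_findings and the per-device scan trigger consult. The walrus-operator gating distills to the sketch below (get_control_imps and upsert_profile are stand-ins for the RegScale and Sicura calls):

    def prepare_control_scan(scanner, plan_id: int) -> None:
        # := keeps the happy path flat: each step runs only if the
        # previous lookup returned something truthy
        if control_imps := get_control_imps(plan_id):
            if profile := upsert_profile(f"regscale_ssp_id_{plan_id}", control_imps):
                scanner.control_scan = True
                scanner.control_scan_profile = profile["name"]
                return
        # any miss along the way falls back to the default profile
        scanner.control_scan = False
        scanner.control_scan_profile = None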
regscale/integrations/commercial/tenablev2/commands.py
CHANGED

@@ -154,8 +154,8 @@ def io_sync_assets(regscale_ssp_id: int, tags: List[Tuple[str, str]] = None):
     try:
         from regscale.integrations.commercial.tenablev2.scanner import TenableIntegration

-        integration = TenableIntegration(plan_id=regscale_ssp_id
-        integration.sync_assets(plan_id=regscale_ssp_id)
+        integration = TenableIntegration(plan_id=regscale_ssp_id)
+        integration.sync_assets(plan_id=regscale_ssp_id, tags=tags)

         console.print("[bold green]Tenable.io asset synchronization complete.[/bold green]")
     except Exception as e:
@@ -193,8 +193,8 @@ def io_sync_findings(
     try:
         from regscale.integrations.commercial.tenablev2.scanner import TenableIntegration

-        integration = TenableIntegration(plan_id=regscale_ssp_id,
-        integration.sync_findings(plan_id=regscale_ssp_id, severity=severity)
+        integration = TenableIntegration(plan_id=regscale_ssp_id, scan_date=scan_date)
+        integration.sync_findings(plan_id=regscale_ssp_id, severity=severity, tags=tags)

         console.print("[bold green]Tenable.io finding synchronization complete.[/bold green]")
     except Exception as e:
regscale/integrations/commercial/tenablev2/scanner.py
CHANGED

@@ -44,7 +44,7 @@ class TenableIntegration(ScannerIntegration):
         "low": regscale_models.IssueSeverity.Low,
     }

-    def __init__(self, plan_id: int, tenant_id: int = 1,
+    def __init__(self, plan_id: int, tenant_id: int = 1, **kwargs):
         """
         Initialize the TenableIntegration.

@@ -53,7 +53,6 @@ class TenableIntegration(ScannerIntegration):
         """
         super().__init__(plan_id, tenant_id, **kwargs)
         self.client = None
-        self.tags = tags or []
         self.scan_date = kwargs.get("scan_date", get_current_datetime())

     def authenticate(self) -> None:
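The constructor repair replaces the truncated parameter list with **kwargs and drops self.tags (tags now travel as explicit arguments to sync_assets/sync_findings, per the commands.py hunks above), while scan_date rides in through kwargs. A small sketch of that forwarding pattern (Base and Integration are illustrative, not the RegScale classes):

    class Base:
        def __init__(self, plan_id: int, tenant_id: int = 1, **kwargs):
            self.plan_id = plan_id
            self.tenant_id = tenant_id

    class Integration(Base):
        def __init__(self, plan_id: int, tenant_id: int = 1, **kwargs):
            # Optional extras such as scan_date ride along in kwargs
            super().__init__(plan_id, tenant_id, **kwargs)
            self.scan_date = kwargs.get("scan_date", "1970-01-01T00:00:00")  # stand-in default

    integration = Integration(plan_id=42, scan_date="2025-06-01T00:00:00")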
regscale/integrations/commercial/wizv2/scanner.py
CHANGED

@@ -70,6 +70,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         "Medium": regscale_models.IssueSeverity.Moderate,
         "Low": regscale_models.IssueSeverity.Low,
         "INFORMATIONAL": regscale_models.IssueSeverity.NotAssigned,
+        "INFO": regscale_models.IssueSeverity.NotAssigned,  # Wiz uses "INFO" for informational data findings
+        "None": regscale_models.IssueSeverity.NotAssigned,  # Wiz uses "NONE" for findings without severity
     }
     asset_lookup = "vulnerableAsset"
     wiz_token = None
@@ -706,13 +708,11 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         :yield: IntegrationFinding objects
         :rtype: Iterator[IntegrationFinding]
         """
-        logger.
-            f"🔍 VULNERABILITY PROCESSING ANALYSIS: Received {len(nodes)} raw Wiz vulnerabilities for processing"
-        )
+        logger.debug(f"VULNERABILITY PROCESSING ANALYSIS: Received {len(nodes)} raw Wiz vulnerabilities for processing")

         # Count issues by severity for analysis
-        severity_counts = {}
-        status_counts = {}
+        severity_counts: dict[str, int] = {}
+        status_counts: dict[str, int] = {}
         for node in nodes:
             severity = node.get("severity", "Low")
             status = node.get("status", "OPEN")
@@ -728,22 +728,35 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         for node in nodes:
             wiz_severity = node.get("severity", "Low")
             wiz_id = node.get("id", "unknown")
+
+            # Log sample record for NONE severity (only first occurrence per session)
+            if wiz_severity and wiz_severity.upper() == "NONE":
+                if not hasattr(self, "_none_severity_sample_logged"):
+                    logger.info(
+                        f"SAMPLE RECORD - Vulnerability with NONE severity (treating as informational): "
+                        f"ID={node.get('id', 'Unknown')}, "
+                        f"Name={node.get('name', 'Unknown')}, "
+                        f"Type={node.get('type', 'Unknown')}, "
+                        f"Severity={wiz_severity}"
+                    )
+                    self._none_severity_sample_logged = True
+
             if self.should_process_finding_by_severity(wiz_severity):
                 filtered_nodes.append(node)
             else:
                 filtered_out_count += 1
-                logger.
-                    f"
+                logger.debug(
+                    f"FILTERED BY SEVERITY: Vulnerability {wiz_id} with severity '{wiz_severity}' "
                     f"filtered due to minimumSeverity configuration"
                 )

         logger.info(
-            f"
+            f"After severity filtering: {len(filtered_nodes)} vulnerabilities kept, {filtered_out_count} filtered out"
         )

         if not filtered_nodes:
             logger.warning(
-                "
+                "All vulnerabilities filtered out by severity configuration - check your minimumSeverity setting"
             )
             return

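The sample-record logging uses a hasattr sentinel so a flood of NONE-severity findings yields exactly one INFO line per run. The idiom in isolation:

    import logging

    logger = logging.getLogger(__name__)

    class Processor:
        def handle(self, record: dict) -> None:
            if record.get("severity", "").upper() == "NONE":
                # hasattr works as a one-shot guard: the attribute only
                # exists after the first sample has been logged
                if not hasattr(self, "_none_sample_logged"):
                    logger.info("SAMPLE RECORD - %s", record)
                    self._none_sample_logged = True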
@@ -944,10 +957,17 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             self._severity_config_logged = True

         # Define severity hierarchy (lower index = higher severity)
-
+        # Note: "info", "informational", and "none" are all treated as informational
+        severity_hierarchy = ["critical", "high", "medium", "low", "informational", "info", "none"]

         try:
             wiz_severity_lower = wiz_severity.lower()
+
+            # Handle empty or None severity values - treat as informational
+            # Normalize "info" to "informational" for consistent processing
+            if not wiz_severity_lower or wiz_severity_lower == "none" or wiz_severity_lower == "info":
+                wiz_severity_lower = "informational"
+
             min_severity_index = severity_hierarchy.index(min_severity)
             finding_severity_index = severity_hierarchy.index(wiz_severity_lower)

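Severity filtering is an index comparison over the ordered hierarchy, so normalizing "info", "none", and empty strings to "informational" keeps list.index() from raising ValueError on Wiz's variant labels. A compact sketch of the comparison:

    SEVERITY_HIERARCHY = ["critical", "high", "medium", "low", "informational", "info", "none"]

    def should_process(finding_severity: str, min_severity: str = "low") -> bool:
        sev = (finding_severity or "").lower()
        if sev in ("", "none", "info"):
            sev = "informational"  # normalize Wiz's INFO/NONE variants
        # lower index = more severe; keep anything at least as severe as the minimum
        return SEVERITY_HIERARCHY.index(sev) <= SEVERITY_HIERARCHY.index(min_severity)

    assert should_process("Critical") is True
    assert should_process("INFO") is False  # informational sits below the "low" threshold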
@@ -1010,8 +1030,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             return graph_entity.get("id")

         # Standard case - direct id access
-        asset_container = node.get(asset_lookup_key
-        asset_id = asset_container.get("id")
+        asset_container = node.get(asset_lookup_key) or {}
+        asset_id = asset_container.get("id") if isinstance(asset_container, dict) else None

         # Add debug logging to help diagnose missing assets
         if not asset_id:
@@ -1023,8 +1043,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             fallback_keys = ["vulnerableAsset", "resource", "exposedEntity", "entitySnapshot"]
             for fallback_key in fallback_keys:
                 if fallback_key != asset_lookup_key and fallback_key in node:
-                    fallback_asset = node.get(fallback_key
-                    if fallback_id := fallback_asset.get("id"):
+                    fallback_asset = node.get(fallback_key) or {}
+                    if isinstance(fallback_asset, dict) and (fallback_id := fallback_asset.get("id")):
                         logger.debug(
                             f"Found asset ID using fallback key '{fallback_key}' for {vulnerability_type.value}"
                         )
@@ -1068,7 +1088,11 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             return graph_entity.get("providerUniqueId") or graph_entity.get("name") or graph_entity.get("id")

         # Standard case - get asset container and extract provider identifier
-        asset_container = node.get(asset_lookup_key
+        asset_container = node.get(asset_lookup_key) or {}
+
+        # Ensure asset_container is a dict before accessing
+        if not isinstance(asset_container, dict):
+            return None

         # For Issue queries, the field is called 'providerId' instead of 'providerUniqueId'
         if vulnerability_type == WizVulnerabilityType.ISSUE:
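All three asset-lookup hunks apply the same hardening: node.get(key) or {} followed by an isinstance check. The or {} covers the case a plain .get() default cannot, a key that is present but explicitly null in the API payload:

    node = {"vulnerableAsset": None}  # key present, value null in the API response

    asset = node.get("vulnerableAsset", {})  # -> None (the default only applies to *missing* keys)
    safe_asset = node.get("vulnerableAsset") or {}  # -> {}

    asset_id = safe_asset.get("id") if isinstance(safe_asset, dict) else None
    print(asset_id)  # None, with no AttributeError along the way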
@@ -1167,7 +1191,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         # Get asset identifier
         asset_id = self.get_asset_id_from_node(node, vulnerability_type)
         if not asset_id:
-            logger.
+            logger.debug(
                 f"Skipping {vulnerability_type.value} finding '{node.get('name', 'Unknown')}' "
                 f"(ID: {node.get('id', 'Unknown')}) - no asset identifier found"
             )