regscale-cli 6.20.4.0-py3-none-any.whl → 6.20.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/internal/model_editor.py +3 -3
- regscale/core/app/utils/regscale_utils.py +37 -0
- regscale/core/utils/date.py +26 -3
- regscale/integrations/commercial/defender.py +3 -0
- regscale/integrations/commercial/qualys/__init__.py +40 -14
- regscale/integrations/commercial/qualys/containers.py +324 -0
- regscale/integrations/commercial/qualys/scanner.py +203 -8
- regscale/integrations/commercial/synqly/edr.py +10 -0
- regscale/integrations/commercial/wizv2/click.py +2 -2
- regscale/integrations/commercial/wizv2/constants.py +13 -0
- regscale/integrations/commercial/wizv2/issue.py +3 -2
- regscale/integrations/commercial/wizv2/scanner.py +5 -1
- regscale/integrations/commercial/wizv2/utils.py +118 -72
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +107 -22
- regscale/models/integration_models/cisa_kev_data.json +140 -3
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/catalog.py +16 -0
- regscale/models/regscale_models/file.py +2 -1
- regscale/models/regscale_models/form_field_value.py +59 -1
- regscale/models/regscale_models/issue.py +47 -0
- regscale/models/regscale_models/organization.py +30 -0
- regscale/models/regscale_models/regscale_model.py +13 -5
- regscale/models/regscale_models/security_control.py +47 -0
- regscale/models/regscale_models/security_plan.py +32 -0
- {regscale_cli-6.20.4.0.dist-info → regscale_cli-6.20.5.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.20.4.0.dist-info → regscale_cli-6.20.5.0.dist-info}/RECORD +33 -31
- tests/fixtures/test_fixture.py +33 -4
- tests/regscale/core/test_app.py +53 -32
- {regscale_cli-6.20.4.0.dist-info → regscale_cli-6.20.5.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.20.4.0.dist-info → regscale_cli-6.20.5.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.20.4.0.dist-info → regscale_cli-6.20.5.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.20.4.0.dist-info → regscale_cli-6.20.5.0.dist-info}/top_level.txt +0 -0
@@ -7,23 +7,26 @@ import os
 import time
 import traceback
 import xml.etree.ElementTree as ET
-from …
-
+from datetime import date, datetime
+from io import TextIOWrapper
 from pathlib import Path
-from …
+from typing import Any, Dict, Iterator, List, Optional, TextIO, Tuple, Union
+
+from rich.progress import BarColumn, Progress, SpinnerColumn, TaskID, TextColumn, TimeElapsedColumn
 
 from regscale.core.app.utils.app_utils import get_current_datetime
+from regscale.core.utils.date import date_obj, date_str, normalize_timestamp
+from regscale.integrations.commercial.qualys.qualys_error_handler import QualysErrorHandler
 from regscale.integrations.commercial.qualys.variables import QualysVariables
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
 from regscale.integrations.scanner_integration import (
     IntegrationAsset,
     IntegrationFinding,
-    issue_due_date,
     ScannerIntegrationType,
+    issue_due_date,
 )
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import AssetStatus, IssueSeverity, IssueStatus
-from regscale.integrations.commercial.qualys.qualys_error_handler import QualysErrorHandler
 
 logger = logging.getLogger("regscale")
 
@@ -55,6 +58,8 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
     # Constants for file paths
     ASSETS_FILE = "./artifacts/qualys_total_cloud_assets.jsonl"
     FINDINGS_FILE = "./artifacts/qualys_total_cloud_findings.jsonl"
+    CONTAINERS_FILE = "./artifacts/qualys_total_cloud_containers.jsonl"
+    CONTAINER_FINDINGS_FILE = "./artifacts/qualys_total_cloud_container_findings.jsonl"
 
     def __init__(self, *args, **kwargs):
         """
@@ -65,6 +70,7 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         """
         self.type = ScannerIntegrationType.VULNERABILITY
         self.xml_data = kwargs.pop("xml_data", None)
+        self.containers = kwargs.pop("containers", None)
         # Setting a dummy file path to avoid validation errors
         if self.xml_data and "file_path" not in kwargs:
             kwargs["file_path"] = None
@@ -875,7 +881,9 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         logger.info(f"Found {num_findings} total findings in XML dictionary data")
 
         # Process assets and findings
-        self._process_dict_assets_and_findings(…
+        self._process_dict_assets_and_findings(
+            hosts_data=hosts_data, all_findings=all_findings, containers_data=self.containers
+        )
 
     def _extract_hosts_from_dict(self):
         """Extract host data from XML dictionary structure."""
@@ -914,11 +922,19 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
 
         return all_findings
 
-    def _process_dict_assets_and_findings(self, hosts_data, all_findings):
-        """
+    def _process_dict_assets_and_findings(self, hosts_data, all_findings, containers_data=None):
+        """
+        Process assets and findings from dictionary data.
+        :param List[Dict[str, Any]] hosts_data: List of host data dictionaries
+        :param List[Dict[str, Any]] all_findings: List of findings dictionaries
+        :param List[Dict[str, Any]] containers_data: List of container data dictionaries
+        """
         with open(self.ASSETS_FILE, "w") as assets_file, open(self.FINDINGS_FILE, "w") as findings_file:
             self._write_assets_from_dict(assets_file, hosts_data)
             self._write_findings_from_dict(findings_file, all_findings)
+            if containers_data:
+                self._write_containers_from_dict(assets_file, containers_data)
+                self._write_container_findings_from_dict(findings_file, containers_data)
 
     def _write_assets_from_dict(self, assets_file, hosts_data):
         """Write assets from dictionary data to JSONL file."""
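Note: the container writers added below reuse the same JSONL convention as the existing host asset and finding writers: one serialized record per line, appended to an already-open file handle. RegScale's actual `_write_item` implementation is not shown in this diff; a minimal standalone sketch of the pattern, with a hypothetical `Record` type standing in for `IntegrationAsset`/`IntegrationFinding`:

    import json
    from dataclasses import asdict, dataclass

    @dataclass
    class Record:  # hypothetical stand-in for IntegrationAsset / IntegrationFinding
        name: str
        identifier: str

    def write_item(handle, item: Record) -> None:
        # JSONL: one JSON document per line, so the file can be streamed record by record
        handle.write(json.dumps(asdict(item)) + "\n")

    with open("./artifacts/example.jsonl", "w") as fh:
        write_item(fh, Record(name="nginx", identifier="abc123"))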
@@ -949,6 +965,160 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
 
         logger.info(f"Wrote {findings_written} findings to {self.FINDINGS_FILE}")
 
+    def _write_containers_from_dict(
+        self, containers_file: TextIOWrapper, containers_data: List[Dict[str, Any]]
+    ) -> None:
+        """
+        Write containers from dictionary data to JSONL file.
+
+        :param TextIOWrapper containers_file: Open file handle to write containers to
+        :param List[Dict[str, Any]] containers_data: List of container dictionaries to process
+        """
+        containers_written = 0
+        for container in containers_data:
+            try:
+                if container_asset := self.parse_container_asset(container=container):
+                    self._write_item(containers_file, container_asset)
+                    containers_written += 1
+            except Exception as e:
+                logger.error(f"Error processing container: {str(e)}")
+                logger.debug(traceback.format_exc())
+
+        logger.info("Wrote %s containers to %s", containers_written, containers_file.name)
+
+    def _write_container_findings_from_dict(
+        self, container_findings_file: TextIOWrapper, container_findings_data: List[Dict[str, Any]]
+    ):
+        """
+        Write container findings from dictionary data to JSONL file.
+
+        :param TextIOWrapper container_findings_file: Path to the container findings file
+        :param List[Dict[str, Any]] container_findings_data: Dictionary of container findings data
+        """
+        findings_written = 0
+        for finding in container_findings_data:
+            try:
+                container_id = finding.get("containerId", "")
+                if parsed_finding := self.parse_container_finding(finding=finding, container_id=container_id):
+                    self._write_item(container_findings_file, parsed_finding)
+                    findings_written += 1
+            except Exception as e:
+                logger.error(f"Error processing container finding: {str(e)}")
+                logger.debug(traceback.format_exc())
+
+        logger.info("Wrote %s container findings to %s", findings_written, container_findings_file.name)
+
+    def parse_container_asset(self, container: dict) -> Optional[IntegrationAsset]:
+        """
+        Parse a single container asset from Qualys container data.
+
+        :param container: Dictionary representing a container
+        :return: IntegrationAsset object
+        :rtype: Optional[IntegrationAsset]
+        """
+        state_map = {
+            "running": AssetStatus.Active,
+            "stopped": AssetStatus.Inactive,
+            "paused": AssetStatus.Inactive,
+            "restarting": AssetStatus.Active,
+            "exited": AssetStatus.Inactive,
+        }
+        try:
+            # Extract container information
+            container_id = container.get("containerId", "")
+            name = container.get("name", "Unknown Container")
+            image_id = container.get("imageId", "")
+            state = container.get("state", "stopped")
+            sha = container.get("sha", "")
+            state_changed = self._convert_timestamp_to_date_str(container.get("stateChanged", ""))
+
+            return IntegrationAsset(
+                name=name,
+                identifier=container_id,
+                asset_type="Virtual Machine (VM)",
+                asset_category="Hardware",
+                operating_system="Linux",
+                status=state_map.get((state or "running").lower(), AssetStatus.Inactive),
+                external_id=container_id,
+                date_last_updated=state_changed,
+                mac_address=None,
+                notes=f"Qualys Container ID: {container_id}. Image ID: {image_id}. SHA: {sha}",
+                parent_id=self.plan_id,
+                parent_module="securityplans",
+                is_virtual=True,
+            )
+
+        except Exception as e:
+            logger.error(f"Error parsing container asset: {str(e)}")
+            logger.debug(traceback.format_exc())
+
+    def parse_container_finding(self, finding: dict, container_id: str):
+        """
+        Parse a single container finding from Qualys container vulnerability data.
+
+        :param dict finding: Dictionary representing a container vulnerability
+        :param str container_id: Container ID associated with the finding
+        :return: IntegrationFinding object
+        :rtype: Optional[IntegrationFinding]
+        """
+
+        vulns: List[dict] = finding.get("vulnerabilities")
+        severity_map = {
+            "1": IssueSeverity.Critical,
+            "2": IssueSeverity.High,
+            "3": IssueSeverity.Moderate,
+            "4": IssueSeverity.Low,
+            "5": IssueSeverity.NotAssigned,
+        }
+
+        for vuln in vulns:
+            try:
+                # Extract finding information
+                title = vuln.get("title", "Unknown Container Vulnerability")
+                severity_num = vuln.get("severity", 0)
+                severity = severity_map.get(str(severity_num), IssueSeverity.NotAssigned)
+                description = vuln.get("result", "No description available")
+                status = vuln.get("status", "New")
+                vuln_id = vuln.get("id", "")
+
+                qid = vuln.get("qid", "")
+
+                # Get current time for any missing date fields
+                current_time = self.scan_date or get_current_datetime()
+
+                # Convert timestamp to datetime if needed
+                first_found = vuln.get("firstFound", current_time)
+                last_found = vuln.get("lastFound", current_time)
+
+                # Handle timestamp conversion if it's a numeric timestamp
+                first_found = self._convert_timestamp_to_date_str(first_found)
+                last_found = self._convert_timestamp_to_date_str(last_found)
+
+                cve = next(iter(vuln.get("cveids", [])), "")
+                # Create finding object
+                return IntegrationFinding(
+                    title=title,
+                    description=description,
+                    severity=severity,
+                    status=self.get_finding_status(status),
+                    external_id=vuln_id,
+                    asset_identifier=container_id,
+                    cve=cve,
+                    category="Vulnerability",
+                    plugin_name=cve or f"QID-{qid}",
+                    control_labels=[f"QID-{qid}"],
+                    cvss_v3_base_score=vuln.get("cvss3Info", {}).get("baseScore"),
+                    cvss_v3_vector=vuln.get("cvss3Info", {}).get("temporalScore"),
+                    first_seen=first_found,
+                    last_seen=last_found,
+                    evidence=vuln.get("result", "No evidence available"),
+                )
+
+            except Exception as e:
+                logger.error(f"Error parsing container finding: {str(e)}")
+                logger.debug(traceback.format_exc())
+                continue  # Continue to next vulnerability if this one fails
+
     def _process_xml_elements(self, hosts):
         """Process XML element hosts and detections with progress tracking."""
         # Convert XML elements to dictionaries first
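Reading the `.get()` calls in `parse_container_asset` and `parse_container_finding`, the container records these methods expect look roughly like the following; the field names come from this diff, while the values are purely illustrative:

    # Illustrative Qualys container record (values invented for the example)
    example_container = {
        "containerId": "f3a9c2d1",
        "name": "payments-api",
        "imageId": "sha256:0d1e...",
        "state": "running",             # mapped onto AssetStatus via state_map
        "sha": "0d1e...",
        "stateChanged": 1717430400000,  # epoch value, normalized to a date string
        "vulnerabilities": [
            {
                "id": "cv-100",
                "qid": "376023",
                "title": "Example vulnerability",
                "severity": 2,          # looked up in severity_map by str(severity)
                "status": "Active",
                "result": "Package foo 1.2 is vulnerable",
                "cveids": ["CVE-2024-0001"],
                "firstFound": 1717430400000,
                "lastFound": 1717516800000,
            }
        ],
    }

Note that `parse_container_finding` returns from inside its loop, so each container record yields at most one `IntegrationFinding` even when `vulnerabilities` holds several entries.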
@@ -1154,6 +1324,31 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
 
         return unique_id
 
+    def _convert_timestamp_to_date_str(self, timestamp_value: Any) -> str:
+        """
+        Convert a timestamp value to a date string with validation.
+
+        :param Any timestamp_value: The timestamp value to convert
+        :return: Date string in ISO format
+        :raises ValueError: If the timestamp is not numeric
+        :rtype: str
+        """
+        try:
+            # Handle empty or None values early
+            if not timestamp_value and timestamp_value != 0:
+                raise ValueError(f"Invalid timestamp value: {timestamp_value}, defaulting to current datetime")
+
+            # Convert to integer timestamp
+            timestamp_int = normalize_timestamp(timestamp_value)
+
+            s = date_obj(timestamp_int)
+            if not s or timestamp_int == 0:
+                raise ValueError(f"Invalid timestamp value: {timestamp_value}, defaulting to current datetime")
+            return date_str(s)
+        except ValueError as e:
+            logger.error(f"Error converting timestamp to date string: {str(e)}")
+            return get_current_datetime()
+
     def get_finding_status(self, status: Optional[str]) -> IssueStatus:
         """
         Convert the Qualys status to a RegScale issue status.
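`_convert_timestamp_to_date_str` delegates to `normalize_timestamp`, `date_obj`, and `date_str` from `regscale.core.utils.date` (also changed in this release, +26 -3). Their exact behavior is not shown in this section, but the usual normalization for Qualys-style epoch values distinguishes milliseconds from seconds; a sketch of that heuristic, for intuition only:

    from datetime import datetime, timezone

    def normalize_epoch(value) -> int:
        # Heuristic sketch: 13-digit epochs are milliseconds, 10-digit are seconds.
        ts = int(float(value))
        return ts // 1000 if ts >= 1_000_000_000_000 else ts

    print(datetime.fromtimestamp(normalize_epoch("1717430400000"), tz=timezone.utc).isoformat())
    # 2024-06-03T16:00:00+00:00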
@@ -87,4 +87,14 @@ def sync_sophos(regscale_ssp_id: int, url: str) -> None:
     edr_sophos.run_sync(regscale_ssp_id=regscale_ssp_id, url=url)
 
 
+@edr.command(name="sync_tanium")
+@regscale_ssp_id()
+def sync_tanium(regscale_ssp_id: int) -> None:
+    """Sync Edr from Tanium to RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Edr
+
+    edr_tanium = Edr("tanium")
+    edr_tanium.run_sync(regscale_ssp_id=regscale_ssp_id)
+
+
 # pylint: enable=line-too-long
@@ -360,9 +360,9 @@ def add_report_evidence(
     "--catalog_id",
     "-c",
     help="RegScale Catalog ID for the selected framework.",
-    prompt="RegScale Catalog ID",
     hide_input=False,
-    required=…
+    required=False,
+    default=None,
 )
 @click.option(  # type: ignore
     "--framework",
@@ -304,6 +304,19 @@ DOWNLOAD_QUERY = """
   }
 }
 """
+RERUN_REPORT_QUERY = """
+mutation RerunReport($reportId: ID!) {
+  rerunReport(input: {id: $reportId}) {
+    report {
+      id
+      lastRun {
+        url
+        status
+      }
+    }
+  }
+}
+"""
 ISSUE_QUERY = """query IssuesTable(
   $filterBy: IssueFilters
   $first: Int
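As the utils.py changes further down show, this mutation is sent through `send_request`, which wraps query and variables in the standard GraphQL envelope; a standalone illustration of the resulting payload (report ID invented):

    import json

    # Abbreviated copy of the query added above.
    RERUN_REPORT_QUERY = (
        "mutation RerunReport($reportId: ID!) { rerunReport(input: {id: $reportId})"
        " { report { id lastRun { url status } } } }"
    )

    # send_request builds: payload = {"query": query, "variables": variables}
    payload = {"query": RERUN_REPORT_QUERY, "variables": {"reportId": "abc-123"}}
    print(json.dumps(payload)[:60] + "...")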
@@ -5,8 +5,9 @@ import re
 from typing import List, Dict, Any, Iterator, Optional
 
 from regscale.core.app.utils.parser_utils import safe_datetime_str
-from regscale.integrations.scanner_integration import …
+from regscale.integrations.scanner_integration import IntegrationFinding
 from regscale.utils.dict_utils import get_value
+from regscale.models import Issue
 from .constants import (
     get_wiz_issue_queries,
     WizVulnerabilityType,
@@ -353,7 +354,7 @@ class WizIssue(WizVulnerabilityIntegration):
             source_rule_id=source_rule_id,
             vulnerability_type=vulnerability_type.value,
             date_created=date_created,
-            due_date=…
+            due_date=Issue.get_due_date(severity, self.app.config, "wiz", date_created),
             recommendation_for_mitigation=source_rule.get("resolutionRecommendation")
             or wiz_issue.get("description", ""),
             poam_comments=None,
@@ -167,7 +167,9 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         if not asset_id:
             return None
 
+        first_seen = node.get("firstDetectedAt") or node.get("firstSeenAt") or get_current_datetime()
         severity = self.get_issue_severity(node.get("severity", "Low"))
+        due_date = regscale_models.Issue.get_due_date(severity, self.app.config, "wiz", first_seen)
 
         status = self.map_status_to_issue_status(node.get("status", "Open"))
         name: str = node.get("name", "")
@@ -186,7 +188,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             status=status,
             asset_identifier=asset_id,
             external_id=f"{node.get('sourceRule', {'id': cve}).get('id')}",
-            first_seen=…
+            first_seen=first_seen,
+            date_created=first_seen,
             last_seen=node.get("lastDetectedAt") or node.get("analyzedAt") or get_current_datetime(),
             remediation=node.get("description", ""),
             cvss_score=node.get("score"),
@@ -195,6 +198,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             cvss_v3_base_score=node.get("score"),
             source_rule_id=node.get("sourceRule", {}).get("id"),
             vulnerability_type=vulnerability_type.value,
+            due_date=due_date,
         )
     except (KeyError, TypeError, ValueError) as e:
         logger.error("Error parsing Wiz finding: %s", str(e), exc_info=True)
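The three hunks above all feed `Issue.get_due_date`, a helper added to regscale/models/regscale_models/issue.py in this release (+47 lines) whose body is not part of this section. Conceptually it derives a due date from severity, per-integration configuration, and a start date; a hedged sketch of that pattern, with an invented config shape:

    from datetime import datetime, timedelta

    # Invented config shape for illustration; the real keys live in the RegScale
    # application config and are not shown in this diff.
    config = {"issues": {"wiz": {"critical": 30, "high": 60, "moderate": 90, "low": 180}}}

    def get_due_date(severity: str, config: dict, integration: str, start: str) -> str:
        days = config["issues"][integration].get(severity.lower(), 180)
        return (datetime.fromisoformat(start) + timedelta(days=days)).isoformat()

    print(get_due_date("High", config, "wiz", "2024-06-03T16:00:00"))
    # 2024-08-02T16:00:00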
@@ -16,6 +16,7 @@ from zipfile import ZipFile
 import cachetools
 import requests
 from pydantic import ValidationError
+from rich.progress import Progress, TaskID
 
 from regscale.core.app.api import Api
 from regscale.core.app.utils.app_utils import (
@@ -27,19 +28,29 @@ from regscale.core.app.utils.app_utils import (
 )
 from regscale.core.utils.date import datetime_obj
 from regscale.integrations.commercial.wizv2.constants import (
-    DOWNLOAD_QUERY,
     BEARER,
-    …
+    CHECK_INTERVAL_FOR_DOWNLOAD_REPORT,
     CONTENT_TYPE,
-    RATE_LIMIT_MSG,
     CREATE_REPORT_QUERY,
+    DOWNLOAD_QUERY,
     MAX_RETRIES,
-    …
+    RATE_LIMIT_MSG,
+    REPORTS_QUERY,
+    RERUN_REPORT_QUERY,
 )
 from regscale.integrations.commercial.wizv2.models import ComplianceReport, ComplianceCheckStatus
 from regscale.integrations.commercial.wizv2.variables import WizVariables
 from regscale.integrations.commercial.wizv2.wiz_auth import wiz_authenticate
-from regscale.models import …
+from regscale.models import (
+    File,
+    Sbom,
+    SecurityControl,
+    SecurityPlan,
+    Catalog,
+    ControlImplementation,
+    Assessment,
+    regscale_models,
+)
 from regscale.utils import PaginatedGraphQLClient
 from regscale.utils.decorators import deprecated
 
@@ -439,13 +450,14 @@ def fetch_report_data(report_id: str) -> List[Dict]:
     """
     try:
         download_url = get_report_url_and_status(report_id)
-        logger.…
+        logger.debug(f"Fetching report {report_id} from: {download_url}")
 
         with closing(requests.get(url=download_url, stream=True, timeout=10)) as response:
             response.raise_for_status()
-            logger.info(f"Streaming and parsing report {report_id}")
+            logger.info(f"Streaming and parsing report {report_id}...")
 
             reader = csv.DictReader(codecs.iterdecode(response.iter_lines(), encoding="utf-8"), delimiter=",")
+            logger.info(f"Report {report_id} fetched successfully.")
             return list(reader)
     except requests.RequestException as e:
         error_and_exit(f"Failed to fetch report {report_id}: {str(e)}")
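The pattern above decodes the HTTP byte stream line by line and feeds it directly to `csv.DictReader`, so a large report never has to be held in memory as a single string; a minimal standalone equivalent:

    import codecs
    import csv

    # requests' response.iter_lines() yields bytes; simulate it with an iterator.
    byte_lines = iter([b"id,severity", b"1,High", b"2,Low"])

    reader = csv.DictReader(codecs.iterdecode(byte_lines, encoding="utf-8"), delimiter=",")
    print(list(reader))  # [{'id': '1', 'severity': 'High'}, {'id': '2', 'severity': 'Low'}]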
@@ -586,7 +598,7 @@ def send_request(
     """
     logger.debug("Sending a request to Wiz API")
     api = Api()
-    payload = …
+    payload = {"query": query, "variables": variables}
     if api_endpoint_url is None:
         api_endpoint_url = WizVariables.wizUrl
     if WizVariables.wizAccessToken:
@@ -650,8 +662,7 @@ def get_report_url_and_status(report_id: str) -> str:
             raise requests.RequestException("Failed to download report")
 
         response_json = response.json()
-        errors…
-        if errors:
+        if errors := response_json.get("errors"):
             message = errors[0]["message"]
             if RATE_LIMIT_MSG in message:
                 rate = errors[0]["extensions"]["retryAfter"]
@@ -664,6 +675,10 @@ def get_report_url_and_status(report_id: str) -> str:
         status = response_json.get("data", {}).get("report", {}).get("lastRun", {}).get("status")
         if status == "COMPLETED":
             return response_json["data"]["report"]["lastRun"]["url"]
+        elif status == "EXPIRED":
+            logger.warning("Report %s is expired, rerunning report...", report_id)
+            rerun_expired_report({"reportId": report_id})
+            return get_report_url_and_status(report_id)
 
     raise requests.RequestException("Download failed, exceeding the maximum number of retries")
 
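Combined with `rerun_expired_report` in the next hunk, the download flow now recovers from expired report URLs by triggering a rerun and polling again; note that the recursive call restarts the retry budget each time a report comes back EXPIRED. A simplified, stubbed sketch of the control flow (the `poll_report` helper is hypothetical):

    import time

    def poll_report(report_id: str):
        # Hypothetical stub for one Wiz status poll.
        return "COMPLETED", f"https://example.invalid/reports/{report_id}.csv"

    def rerun_expired_report(variables: dict) -> None:
        pass  # stands in for the helper added in the next hunk

    def get_report_url_and_status(report_id: str) -> str:
        for _ in range(3):  # MAX_RETRIES in the real code
            status, url = poll_report(report_id)
            if status == "COMPLETED":
                return url
            if status == "EXPIRED":
                rerun_expired_report({"reportId": report_id})
                return get_report_url_and_status(report_id)  # recurse, as the diff does
            time.sleep(1)  # CHECK_INTERVAL_FOR_DOWNLOAD_REPORT in the real code
        raise RuntimeError("Download failed, exceeding the maximum number of retries")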
@@ -680,13 +695,25 @@ def download_report(variables: Dict) -> requests.Response:
     return response
 
 
+def rerun_expired_report(variables: Dict) -> requests.Response:
+    """
+    Rerun a report
+
+    :param Dict variables: Variables for Wiz request
+    :return: Response object from Wiz API
+    :rtype: requests.Response
+    """
+    response = send_request(RERUN_REPORT_QUERY, variables=variables)
+    return response
+
+
 def _sync_compliance(
     wiz_project_id: str,
     regscale_id: int,
     regscale_module: str,
     client_id: str,
     client_secret: str,
-    catalog_id: int,
+    catalog_id: Optional[int] = None,
     framework: Optional[str] = "NIST800-53R5",
 ) -> List[ComplianceReport]:
     """
@@ -697,7 +724,7 @@ def _sync_compliance(
     :param str regscale_module: RegScale module
     :param str client_id: Wiz Client ID
     :param str client_secret: Wiz Client Secret
-    :param int catalog_id: Catalog ID, defaults to None
+    :param Optional[int] catalog_id: Catalog ID, defaults to None
     :param Optional[str] framework: Framework, defaults to NIST800-53R5
     :return: List of ComplianceReport objects
     :rtype: List[ComplianceReport]
@@ -708,68 +735,84 @@ def _sync_compliance(
         client_id=client_id,
         client_secret=client_secret,
     )
-…
+    with compliance_job_progress:
+        report_job = compliance_job_progress.add_task("[#f68d1f]Fetching Wiz compliance report...", total=1)
+        fetch_regscale_data_job = compliance_job_progress.add_task(
+            "[#f68d1f]Fetching RegScale Catalog info for framework...", total=1
+        )
+        compliance_job_progress.update(report_job, completed=True, advance=1)
 
-…
+        framework_mapping = {
+            "CSF": "NIST CSF v1.1",
+            "NIST800-53R5": "NIST SP 800-53 Revision 5",
+            "NIST800-53R4": "NIST SP 800-53 Revision 4",
+        }
+        sync_framework = framework_mapping.get(framework)
+        snake_framework = sync_framework.replace(" ", "_")
+        logger.debug(f"{snake_framework=}")
+        logger.info("Fetching Wiz compliance report for project ID %s...", wiz_project_id)
+        report_data = fetch_framework_report(wiz_project_id, snake_framework)
+        report_models = []
+        compliance_job_progress.update(report_job, completed=True, advance=1)
+
+        if catalog_id:
+            logger.info("Fetching all Controls for catalog #%d...", catalog_id)
+            catalog = Catalog.get_with_all_details(catalog_id=catalog_id)
+            controls = catalog.get("controls") if catalog else []
+        else:
+            # get all of the ControlImplementations for the security plan and get the controls from them
+            logger.info("Fetching all Controls for %s #%d...", regscale_module, regscale_id)
+            controls = SecurityControl.get_controls_by_parent_id_and_module(
+                parent_module=regscale_module, parent_id=regscale_id, return_dicts=True
+            )
+        logger.info("Received %d control(s) from RegScale.", len(controls))
+
+        passing_controls = {}
+        failing_controls = {}
+        controls_to_reports = {}
+
+        compliance_job_progress.update(fetch_regscale_data_job, completed=True, advance=1)
+        logger.info("Analyzing ComplianceReport for framework %s from Wiz...", sync_framework)
+        running_compliance_job = compliance_job_progress.add_task(
+            "[#f68d1f]Building compliance posture from wiz report...",
+            total=len(report_data),
+        )
+        for row in report_data:
+            try:
+                cr = ComplianceReport(**row)
+                if cr.framework == sync_framework:
+                    check_compliance(
+                        cr,
+                        controls,
+                        passing_controls,
+                        failing_controls,
+                        controls_to_reports,
+                    )
+                    report_models.append(cr)
+            except ValidationError:
+                error_message = traceback.format_exc()
+                logger.error(f"Error creating ComplianceReport: {error_message}")
+            finally:
                 compliance_job_progress.update(running_compliance_job, advance=1)
-
+        try:
+            saving_regscale_data_job = compliance_job_progress.add_task(
+                "[#f68d1f]Saving RegScale data...", total=len(controls_to_reports)
+            )
+            create_assessment_from_compliance_report(
+                controls_to_reports=controls_to_reports,
+                regscale_id=regscale_id,
+                regscale_module=regscale_module,
+                controls=controls,
+                progress=compliance_job_progress,
+                task=saving_regscale_data_job,
+            )
+            logger.info("Completed saving RegScale data.")
+        except Exception:
             error_message = traceback.format_exc()
-            logger.error(f"Error creating …
-…
-                controls_to_reports=controls_to_reports,
-                regscale_id=regscale_id,
-                regscale_module=regscale_module,
-                controls=controls,
-            )
-            compliance_job_progress.update(saving_regscale_data_job, completed=True, advance=1)
-
-        except Exception:
-            error_message = traceback.format_exc()
-            logger.error(f"Error creating ControlImplementations from compliance report: {error_message}")
-    return report_models
+            logger.error(f"Error creating ControlImplementations from compliance report: {error_message}")
+        finally:
+            compliance_job_progress.update(saving_regscale_data_job, completed=True, advance=len(controls_to_reports))
+        return report_models
 
 
 def check_compliance(
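For reference, the framework plumbing at the top of the rewritten body maps the CLI framework key to the Wiz report framework name and then snake-cases it for `fetch_framework_report`:

    framework_mapping = {
        "CSF": "NIST CSF v1.1",
        "NIST800-53R5": "NIST SP 800-53 Revision 5",
        "NIST800-53R4": "NIST SP 800-53 Revision 4",
    }
    sync_framework = framework_mapping.get("NIST800-53R5")  # "NIST SP 800-53 Revision 5"
    snake_framework = sync_framework.replace(" ", "_")      # "NIST_SP_800-53_Revision_5"

An unmapped framework key would make `framework_mapping.get` return `None`, so the subsequent `.replace` assumes one of the three mapped values.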
@@ -833,7 +876,7 @@ def _clean_passing_list(passing: Dict, failing: Dict) -> None:
 
 
 def create_assessment_from_compliance_report(
-    controls_to_reports: Dict, regscale_id: int, regscale_module: str, controls: List
+    controls_to_reports: Dict, regscale_id: int, regscale_module: str, controls: List, progress: Progress, task: TaskID
 ) -> None:
     """
     Create assessment from compliance report
@@ -842,6 +885,8 @@ def create_assessment_from_compliance_report(
     :param int regscale_id: RegScale ID
     :param str regscale_module: RegScale module
     :param List controls: Controls
+    :param Progress progress: Progress object, used for progress bar updates
+    :param TaskID task: Task ID, used for progress bar updates
     :return: None
     :rtype: None
     """
@@ -854,6 +899,7 @@ def create_assessment_from_compliance_report(
             break
         filtered_results = [x for x in implementations if x.controlID == control_record_id]
         create_report_assessment(filtered_results, reports, control_id)
+        progress.update(task, advance=1)
 
 
 def create_report_assessment(filtered_results: List, reports: List, control_id: str) -> None:
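The `progress`/`task` parameters threaded through `create_assessment_from_compliance_report` follow the standard rich pattern of adding a task with a total and advancing it once per unit of work; a minimal self-contained example of that pattern:

    from rich.progress import Progress

    with Progress() as progress:
        task = progress.add_task("[#f68d1f]Saving RegScale data...", total=3)
        for _ in range(3):
            # one control's reports would be processed here
            progress.update(task, advance=1)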