regscale-cli 6.20.4.1__py3-none-any.whl → 6.20.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli has been flagged as potentially problematic.
- regscale/__init__.py +1 -1
- regscale/_version.py +39 -0
- regscale/core/app/internal/__init__.py +13 -0
- regscale/core/app/internal/model_editor.py +3 -3
- regscale/core/app/internal/set_permissions.py +173 -0
- regscale/core/app/utils/file_utils.py +11 -1
- regscale/core/app/utils/regscale_utils.py +34 -129
- regscale/core/utils/date.py +86 -30
- regscale/integrations/commercial/defender.py +3 -0
- regscale/integrations/commercial/qualys/__init__.py +40 -14
- regscale/integrations/commercial/qualys/containers.py +324 -0
- regscale/integrations/commercial/qualys/scanner.py +203 -8
- regscale/integrations/commercial/synqly/edr.py +10 -0
- regscale/integrations/commercial/wizv2/click.py +11 -7
- regscale/integrations/commercial/wizv2/constants.py +28 -0
- regscale/integrations/commercial/wizv2/issue.py +3 -2
- regscale/integrations/commercial/wizv2/parsers.py +23 -0
- regscale/integrations/commercial/wizv2/scanner.py +89 -30
- regscale/integrations/commercial/wizv2/utils.py +208 -75
- regscale/integrations/commercial/wizv2/variables.py +2 -1
- regscale/integrations/commercial/wizv2/wiz_auth.py +3 -3
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +98 -20
- regscale/integrations/public/fedramp/fedramp_docx.py +2 -3
- regscale/integrations/scanner_integration.py +7 -2
- regscale/models/integration_models/cisa_kev_data.json +187 -5
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/__init__.py +2 -0
- regscale/models/regscale_models/asset.py +1 -1
- regscale/models/regscale_models/catalog.py +16 -0
- regscale/models/regscale_models/file.py +2 -1
- regscale/models/regscale_models/form_field_value.py +59 -1
- regscale/models/regscale_models/issue.py +47 -0
- regscale/models/regscale_models/modules.py +88 -1
- regscale/models/regscale_models/organization.py +30 -0
- regscale/models/regscale_models/regscale_model.py +20 -6
- regscale/models/regscale_models/security_control.py +47 -0
- regscale/models/regscale_models/security_plan.py +32 -0
- regscale/models/regscale_models/vulnerability.py +3 -3
- regscale/models/regscale_models/vulnerability_mapping.py +2 -2
- regscale/regscale.py +2 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/RECORD +49 -44
- tests/fixtures/test_fixture.py +33 -4
- tests/regscale/core/test_app.py +53 -32
- tests/regscale/test_init.py +94 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.20.4.1.dist-info → regscale_cli-6.20.6.0.dist-info}/top_level.txt +0 -0
@@ -7,23 +7,26 @@ import os
 import time
 import traceback
 import xml.etree.ElementTree as ET
-from
-
+from datetime import date, datetime
+from io import TextIOWrapper
 from pathlib import Path
-from
+from typing import Any, Dict, Iterator, List, Optional, TextIO, Tuple, Union
+
+from rich.progress import BarColumn, Progress, SpinnerColumn, TaskID, TextColumn, TimeElapsedColumn
 
 from regscale.core.app.utils.app_utils import get_current_datetime
+from regscale.core.utils.date import date_obj, date_str, normalize_timestamp
+from regscale.integrations.commercial.qualys.qualys_error_handler import QualysErrorHandler
 from regscale.integrations.commercial.qualys.variables import QualysVariables
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
 from regscale.integrations.scanner_integration import (
     IntegrationAsset,
     IntegrationFinding,
-    issue_due_date,
     ScannerIntegrationType,
+    issue_due_date,
 )
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import AssetStatus, IssueSeverity, IssueStatus
-from regscale.integrations.commercial.qualys.qualys_error_handler import QualysErrorHandler
 
 logger = logging.getLogger("regscale")
 
@@ -55,6 +58,8 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
     # Constants for file paths
     ASSETS_FILE = "./artifacts/qualys_total_cloud_assets.jsonl"
     FINDINGS_FILE = "./artifacts/qualys_total_cloud_findings.jsonl"
+    CONTAINERS_FILE = "./artifacts/qualys_total_cloud_containers.jsonl"
+    CONTAINER_FINDINGS_FILE = "./artifacts/qualys_total_cloud_container_findings.jsonl"
 
     def __init__(self, *args, **kwargs):
         """
@@ -65,6 +70,7 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         """
         self.type = ScannerIntegrationType.VULNERABILITY
         self.xml_data = kwargs.pop("xml_data", None)
+        self.containers = kwargs.pop("containers", None)
         # Setting a dummy file path to avoid validation errors
         if self.xml_data and "file_path" not in kwargs:
             kwargs["file_path"] = None
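The hunks above appear to come from regscale/integrations/commercial/qualys/scanner.py (per the file list): the TotalCloud integration now accepts an optional containers payload and writes it to two new JSONL artifact files. Below is a minimal sketch of how the new keyword could be wired up; the `plan_id` keyword, the sample data, and the manual construction itself are assumptions for illustration, since the CLI normally builds this object on its own.

```python
# Hypothetical wiring -- in practice the regscale CLI constructs this object itself.
from regscale.integrations.commercial.qualys.scanner import QualysTotalCloudJSONLIntegration

host_detection_dict = {"HOST_LIST_VM_DETECTION_OUTPUT": {}}  # placeholder parsed XML payload
containers = [
    {"containerId": "abc123", "name": "nginx-sidecar", "imageId": "img-1", "state": "RUNNING"},
]

integration = QualysTotalCloudJSONLIntegration(
    plan_id=42,              # assumed keyword; self.plan_id is used when parsing containers
    xml_data=host_detection_dict,
    containers=containers,   # new optional kwarg popped in __init__
)
```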
@@ -875,7 +881,9 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         logger.info(f"Found {num_findings} total findings in XML dictionary data")
 
         # Process assets and findings
-        self._process_dict_assets_and_findings(
+        self._process_dict_assets_and_findings(
+            hosts_data=hosts_data, all_findings=all_findings, containers_data=self.containers
+        )
 
     def _extract_hosts_from_dict(self):
         """Extract host data from XML dictionary structure."""
@@ -914,11 +922,19 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
 
         return all_findings
 
-    def _process_dict_assets_and_findings(self, hosts_data, all_findings):
-        """
+    def _process_dict_assets_and_findings(self, hosts_data, all_findings, containers_data=None):
+        """
+        Process assets and findings from dictionary data.
+        :param List[Dict[str, Any]] hosts_data: List of host data dictionaries
+        :param List[Dict[str, Any]] all_findings: List of findings dictionaries
+        :param List[Dict[str, Any]] containers_data: List of container data dictionaries
+        """
         with open(self.ASSETS_FILE, "w") as assets_file, open(self.FINDINGS_FILE, "w") as findings_file:
             self._write_assets_from_dict(assets_file, hosts_data)
             self._write_findings_from_dict(findings_file, all_findings)
+            if containers_data:
+                self._write_containers_from_dict(assets_file, containers_data)
+                self._write_container_findings_from_dict(findings_file, containers_data)
 
     def _write_assets_from_dict(self, assets_file, hosts_data):
         """Write assets from dictionary data to JSONL file."""
@@ -949,6 +965,160 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
 
         logger.info(f"Wrote {findings_written} findings to {self.FINDINGS_FILE}")
 
+    def _write_containers_from_dict(
+        self, containers_file: TextIOWrapper, containers_data: List[Dict[str, Any]]
+    ) -> None:
+        """
+        Write containers from dictionary data to JSONL file.
+
+        :param TextIOWrapper containers_file: Open file handle to write containers to
+        :param List[Dict[str, Any]] containers_data: List of container dictionaries to process
+        """
+        containers_written = 0
+        for container in containers_data:
+            try:
+                if container_asset := self.parse_container_asset(container=container):
+                    self._write_item(containers_file, container_asset)
+                    containers_written += 1
+            except Exception as e:
+                logger.error(f"Error processing container: {str(e)}")
+                logger.debug(traceback.format_exc())
+
+        logger.info("Wrote %s containers to %s", containers_written, containers_file.name)
+
+    def _write_container_findings_from_dict(
+        self, container_findings_file: TextIOWrapper, container_findings_data: List[Dict[str, Any]]
+    ):
+        """
+        Write container findings from dictionary data to JSONL file.
+
+        :param TextIOWrapper container_findings_file: Path to the container findings file
+        :param List[Dict[str, Any]] container_findings_data: Dictionary of container findings data
+        """
+        findings_written = 0
+        for finding in container_findings_data:
+            try:
+                container_id = finding.get("containerId", "")
+                if parsed_finding := self.parse_container_finding(finding=finding, container_id=container_id):
+                    self._write_item(container_findings_file, parsed_finding)
+                    findings_written += 1
+            except Exception as e:
+                logger.error(f"Error processing container finding: {str(e)}")
+                logger.debug(traceback.format_exc())
+
+        logger.info("Wrote %s container findings to %s", findings_written, container_findings_file.name)
+
+    def parse_container_asset(self, container: dict) -> Optional[IntegrationAsset]:
+        """
+        Parse a single container asset from Qualys container data.
+
+        :param container: Dictionary representing a container
+        :return: IntegrationAsset object
+        :rtype: Optional[IntegrationAsset]
+        """
+        state_map = {
+            "running": AssetStatus.Active,
+            "stopped": AssetStatus.Inactive,
+            "paused": AssetStatus.Inactive,
+            "restarting": AssetStatus.Active,
+            "exited": AssetStatus.Inactive,
+        }
+        try:
+            # Extract container information
+            container_id = container.get("containerId", "")
+            name = container.get("name", "Unknown Container")
+            image_id = container.get("imageId", "")
+            state = container.get("state", "stopped")
+            sha = container.get("sha", "")
+            state_changed = self._convert_timestamp_to_date_str(container.get("stateChanged", ""))
+
+            return IntegrationAsset(
+                name=name,
+                identifier=container_id,
+                asset_type="Virtual Machine (VM)",
+                asset_category="Hardware",
+                operating_system="Linux",
+                status=state_map.get((state or "running").lower(), AssetStatus.Inactive),
+                external_id=container_id,
+                date_last_updated=state_changed,
+                mac_address=None,
+                notes=f"Qualys Container ID: {container_id}. Image ID: {image_id}. SHA: {sha}",
+                parent_id=self.plan_id,
+                parent_module="securityplans",
+                is_virtual=True,
+            )
+
+        except Exception as e:
+            logger.error(f"Error parsing container asset: {str(e)}")
+            logger.debug(traceback.format_exc())
+
+    def parse_container_finding(self, finding: dict, container_id: str):
+        """
+        Parse a single container finding from Qualys container vulnerability data.
+
+        :param dict finding: Dictionary representing a container vulnerability
+        :param str container_id: Container ID associated with the finding
+        :return: IntegrationFinding object
+        :rtype: Optional[IntegrationFinding]
+        """
+
+        vulns: List[dict] = finding.get("vulnerabilities")
+        severity_map = {
+            "1": IssueSeverity.Critical,
+            "2": IssueSeverity.High,
+            "3": IssueSeverity.Moderate,
+            "4": IssueSeverity.Low,
+            "5": IssueSeverity.NotAssigned,
+        }
+
+        for vuln in vulns:
+            try:
+                # Extract finding information
+                title = vuln.get("title", "Unknown Container Vulnerability")
+                severity_num = vuln.get("severity", 0)
+                severity = severity_map.get(str(severity_num), IssueSeverity.NotAssigned)
+                description = vuln.get("result", "No description available")
+                status = vuln.get("status", "New")
+                vuln_id = vuln.get("id", "")
+
+                qid = vuln.get("qid", "")
+
+                # Get current time for any missing date fields
+                current_time = self.scan_date or get_current_datetime()
+
+                # Convert timestamp to datetime if needed
+                first_found = vuln.get("firstFound", current_time)
+                last_found = vuln.get("lastFound", current_time)
+
+                # Handle timestamp conversion if it's a numeric timestamp
+                first_found = self._convert_timestamp_to_date_str(first_found)
+                last_found = self._convert_timestamp_to_date_str(last_found)
+
+                cve = next(iter(vuln.get("cveids", [])), "")
+                # Create finding object
+                return IntegrationFinding(
+                    title=title,
+                    description=description,
+                    severity=severity,
+                    status=self.get_finding_status(status),
+                    external_id=vuln_id,
+                    asset_identifier=container_id,
+                    cve=cve,
+                    category="Vulnerability",
+                    plugin_name=cve or f"QID-{qid}",
+                    control_labels=[f"QID-{qid}"],
+                    cvss_v3_base_score=vuln.get("cvss3Info", {}).get("baseScore"),
+                    cvss_v3_vector=vuln.get("cvss3Info", {}).get("temporalScore"),
+                    first_seen=first_found,
+                    last_seen=last_found,
+                    evidence=vuln.get("result", "No evidence available"),
+                )
+
+            except Exception as e:
+                logger.error(f"Error parsing container finding: {str(e)}")
+                logger.debug(traceback.format_exc())
+                continue  # Continue to next vulnerability if this one fails
+
     def _process_xml_elements(self, hosts):
         """Process XML element hosts and detections with progress tracking."""
         # Convert XML elements to dictionaries first
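The new parse_container_asset and parse_container_finding helpers map a Qualys container record onto RegScale's IntegrationAsset / IntegrationFinding types: container state drives AssetStatus, and the numeric severity strings "1"–"5" drive IssueSeverity. Note that parse_container_finding returns as soon as it successfully parses one vulnerability, so each container record yields at most one finding per call. The sketch below uses an invented record shaped after the keys the parsers read and reuses the `integration` object from the earlier sketch; it is not real Qualys output.

```python
# Invented sample shaped after the keys parse_container_asset/parse_container_finding read.
sample_container = {
    "containerId": "f3a9c2",
    "name": "payments-api",
    "imageId": "sha256:1a2b3c",
    "state": "RUNNING",
    "sha": "sha256:1a2b3c",
    "stateChanged": 1717430400000,  # epoch milliseconds, normalized before use
    "vulnerabilities": [
        {
            "qid": "376023",
            "title": "OpenSSL vulnerability",
            "severity": 2,
            "status": "ACTIVE",
            "cveids": ["CVE-2024-0001"],
            "firstFound": 1717430400000,
            "lastFound": 1719004800000,
            "cvss3Info": {"baseScore": 8.1},
        }
    ],
}

# "RUNNING".lower() maps to AssetStatus.Active via the state_map above.
asset = integration.parse_container_asset(container=sample_container)
# Only the first successfully parsed vulnerability is returned as an IntegrationFinding.
finding = integration.parse_container_finding(
    finding=sample_container, container_id=sample_container["containerId"]
)
```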
@@ -1154,6 +1324,31 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
 
         return unique_id
 
+    def _convert_timestamp_to_date_str(self, timestamp_value: Any) -> str:
+        """
+        Convert a timestamp value to a date string with validation.
+
+        :param Any timestamp_value: The timestamp value to convert
+        :return: Date string in ISO format
+        :raises ValueError: If the timestamp is not numeric
+        :rtype: str
+        """
+        try:
+            # Handle empty or None values early
+            if not timestamp_value and timestamp_value != 0:
+                raise ValueError(f"Invalid timestamp value: {timestamp_value}, defaulting to current datetime")
+
+            # Convert to integer timestamp
+            timestamp_int = normalize_timestamp(timestamp_value)
+
+            s = date_obj(timestamp_int)
+            if not s or timestamp_int == 0:
+                raise ValueError(f"Invalid timestamp value: {timestamp_value}, defaulting to current datetime")
+            return date_str(s)
+        except ValueError as e:
+            logger.error(f"Error converting timestamp to date string: {str(e)}")
+            return get_current_datetime()
+
     def get_finding_status(self, status: Optional[str]) -> IssueStatus:
         """
         Convert the Qualys status to a RegScale issue status.
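_convert_timestamp_to_date_str leans on the expanded regscale.core.utils.date helpers (normalize_timestamp, date_obj, date_str; see the date.py entry in the file list) and falls back to the current datetime whenever the value is empty, zero, or otherwise invalid. Below is a standalone approximation of that behavior using only the standard library, assuming Qualys emits epoch seconds or milliseconds; the real normalization rules live in date.py and may differ.

```python
# A standalone approximation of the fallback behavior, assuming Qualys emits epoch
# seconds or milliseconds; the real helper delegates to regscale.core.utils.date.
from datetime import datetime, timezone


def to_iso_date(value) -> str:
    try:
        ts = float(value)
        if ts > 1e11:  # treat very large magnitudes as milliseconds
            ts /= 1000.0
        if ts == 0:
            raise ValueError("zero timestamp")
        return datetime.fromtimestamp(ts, tz=timezone.utc).isoformat()
    except (TypeError, ValueError):
        return datetime.now(tz=timezone.utc).isoformat()


print(to_iso_date(1717430400000))   # '2024-06-03T16:00:00+00:00'
print(to_iso_date("not-a-number"))  # falls back to the current UTC time
```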
@@ -87,4 +87,14 @@ def sync_sophos(regscale_ssp_id: int, url: str) -> None:
     edr_sophos.run_sync(regscale_ssp_id=regscale_ssp_id, url=url)
 
 
+@edr.command(name="sync_tanium")
+@regscale_ssp_id()
+def sync_tanium(regscale_ssp_id: int) -> None:
+    """Sync Edr from Tanium to RegScale."""
+    from regscale.models.integration_models.synqly_models.connectors import Edr
+
+    edr_tanium = Edr("tanium")
+    edr_tanium.run_sync(regscale_ssp_id=regscale_ssp_id)
+
+
 # pylint: enable=line-too-long
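This hunk appears to be from regscale/integrations/commercial/synqly/edr.py: sync_tanium follows the same pattern as sync_sophos, wrapping a Synqly Edr connector and pushing EDR data into a RegScale security plan. A hedged invocation sketch using Click's test runner follows; the import path of the `edr` group and the `--regscale_ssp_id` flag name (presumably added by the @regscale_ssp_id() decorator) are assumptions.

```python
# A sketch using Click's CliRunner; the import location of the `edr` group is assumed
# from the regscale/integrations/commercial/synqly/edr.py entry in the file list.
from click.testing import CliRunner

from regscale.integrations.commercial.synqly.edr import edr

runner = CliRunner()
# Assumes @regscale_ssp_id() registers a --regscale_ssp_id option on the command.
result = runner.invoke(edr, ["sync_tanium", "--regscale_ssp_id", "123"])
print(result.exit_code, result.output)
```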
@@ -4,7 +4,6 @@
 
 # standard python imports
 import logging
-import os
 from typing import Optional
 
 import click
@@ -343,16 +342,16 @@ def add_report_evidence(
 @click.option(  # type: ignore
     "--client_id",
     "-i",
-    help="Wiz Client ID
-    default=
+    help="Wiz Client ID, or can be set as environment variable wizClientId",
+    default="",
     hide_input=False,
     required=False,
 )
 @click.option(  # type: ignore
     "--client_secret",
     "-s",
-    help="Wiz Client Secret
-    default=
+    help="Wiz Client Secret, or can be set as environment variable wizClientSecret",
+    default="",
     hide_input=True,
     required=False,
 )
@@ -360,9 +359,9 @@ def add_report_evidence(
     "--catalog_id",
     "-c",
     help="RegScale Catalog ID for the selected framework.",
-    prompt="RegScale Catalog ID",
     hide_input=False,
-    required=
+    required=False,
+    default=None,
 )
 @click.option(  # type: ignore
     "--framework",
@@ -384,6 +383,11 @@ def sync_compliance(
     """Sync compliance posture from Wiz to RegScale"""
     from regscale.integrations.commercial.wizv2.utils import _sync_compliance
 
+    if not client_secret:
+        client_secret = WizVariables.wizClientSecret
+    if not client_id:
+        client_id = WizVariables.wizClientId
+
     _sync_compliance(
         wiz_project_id=wiz_project_id,
         regscale_id=regscale_id,
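With the prompts and defaults removed from the --client_id/--client_secret options, sync_compliance now resolves credentials itself: an explicit CLI value wins, otherwise it falls back to WizVariables.wizClientId / wizClientSecret (which, per the updated help text, can be supplied through the wizClientId / wizClientSecret environment variables). A simplified sketch of that precedence, not the actual implementation:

```python
# Simplified illustration of the fallback order: explicit CLI value first, then the
# WizVariables-backed configuration. Names mirror the diff; logic is approximated.
from typing import Tuple


def resolve_wiz_credentials(client_id: str, client_secret: str, variables) -> Tuple[str, str]:
    resolved_id = client_id or variables.wizClientId
    resolved_secret = client_secret or variables.wizClientSecret
    if not resolved_id or not resolved_secret:
        raise ValueError("Wiz client ID and secret must be supplied via CLI or configuration")
    return resolved_id, resolved_secret
```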
@@ -131,6 +131,9 @@ INVENTORY_QUERY = """
       graphEntity{
         id
         providerUniqueId
+        publicExposures(first: 5) {
+          totalCount
+        }
         name
         type
         projects {
@@ -304,6 +307,19 @@ DOWNLOAD_QUERY = """
   }
 }
 """
+RERUN_REPORT_QUERY = """
+mutation RerunReport($reportId: ID!) {
+  rerunReport(input: {id: $reportId}) {
+    report {
+      id
+      lastRun {
+        url
+        status
+      }
+    }
+  }
+}
+"""
 ISSUE_QUERY = """query IssuesTable(
   $filterBy: IssueFilters
   $first: Int
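RERUN_REPORT_QUERY is a GraphQL mutation that re-runs an existing Wiz report and returns the refreshed lastRun url and status, presumably so stale report downloads can be regenerated before being pulled. The sketch below posts the mutation directly with requests; the endpoint URL and token handling are assumptions, and the package's own Wiz helpers would normally perform this call.

```python
# Illustrative only: posts the new mutation straight to a Wiz GraphQL endpoint.
# The endpoint URL and bearer-token handling are assumptions; the CLI uses its own helpers.
import requests

from regscale.integrations.commercial.wizv2.constants import RERUN_REPORT_QUERY

WIZ_API_URL = "https://api.us1.app.wiz.io/graphql"  # region-specific, assumed for the example


def rerun_report(report_id: str, token: str) -> dict:
    response = requests.post(
        WIZ_API_URL,
        json={"query": RERUN_REPORT_QUERY, "variables": {"reportId": report_id}},
        headers={"Authorization": f"Bearer {token}"},
        timeout=60,
    )
    response.raise_for_status()
    # Returns the lastRun block (url/status) requested by the mutation.
    return response.json()["data"]["rerunReport"]["report"]["lastRun"]
```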
@@ -422,6 +438,18 @@ VULNERABILITY_QUERY = """
       name
       detailedName
       description
+      commentThread {
+        comments(first:100) {
+          edges {
+            node {
+              body,
+              author {
+                name
+              }
+            }
+          }
+        }
+      },
       severity: vendorSeverity
       weightedSeverity
       status
@@ -5,8 +5,9 @@ import re
 from typing import List, Dict, Any, Iterator, Optional
 
 from regscale.core.app.utils.parser_utils import safe_datetime_str
-from regscale.integrations.scanner_integration import
+from regscale.integrations.scanner_integration import IntegrationFinding
 from regscale.utils.dict_utils import get_value
+from regscale.models import Issue
 from .constants import (
     get_wiz_issue_queries,
     WizVulnerabilityType,
@@ -353,7 +354,7 @@ class WizIssue(WizVulnerabilityIntegration):
                 source_rule_id=source_rule_id,
                 vulnerability_type=vulnerability_type.value,
                 date_created=date_created,
-                due_date=
+                due_date=Issue.get_due_date(severity, self.app.config, "wiz", date_created),
                 recommendation_for_mitigation=source_rule.get("resolutionRecommendation")
                 or wiz_issue.get("description", ""),
                 poam_comments=None,
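The previous hard-coded due date (truncated in this view) is replaced by Issue.get_due_date(severity, self.app.config, "wiz", date_created), which centralizes severity-driven due-date policy in the Issue model. The sketch below only illustrates the general shape of such a calculation; the day counts and mapping are invented, and the real values come from the RegScale configuration.

```python
# Invented day counts -- only meant to show the shape of a severity-driven
# due-date calculation like the one Issue.get_due_date centralizes.
from datetime import datetime, timedelta

ASSUMED_DAYS_BY_SEVERITY = {"Critical": 30, "High": 60, "Moderate": 90, "Low": 180}


def due_date_for(severity: str, date_created: str) -> str:
    created = datetime.fromisoformat(date_created)
    days = ASSUMED_DAYS_BY_SEVERITY.get(severity, 180)
    return (created + timedelta(days=days)).isoformat()


print(due_date_for("High", "2024-06-03T16:00:00"))  # 2024-08-02T16:00:00
```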
@@ -271,6 +271,29 @@ def get_ip_address_from_props(network_dict: Dict) -> Optional[str]:
     return network_dict.get("ip4_address") or network_dict.get("ip6_address")
 
 
+def get_ip_v4_from_props(network_dict: Dict) -> Optional[str]:
+    """
+    Get IPv4 address from properties
+    :param Dict network_dict: Network dictionary
+    :return: IPv4 address if it can be parsed from the network dictionary
+    :rtype: Optional[str]
+    """
+    ip = network_dict.get("address")
+    if ip:
+        logger.info("get_ip_v4_from_props: %s", ip)
+    return network_dict.get("address")
+
+
+def get_ip_v6_from_props(network_dict: Dict) -> Optional[str]:
+    """
+    Get IPv6 address from properties
+    :param Dict network_dict: Network dictionary
+    :return: IPv6 address if it can be parsed from the network dictionary
+    :rtype: Optional[str]
+    """
+    return network_dict.get("ip6_address")
+
+
 def fetch_wiz_data(
     query: str,
     variables: dict,
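The two new helpers in regscale/integrations/commercial/wizv2/parsers.py read addresses straight off a Wiz network-properties dictionary: the IPv4 variant returns the "address" key (logging when one is present) and the IPv6 variant returns "ip6_address". A short usage sketch with an invented properties dictionary shaped after those keys:

```python
# Invented network properties shaped after the keys the new helpers read.
from regscale.integrations.commercial.wizv2.parsers import (
    get_ip_v4_from_props,
    get_ip_v6_from_props,
)

network_props = {"address": "10.20.30.40", "ip6_address": "2001:db8::1"}

print(get_ip_v4_from_props(network_props))  # 10.20.30.40
print(get_ip_v6_from_props(network_props))  # 2001:db8::1
```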