regscale-cli 6.23.0.1-py3-none-any.whl → 6.24.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- regscale/_version.py +1 -1
- regscale/core/app/application.py +2 -0
- regscale/integrations/commercial/__init__.py +1 -0
- regscale/integrations/commercial/jira.py +95 -22
- regscale/integrations/commercial/sarif/sarif_converter.py +1 -1
- regscale/integrations/commercial/wizv2/click.py +132 -2
- regscale/integrations/commercial/wizv2/compliance_report.py +1574 -0
- regscale/integrations/commercial/wizv2/constants.py +72 -2
- regscale/integrations/commercial/wizv2/data_fetcher.py +61 -0
- regscale/integrations/commercial/wizv2/file_cleanup.py +104 -0
- regscale/integrations/commercial/wizv2/issue.py +775 -27
- regscale/integrations/commercial/wizv2/policy_compliance.py +599 -181
- regscale/integrations/commercial/wizv2/reports.py +243 -0
- regscale/integrations/commercial/wizv2/scanner.py +668 -245
- regscale/integrations/compliance_integration.py +534 -56
- regscale/integrations/due_date_handler.py +210 -0
- regscale/integrations/public/cci_importer.py +444 -0
- regscale/integrations/scanner_integration.py +718 -153
- regscale/models/integration_models/CCI_List.xml +1 -0
- regscale/models/integration_models/cisa_kev_data.json +18 -3
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/control_implementation.py +13 -3
- regscale/models/regscale_models/form_field_value.py +1 -1
- regscale/models/regscale_models/milestone.py +1 -0
- regscale/models/regscale_models/regscale_model.py +225 -60
- regscale/models/regscale_models/security_plan.py +3 -2
- regscale/regscale.py +7 -0
- {regscale_cli-6.23.0.1.dist-info → regscale_cli-6.24.0.1.dist-info}/METADATA +17 -17
- {regscale_cli-6.23.0.1.dist-info → regscale_cli-6.24.0.1.dist-info}/RECORD +45 -28
- tests/fixtures/test_fixture.py +13 -8
- tests/regscale/integrations/public/__init__.py +0 -0
- tests/regscale/integrations/public/test_alienvault.py +220 -0
- tests/regscale/integrations/public/test_cci.py +458 -0
- tests/regscale/integrations/public/test_cisa.py +1021 -0
- tests/regscale/integrations/public/test_emass.py +518 -0
- tests/regscale/integrations/public/test_fedramp.py +851 -0
- tests/regscale/integrations/public/test_fedramp_cis_crm.py +3661 -0
- tests/regscale/integrations/public/test_file_uploads.py +506 -0
- tests/regscale/integrations/public/test_oscal.py +453 -0
- tests/regscale/models/test_form_field_value_integration.py +304 -0
- tests/regscale/models/test_module_integration.py +582 -0
- {regscale_cli-6.23.0.1.dist-info → regscale_cli-6.24.0.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.23.0.1.dist-info → regscale_cli-6.24.0.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.23.0.1.dist-info → regscale_cli-6.24.0.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.23.0.1.dist-info → regscale_cli-6.24.0.1.dist-info}/top_level.txt +0 -0
Expanded diff (regscale/integrations/scanner_integration.py):

@@ -22,10 +22,11 @@ from regscale.core.app.application import Application
from regscale.core.app.utils.api_handler import APIHandler
from regscale.core.app.utils.app_utils import create_progress_object, get_current_datetime
from regscale.core.app.utils.catalog_utils.common import objective_to_control_dot
- from regscale.core.utils.date import date_obj, date_str, datetime_str,
+ from regscale.core.utils.date import date_obj, date_str, datetime_str, get_day_increment
from regscale.integrations.commercial.durosuite.process_devices import scan_durosuite_devices
from regscale.integrations.commercial.durosuite.variables import DuroSuiteVariables
from regscale.integrations.commercial.stig_mapper_integration.mapping_engine import StigMappingEngine as STIGMapper
+ from regscale.integrations.due_date_handler import DueDateHandler
from regscale.integrations.public.cisa import pull_cisa_kev
from regscale.integrations.variables import ScannerVariables
from regscale.models import DateTimeEncoder, OpenIssueDict, Property, regscale_models
@@ -322,6 +323,8 @@ class IntegrationFinding:
:param str impact: The impact of the finding, defaults to an empty string.
:param str recommendation_for_mitigation: Recommendations for mitigating the finding, defaults to an empty string.
:param str asset_identifier: The identifier of the asset associated with the finding, defaults to an empty string.
+ :param str issue_asset_identifier_value: This is the value of all the assets affected by the issue, defaults to an
+ empty string.
:param Optional[str] cci_ref: The Common Configuration Enumeration reference for the finding, defaults to None.
:param str rule_id: The rule ID of the finding, defaults to an empty string.
:param str rule_version: The version of the rule associated with the finding, defaults to an empty string.
@@ -418,6 +421,7 @@ class IntegrationFinding:
impact: str = ""
recommendation_for_mitigation: str = ""
asset_identifier: str = ""
+ issue_asset_identifier_value: Optional[str] = None
comments: Optional[str] = None
source_report: Optional[str] = None
point_of_contact: Optional[str] = None
@@ -657,6 +661,10 @@ class ScannerIntegration(ABC):

# Set configuration options from kwargs
self.suppress_asset_not_found_errors = kwargs.get("suppress_asset_not_found_errors", False)
+
+ # Initialize due date handler for this integration
+ self.due_date_handler = DueDateHandler(self.title, config=self.app.config)
+
if self.is_component:
self.component = regscale_models.Component.get_object(self.plan_id)
self.parent_module: str = regscale_models.Component.get_module_string()
@@ -752,6 +760,74 @@ class ScannerIntegration(ABC):

return regscale_models.Issue.get_user_id()

+ def get_user_organization_id(self, user_id: Optional[str]) -> Optional[int]:
+ """
+ Get the organization ID for a user.
+
+ :param Optional[str] user_id: The user ID to look up
+ :return: The organization ID or None if not found
+ :rtype: Optional[int]
+ """
+ if not user_id:
+ return None
+
+ try:
+ from regscale.models import User
+
+ user = User.get_object(user_id)
+ return user.orgId if user else None
+ except Exception as e:
+ logger.debug(f"Unable to get user organization for user {user_id}: {e}")
+ return None
+
+ def get_ssp_organization_id(self) -> Optional[int]:
+ """
+ Get the organization ID from the security plan.
+
+ :return: The organization ID or None if not found
+ :rtype: Optional[int]
+ """
+ try:
+ from regscale.models import SecurityPlan
+
+ if ssp := SecurityPlan.get_object(self.plan_id):
+ # First try to get organization from SSP owner
+ if getattr(ssp, "systemOwnerId"):
+ if owner_org_id := self.get_user_organization_id(ssp.systemOwnerId):
+ return owner_org_id
+ # Fallback to SSP's direct organization
+ return ssp.orgId
+ except Exception as e:
+ logger.debug(f"Unable to get SSP organization for plan {self.plan_id}: {e}")
+
+ return None
+
+ def determine_issue_organization_id(self, issue_owner_id: Optional[str]) -> Optional[int]:
+ """
+ Determine the organization ID for an issue based on the expected behavior:
+
+ 1. If Issue Owner is set and has an Org, use Issue Owner's Org
+ 2. Else if SSP Owner has an Org, use SSP Owner's Org
+ 3. Else use SSP's Org if set
+
+ :param Optional[str] issue_owner_id: The issue owner ID
+ :return: The organization ID or None
+ :rtype: Optional[int]
+ """
+ # First check if issue owner has an organization
+ if issue_owner_id:
+ if owner_org_id := self.get_user_organization_id(issue_owner_id):
+ logger.debug(f"Setting issue organization {owner_org_id} from issue owner {issue_owner_id}")
+ return owner_org_id
+
+ # Fallback to SSP organization (which includes SSP owner check)
+ if ssp_org_id := self.get_ssp_organization_id():
+ logger.debug(f"Setting issue organization {ssp_org_id} from SSP {self.plan_id}")
+ return ssp_org_id
+
+ logger.debug(f"No organization found for issue owner {issue_owner_id} or SSP {self.plan_id}")
+ return None
+
def get_cci_to_control_map(self) -> ThreadSafeDict[str, set[int]] | dict:
"""
Gets the CCI to control map
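The new organization-resolution helpers above fall back from the issue owner's org, to the SSP owner's org, to the SSP's own org. A minimal standalone sketch of that resolution order, with plain dictionaries standing in for the RegScale User and SecurityPlan lookups (all names below are illustrative, not the package API):

from typing import Optional

# Illustrative stand-ins for RegScale records; the real code looks these up via the API.
USERS = {"u-1": {"orgId": 10}, "u-2": {"orgId": None}}
SSPS = {5: {"systemOwnerId": "u-2", "orgId": 42}}


def user_org_id(user_id: Optional[str]) -> Optional[int]:
    """Return the user's org ID, or None if the user or org is missing."""
    user = USERS.get(user_id or "")
    return user["orgId"] if user else None


def determine_issue_org_id(issue_owner_id: Optional[str], plan_id: int) -> Optional[int]:
    """Mirror the fallback order: issue owner's org, then SSP owner's org, then the SSP's org."""
    if org_id := user_org_id(issue_owner_id):
        return org_id
    if ssp := SSPS.get(plan_id):
        if org_id := user_org_id(ssp.get("systemOwnerId")):
            return org_id
        return ssp.get("orgId")
    return None


print(determine_issue_org_id("u-1", 5))  # 10 -- issue owner's org wins
print(determine_issue_org_id(None, 5))   # 42 -- SSP owner has no org, falls back to the SSP's org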
@@ -1046,39 +1122,104 @@ class ScannerIntegration(ABC):
:param Optional[str] component_name: The name of the component to associate the asset with. If None, the asset
is added directly to the security plan without a component association.
"""
- # Continue with normal asset creation/update
if not asset.identifier:
logger.warning("Asset has no identifier, skipping")
return

-
-
- logger.debug("Searching for component: %s...", component_name)
- component = component or self.components_by_title.get(component_name)
- if not component:
- logger.debug("No existing component found with name %s, proceeding to create it...", component_name)
- component = regscale_models.Component(
- title=component_name,
- componentType=asset.component_type,
- securityPlansId=self.plan_id,
- description=component_name,
- componentOwnerId=self.get_assessor_id(),
- ).get_or_create()
- self.components.append(component)
- if component.securityPlansId and not self.is_component:
- component_mapping = regscale_models.ComponentMapping(
- componentId=component.id,
- securityPlanId=self.plan_id,
- )
- component_mapping.get_or_create()
- self.components_by_title[component_name] = component
+ # Get or create component if needed
+ component = self._get_or_create_component_for_asset(asset, component_name)

+ # Create or update the asset
created, existing_or_new_asset = self.create_new_asset(asset, component=None)

- #
+ # Update result counts
self.update_result_counts("assets", {"created": [1] if created else [], "updated": [] if created else [1]})

- #
+ # Handle component mapping and DuroSuite processing
+ self._handle_component_mapping_and_durosuite(existing_or_new_asset, component, asset, created)
+
+ def _get_or_create_component_for_asset(
+ self, asset: IntegrationAsset, component_name: Optional[str]
+ ) -> Optional[regscale_models.Component]:
+ """
+ Get or create a component for the asset if component_name is provided.
+
+ :param IntegrationAsset asset: The asset being processed
+ :param Optional[str] component_name: Name of the component to associate with
+ :return: The component object or None
+ :rtype: Optional[regscale_models.Component]
+ """
+ if not component_name:
+ return getattr(self, "component") if self.is_component else None
+
+ component = getattr(self, "component") if self.is_component else None
+ component = component or self.components_by_title.get(component_name)
+
+ if not component:
+ component = self._create_new_component(asset, component_name)
+
+ self._handle_component_mapping(component)
+ self.components_by_title[component_name] = component
+ return component
+
+ def _create_new_component(self, asset: IntegrationAsset, component_name: str) -> regscale_models.Component:
+ """
+ Create a new component for the asset.
+
+ :param IntegrationAsset asset: The asset being processed
+ :param str component_name: Name of the component to create
+ :return: The newly created component
+ :rtype: regscale_models.Component
+ """
+ logger.debug("No existing component found with name %s, proceeding to create it...", component_name)
+ component = regscale_models.Component(
+ title=component_name,
+ componentType=asset.component_type,
+ securityPlansId=self.plan_id,
+ description=component_name,
+ componentOwnerId=self.get_assessor_id(),
+ ).get_or_create()
+ self.components.append(component)
+ return component
+
+ def _handle_component_mapping(self, component: regscale_models.Component) -> None:
+ """
+ Handle component mapping creation if needed.
+
+ :param regscale_models.Component component: The component to create mapping for
+ """
+ if not (component.securityPlansId and not self.is_component):
+ return
+
+ component_mapping = regscale_models.ComponentMapping(
+ componentId=component.id,
+ securityPlanId=self.plan_id,
+ )
+ mapping_result = component_mapping.get_or_create()
+
+ if mapping_result is None:
+ logger.debug(
+ f"Failed to create or find ComponentMapping for componentId={component.id}, securityPlanId={self.plan_id}"
+ )
+ else:
+ mapping_id = getattr(mapping_result, "id", "unknown")
+ logger.debug(f"Successfully handled ComponentMapping for componentId={component.id}, ID={mapping_id}")
+
+ def _handle_component_mapping_and_durosuite(
+ self,
+ existing_or_new_asset: Optional[regscale_models.Asset],
+ component: Optional[regscale_models.Component],
+ asset: IntegrationAsset,
+ created: bool,
+ ) -> None:
+ """
+ Handle component mapping and DuroSuite scanning after asset creation.
+
+ :param Optional[regscale_models.Asset] existing_or_new_asset: The asset that was created/updated
+ :param Optional[regscale_models.Component] component: The associated component, if any
+ :param IntegrationAsset asset: The original integration asset
+ :param bool created: Whether the asset was newly created
+ """
if existing_or_new_asset and component:
_was_created, _asset_mapping = regscale_models.AssetMapping(
assetId=existing_or_new_asset.id,
@@ -1086,9 +1227,33 @@ class ScannerIntegration(ABC):
).get_or_create_with_status()

if created and DuroSuiteVariables.duroSuiteEnabled:
- # Check if this is a DuroSuite compatible asset
scan_durosuite_devices(asset=asset, plan_id=self.plan_id, progress=self.asset_progress)

+ def _truncate_field(self, value: Optional[str], max_length: int, field_name: str) -> Optional[str]:
+ """
+ Truncate a field to the maximum allowed length to prevent database errors.
+
+ :param Optional[str] value: The value to truncate
+ :param int max_length: Maximum allowed length
+ :param str field_name: Name of the field being truncated (for logging)
+ :return: Truncated value or None
+ :rtype: Optional[str]
+ """
+ if not value:
+ return value
+
+ if len(value) > max_length:
+ truncated = value[:max_length]
+ logger.warning(
+ "Truncated %s field from %d to %d characters for value: %s...",
+ field_name,
+ len(value),
+ max_length,
+ truncated[:100],
+ )
+ return truncated
+ return value
+
def create_new_asset(
self, asset: IntegrationAsset, component: Optional[regscale_models.Component]
) -> tuple[bool, Optional[regscale_models.Asset]]:
@@ -1101,22 +1266,130 @@ class ScannerIntegration(ABC):
:return: Tuple of (was_created, newly created asset instance).
:rtype: tuple[bool, Optional[regscale_models.Asset]]
"""
-
+ if not self._validate_asset_requirements(asset):
+ return False, None
+
+ asset_type = self._validate_and_map_asset_type(asset.asset_type)
+ other_tracking_number = self._prepare_tracking_number(asset)
+ field_data = self._prepare_truncated_asset_fields(asset, other_tracking_number)
+
+ new_asset = self._create_regscale_asset_model(asset, component, asset_type, field_data)
+
+ created, new_asset = new_asset.create_or_update_with_status(bulk_update=True)
+ self.asset_map_by_identifier[asset.identifier] = new_asset
+ logger.debug("Created new asset with identifier %s", asset.identifier)
+
+ self._handle_software_and_stig_processing(new_asset, asset, created)
+ return created, new_asset
+
+ def _validate_asset_requirements(self, asset: IntegrationAsset) -> bool:
+ """Validate that the asset has required fields for creation."""
if not asset.name:
logger.warning(
"Asset name is required for asset creation. Skipping asset creation of asset_type: %s", asset.asset_type
)
- return False
+ return False
+ return True
+
+ def _validate_and_map_asset_type(self, asset_type: str) -> str:
+ """Validate and map asset type to valid RegScale values."""
+ valid_asset_types = [
+ "Physical Server",
+ "Virtual Machine (VM)",
+ "Appliance",
+ "Network Router",
+ "Network Switch",
+ "Firewall",
+ "Desktop",
+ "Laptop",
+ "Tablet",
+ "Phone",
+ "Other",
+ ]
+
+ if asset_type not in valid_asset_types:
+ logger.debug(f"Asset type '{asset_type}' not in valid types, mapping to 'Other'")
+ return "Other"
+ return asset_type
+
+ def _prepare_tracking_number(self, asset: IntegrationAsset) -> str:
+ """Prepare and validate the tracking number for asset deduplication."""
+ other_tracking_number = asset.other_tracking_number or asset.identifier
+ if not other_tracking_number:
+ logger.warning("No tracking number available for asset %s, using name as fallback", asset.name)
+ other_tracking_number = asset.name
+ return other_tracking_number
+
+ def _prepare_truncated_asset_fields(self, asset: IntegrationAsset, other_tracking_number: str) -> dict:
+ """Prepare and truncate asset fields to prevent database errors."""
+ max_field_length = 450
+ name = self._process_asset_name(asset, max_field_length)
+
+ return {
+ "name": name,
+ "azure_identifier": self._truncate_field(asset.azure_identifier, max_field_length, "azureIdentifier"),
+ "aws_identifier": self._truncate_field(asset.aws_identifier, max_field_length, "awsIdentifier"),
+ "google_identifier": self._truncate_field(asset.google_identifier, max_field_length, "googleIdentifier"),
+ "other_cloud_identifier": self._truncate_field(
+ asset.other_cloud_identifier, max_field_length, "otherCloudIdentifier"
+ ),
+ "software_name": self._truncate_field(asset.software_name, max_field_length, "softwareName"),
+ "other_tracking_number": self._truncate_field(
+ other_tracking_number, max_field_length, "otherTrackingNumber"
+ ),
+ }
+
+ def _process_asset_name(self, asset: IntegrationAsset, max_field_length: int) -> str:
+ """Process and truncate asset name, handling special cases like Azure resource paths."""
+ name = self._truncate_field(asset.name, max_field_length, "name")
+
+ # For very long Azure resource paths, extract meaningful parts
+ if asset.name and len(asset.name) > max_field_length and "/" in asset.name:
+ name = self._shorten_azure_resource_path(asset.name, max_field_length)
+
+ return name
+
+ def _shorten_azure_resource_path(self, full_name: str, max_field_length: int) -> str:
+ """Shorten long Azure resource paths to meaningful parts."""
+ parts = full_name.split("/")
+ if len(parts) >= 4:
+ # Extract key components from Azure resource path
+ resource_group = next(
+ (p for i, p in enumerate(parts) if i > 0 and parts[i - 1].lower() == "resourcegroups"), ""
+ )
+ resource_type = parts[-2] if len(parts) > 1 else ""
+ resource_name = parts[-1]
+
+ # Build a shortened but meaningful name
+ if resource_group:
+ name = f"../{resource_group}/.../{resource_type}/{resource_name}"
+ else:
+ name = f".../{resource_type}/{resource_name}"
+
+ # Ensure it fits within limits
+ if len(name) > max_field_length:
+ name = name[-(max_field_length):]
+
+ logger.info(
+ "Shortened long Azure resource path from %d to %d characters: %s", len(full_name), len(name), name
+ )
+ return name
+
+ return self._truncate_field(full_name, max_field_length, "name")

+ def _create_regscale_asset_model(
+ self, asset: IntegrationAsset, component: Optional[regscale_models.Component], asset_type: str, field_data: dict
+ ) -> regscale_models.Asset:
+ """Create the RegScale Asset model with all required fields."""
new_asset = regscale_models.Asset(
- name=
+ name=field_data["name"],
description=asset.description,
bVirtual=asset.is_virtual,
- otherTrackingNumber=
- assetOwnerId=asset.asset_owner_id or "Unknown",
+ otherTrackingNumber=field_data["other_tracking_number"],
+ assetOwnerId=asset.asset_owner_id or regscale_models.Asset.get_user_id() or "Unknown",
parentId=component.id if component else self.plan_id,
parentModule=self.parent_module,
- assetType=
+ assetType=asset_type,
dateLastUpdated=asset.date_last_updated or get_current_datetime(),
status=asset.status,
assetCategory=asset.asset_category,
@@ -1127,7 +1400,7 @@ class ScannerIntegration(ABC):
serialNumber=asset.serial_number,
assetTagNumber=asset.asset_tag_number,
bPublicFacing=asset.is_public_facing,
- azureIdentifier=
+ azureIdentifier=field_data["azure_identifier"],
location=asset.location,
ipAddress=asset.ip_address,
iPv6Address=asset.ipv6_address,
@@ -1141,13 +1414,13 @@ class ScannerIntegration(ABC):
endOfLifeDate=asset.end_of_life_date,
vlanId=asset.vlan_id,
uri=asset.uri,
- awsIdentifier=
- googleIdentifier=
- otherCloudIdentifier=
+ awsIdentifier=field_data["aws_identifier"],
+ googleIdentifier=field_data["google_identifier"],
+ otherCloudIdentifier=field_data["other_cloud_identifier"],
patchLevel=asset.patch_level,
cpe=asset.cpe,
softwareVersion=asset.software_version,
- softwareName=
+ softwareName=field_data["software_name"],
softwareVendor=asset.software_vendor,
bLatestScan=asset.is_latest_scan,
bAuthenticatedScan=asset.is_authenticated_scan,
@@ -1156,20 +1429,21 @@ class ScannerIntegration(ABC):
softwareFunction=asset.software_function,
baselineConfiguration=asset.baseline_configuration,
)
+
if self.asset_identifier_field:
setattr(new_asset, self.asset_identifier_field, asset.identifier)

-
- # add to asset_map_by_identifier
- self.asset_map_by_identifier[asset.identifier] = new_asset
- logger.debug("Created new asset with identifier %s", asset.identifier)
+ return new_asset

+ def _handle_software_and_stig_processing(
+ self, new_asset: regscale_models.Asset, asset: IntegrationAsset, created: bool
+ ) -> None:
+ """Handle post-asset creation tasks like software inventory and STIG mapping."""
self.handle_software_inventory(new_asset, asset.software_inventory, created)
self.create_asset_data_and_link(new_asset, asset)
self.create_or_update_ports_protocol(new_asset, asset)
if self.stig_mapper:
self.stig_mapper.map_associated_stigs_to_asset(asset=new_asset, ssp_id=self.plan_id)
- return created, new_asset

def handle_software_inventory(
self, new_asset: regscale_models.Asset, software_inventory: List[Dict[str, Any]], created: bool
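The new _shorten_azure_resource_path helper in the hunks above keeps the resource group plus the trailing type/name segments of an over-long Azure resource ID (the real method only runs when the name exceeds the 450-character field limit checked in _process_asset_name). A self-contained sketch of the same heuristic, with logging omitted and names adapted from the diff for illustration:

def shorten_azure_resource_path(full_name: str, max_field_length: int = 450) -> str:
    """Keep the resource group plus the trailing type/name of a long Azure resource ID."""
    parts = full_name.split("/")
    if len(parts) < 4:
        return full_name[:max_field_length]
    # The segment immediately after "resourceGroups" is the resource group name.
    resource_group = next(
        (p for i, p in enumerate(parts) if i > 0 and parts[i - 1].lower() == "resourcegroups"), ""
    )
    resource_type = parts[-2] if len(parts) > 1 else ""
    resource_name = parts[-1]
    name = (
        f"../{resource_group}/.../{resource_type}/{resource_name}"
        if resource_group
        else f".../{resource_type}/{resource_name}"
    )
    # Trim from the left if the shortened form is still too long, matching the diff's behavior.
    return name[-max_field_length:] if len(name) > max_field_length else name


path = "/subscriptions/0000/resourceGroups/prod-rg/providers/Microsoft.Compute/virtualMachines/web-vm-01"
print(shorten_azure_resource_path(path))  # ../prod-rg/.../virtualMachines/web-vm-01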
@@ -1550,35 +1824,96 @@ class ScannerIntegration(ABC):
finding_id = self.get_finding_identifier(finding)
finding_id_lock = self._get_lock(finding_id)

+ self._log_finding_processing_info(finding, finding_id, issue_status, title)
+
with finding_id_lock:
- [11 removed lines not shown in the diff view]
- # Check if we should consolidate closed issues based on integrationFindingId and issueDueDates
- elif issue_status == regscale_models.IssueStatus.Closed:
- existing_issues = regscale_models.Issue.find_by_integration_finding_id(finding_id)
- # Find a closed issue with matching due date to consolidate with
- matching_closed_issue = next(
- (
- issue
- for issue in existing_issues
- if issue.status == regscale_models.IssueStatus.Closed
- and date_str(issue.dueDate) == date_str(finding.due_date)
- ),
- None,
- )
- if matching_closed_issue:
- return self._create_or_update_issue(finding, issue_status, title, matching_closed_issue)
+ existing_issue = self._find_existing_issue_for_finding(finding_id, finding, issue_status)
+ return self._create_or_update_issue(finding, issue_status, title, existing_issue)
+
+ def _log_finding_processing_info(
+ self, finding: IntegrationFinding, finding_id: str, issue_status: regscale_models.IssueStatus, title: str
+ ) -> None:
+ """Log finding processing information for debugging."""
+ logger.debug(
+ f"PROCESSING FINDING: external_id={finding.external_id}, finding_id={finding_id}, status={issue_status}, title='{title[:50]}...'"
+ )

-
+ if issue_status == regscale_models.IssueStatus.Closed:
+ logger.debug(f"CLOSED FINDING: This will create/update a CLOSED issue (status={issue_status})")
+
+ def _find_existing_issue_for_finding(
+ self, finding_id: str, finding: IntegrationFinding, issue_status: regscale_models.IssueStatus
+ ) -> Optional[regscale_models.Issue]:
+ """Find existing issue for the finding based on status and creation type."""
+ if ScannerVariables.issueCreation.lower() == "perasset":
+ return None
+
+ existing_issues = self._get_existing_issues_for_finding(finding_id, finding)
+
+ if issue_status == regscale_models.IssueStatus.Open:
+ return self._find_issue_for_open_status(existing_issues, finding_id)
+ elif issue_status == regscale_models.IssueStatus.Closed:
+ return self._find_issue_for_closed_status(existing_issues, finding, finding_id)
+
+ return None
+
+ def _get_existing_issues_for_finding(
+ self, finding_id: str, finding: IntegrationFinding
+ ) -> List[regscale_models.Issue]:
+ """Get existing issues for the finding using various lookup methods."""
+ existing_issues = regscale_models.Issue.find_by_integration_finding_id(finding_id)
+
+ # If no issues found by integrationFindingId, try fallback lookup by identifier fields
+ if not existing_issues and finding.external_id:
+ existing_issues = self._find_issues_by_identifier_fallback(finding.external_id)
+
+ return existing_issues
+
+ def _find_issue_for_open_status(
+ self, existing_issues: List[regscale_models.Issue], finding_id: str
+ ) -> Optional[regscale_models.Issue]:
+ """Find appropriate issue when the finding status is Open."""
+ # Find an open issue to update first
+ open_issue = next(
+ (issue for issue in existing_issues if issue.status != regscale_models.IssueStatus.Closed), None
+ )
+ if open_issue:
+ return open_issue
+
+ # If no open issue found, look for a closed issue to reopen
+ closed_issue = next(
+ (issue for issue in existing_issues if issue.status == regscale_models.IssueStatus.Closed), None
+ )
+ if closed_issue:
+ logger.debug(f"Reopening closed issue {closed_issue.id} for finding {finding_id}")
+ return closed_issue
+
+ return None
+
+ def _find_issue_for_closed_status(
+ self, existing_issues: List[regscale_models.Issue], finding: IntegrationFinding, finding_id: str
+ ) -> Optional[regscale_models.Issue]:
+ """Find appropriate issue when the finding status is Closed."""
+ # Find a closed issue with matching due date to consolidate with
+ matching_closed_issue = next(
+ (
+ issue
+ for issue in existing_issues
+ if issue.status == regscale_models.IssueStatus.Closed
+ and date_str(issue.dueDate) == date_str(finding.due_date)
+ ),
+ None,
+ )
+ if matching_closed_issue:
+ return matching_closed_issue
+
+ # If no matching closed issue, look for any existing issue to update
+ any_existing_issue = next(iter(existing_issues), None) if existing_issues else None
+ if any_existing_issue:
+ logger.debug(f"Closing existing issue {any_existing_issue.id} for finding {finding_id}")
+ return any_existing_issue
+
+ return None

def _create_or_update_issue(
self,
@@ -1639,6 +1974,90 @@ class ScannerIntegration(ABC):
self._handle_property_and_milestone_creation(issue, finding, existing_issue)
return issue

+ def _find_issues_by_identifier_fallback(self, external_id: str) -> List[regscale_models.Issue]:
+ """
+ Find issues by identifier fields (otherIdentifier or integration-specific field) as fallback.
+ This helps with deduplication when integrationFindingId lookup fails.
+
+ :param str external_id: The external ID to search for
+ :return: List of matching issues
+ :rtype: List[regscale_models.Issue]
+ """
+ fallback_issues = []
+
+ try:
+ # Get all issues for this plan/component
+ all_issues = regscale_models.Issue.get_all_by_parent(
+ parent_id=self.plan_id,
+ parent_module=self.parent_module,
+ )
+
+ # Filter by source report to only check our integration's issues
+ source_issues = [issue for issue in all_issues if issue.sourceReport == self.title]
+
+ # Look for matches by otherIdentifier
+ for issue in source_issues:
+ if getattr(issue, "otherIdentifier", None) == external_id:
+ fallback_issues.append(issue)
+ logger.debug(f"Found issue {issue.id} by otherIdentifier fallback: {external_id}")
+
+ # Also check integration-specific identifier field if configured
+ elif (
+ self.issue_identifier_field
+ and hasattr(issue, self.issue_identifier_field)
+ and getattr(issue, self.issue_identifier_field) == external_id
+ ):
+ fallback_issues.append(issue)
+ logger.debug(f"Found issue {issue.id} by {self.issue_identifier_field} fallback: {external_id}")
+
+ if fallback_issues:
+ logger.info(
+ f"Fallback deduplication found {len(fallback_issues)} existing issue(s) for external_id: {external_id}"
+ )
+
+ except Exception as e:
+ logger.warning(f"Error in fallback issue lookup for {external_id}: {e}")
+
+ return fallback_issues
+
+ def _set_issue_identifier_fields_internal(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+ """Set issue identifier fields (e.g., wizId) on the issue object without saving."""
+ if not finding.external_id:
+ logger.debug(f"finding.external_id is empty: {finding.external_id}")
+ return
+
+ logger.debug(f"Setting issue identifier fields: external_id={finding.external_id}")
+
+ # Set otherIdentifier field (the external ID field in Issue model)
+ if not getattr(issue, "otherIdentifier", None):  # Only set if not already set
+ issue.otherIdentifier = finding.external_id
+ logger.debug(f"Set otherIdentifier = {finding.external_id}")
+
+ # Set the specific identifier field if configured (e.g., wizId for Wiz)
+ if self.issue_identifier_field and hasattr(issue, self.issue_identifier_field):
+ current_value = getattr(issue, self.issue_identifier_field)
+ if not current_value:  # Only set if not already set
+ setattr(issue, self.issue_identifier_field, finding.external_id)
+ logger.debug(f"Set {self.issue_identifier_field} = {finding.external_id}")
+ else:
+ logger.debug(f"{self.issue_identifier_field} already set to: {current_value}")
+ else:
+ if self.issue_identifier_field:  # Only log warning if field is configured
+ logger.warning(
+ f"Cannot set issue_identifier_field: field='{self.issue_identifier_field}', hasattr={hasattr(issue, self.issue_identifier_field)}"
+ )
+
+ def _set_issue_identifier_fields(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+ """Set issue identifier fields (e.g., wizId) and save them to the database."""
+ self._set_issue_identifier_fields_internal(issue, finding)
+
+ # Explicitly save the issue to persist the identifier fields
+ try:
+ issue.save(bulk=True)
+ logger.info(f"Saved issue {issue.id} with identifier fields")
+ except Exception as e:
+ logger.error(f"Failed to save issue identifier fields: {e}")
+
def _set_basic_issue_fields(
self,
issue: regscale_models.Issue,
@@ -1664,25 +2083,31 @@ class ScannerIntegration(ABC):
issue.securityPlanId = self.plan_id if not self.is_component else None
issue.identification = finding.identification
issue.dateFirstDetected = finding.first_seen
- issue.assetIdentifier = asset_identifier
+ issue.assetIdentifier = finding.issue_asset_identifier_value or asset_identifier
+
+ # Set organization ID based on Issue Owner or SSP Owner hierarchy
+ issue.orgId = self.determine_issue_organization_id(issue.issueOwnerId)

def _set_issue_due_date(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
- """Set the due date for the issue."""
+ """Set the due date for the issue using DueDateHandler."""
if not finding.due_date:
try:
base_created = finding.date_created or issue.dateCreated
- finding.due_date =
+ finding.due_date = self.due_date_handler.calculate_due_date(
severity=finding.severity,
created_date=base_created,
-
+ cve=finding.cve,
+ title=finding.title or self.title,
)
- except Exception:
+ except Exception as e:
+ logger.warning(f"Error calculating due date with DueDateHandler: {e}")
# Final fallback to a Low severity default if anything goes wrong
base_created = finding.date_created or issue.dateCreated
- finding.due_date =
+ finding.due_date = self.due_date_handler.calculate_due_date(
severity=regscale_models.IssueSeverity.Low,
created_date=base_created,
-
+ cve=finding.cve,
+ title=finding.title or self.title,
)
issue.dueDate = finding.due_date

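_set_issue_due_date now delegates to DueDateHandler.calculate_due_date and falls back to a Low-severity calculation if that call raises. A minimal standalone sketch of that try/fallback pattern; the calculate_due_date stub and its severity table below are illustrative stand-ins, not the real handler in regscale/integrations/due_date_handler.py, which takes the severity, created date, CVE, and title shown in the diff:

from datetime import datetime, timedelta

# Illustrative severity-to-days table; the real DueDateHandler reads its offsets from configuration.
DAYS_BY_SEVERITY = {"High": 30, "Moderate": 90, "Low": 180}


def calculate_due_date(severity: str, created_date: str, cve: str = "", title: str = "") -> str:
    """Stand-in for DueDateHandler.calculate_due_date: created date plus a severity-based offset."""
    created = datetime.fromisoformat(created_date)
    return (created + timedelta(days=DAYS_BY_SEVERITY[severity])).date().isoformat()


def resolve_due_date(severity: str, created_date: str) -> str:
    """Mirror the diff's pattern: try the requested severity, fall back to Low on any error."""
    try:
        return calculate_due_date(severity=severity, created_date=created_date)
    except Exception:
        return calculate_due_date(severity="Low", created_date=created_date)


print(resolve_due_date("High", "2024-06-01"))     # 2024-07-01
print(resolve_due_date("Unknown", "2024-06-01"))  # falls back to Low: 2024-11-28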
@@ -1700,6 +2125,9 @@ class ScannerIntegration(ABC):
issue.cve = finding.cve
issue.assessmentId = finding.assessment_id

+ # Set issue identifier fields (e.g., wizId, otherIdentifier) before save/create
+ self._set_issue_identifier_fields_internal(issue, finding)
+
def _set_control_fields(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
"""Set control-related fields for the issue."""
control_id = self.get_control_implementation_id_for_cci(finding.cci_ref) if finding.cci_ref else None
@@ -1743,13 +2171,21 @@ class ScannerIntegration(ABC):
) -> None:
"""Save or create the issue."""
if existing_issue:
+ logger.debug(f"UPDATING EXISTING ISSUE: {existing_issue.id} with external_id={finding.external_id}")
logger.debug("Saving Old Issue: %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
issue.save(bulk=True)
logger.debug("Saved existing issue %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
else:
+ logger.debug(
+ f"➕ CREATING NEW ISSUE: external_id={finding.external_id}, title='{finding.title[:50]}...', status={finding.status}"
+ )
issue = issue.create_or_update(
bulk_update=True, defaults={"otherIdentifier": self._get_other_identifier(finding, is_poam)}
)
+ if issue.id:
+ logger.debug(f"NEW ISSUE CREATED: RegScale ID={issue.id}, external_id={finding.external_id}")
+ else:
+ logger.warning(f"ISSUE CREATION FAILED: No ID assigned for external_id={finding.external_id}")
self.extra_data_to_properties(finding, issue.id)

self._handle_property_and_milestone_creation(issue, finding, existing_issue)
@@ -1766,65 +2202,137 @@ class ScannerIntegration(ABC):

:param regscale_models.Issue issue: The issue to handle properties for
:param IntegrationFinding finding: The finding data
- :param
+ :param Optional[regscale_models.Issue] existing_issue: Existing issue for milestone comparison
:rtype: None
"""
+ # Handle property creation
+ self._create_issue_properties(issue, finding)
+
+ # Handle milestone creation
+ self._create_issue_milestones(issue, finding, existing_issue)
+
+ def _create_issue_properties(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+ """
+ Create properties for an issue based on finding data.
+
+ :param regscale_models.Issue issue: The issue to create properties for
+ :param IntegrationFinding finding: The finding data
+ """
if poc := finding.point_of_contact:
-
- key="POC",
- value=poc,
- parentId=issue.id,
- parentModule="issues",
- ).create_or_update()
- logger.debug("Added POC property %s to issue %s", poc, issue.id)
+ self._create_property_safe(issue, "POC", poc, "POC property")

if finding.is_cwe:
+ self._create_property_safe(issue, "CWE", finding.plugin_id, "CWE property")
+
+ def _create_property_safe(self, issue: regscale_models.Issue, key: str, value: str, property_type: str) -> None:
+ """
+ Safely create a property with error handling.
+
+ :param regscale_models.Issue issue: The issue to create property for
+ :param str key: The property key
+ :param str value: The property value
+ :param str property_type: Description for logging purposes
+ """
+ try:
regscale_models.Property(
- key=
- value=
+ key=key,
+ value=value,
parentId=issue.id,
parentModule="issues",
).create_or_update()
- logger.debug("Added
+ logger.debug("Added %s %s to issue %s", property_type, value, issue.id)
+ except Exception as e:
+ logger.warning("Failed to create %s: %s", property_type, str(e))

- [38 removed lines not shown in the diff view]
+ def _create_issue_milestones(
+ self,
+ issue: regscale_models.Issue,
+ finding: IntegrationFinding,
+ existing_issue: Optional[regscale_models.Issue],
+ ) -> None:
+ """
+ Create milestones for an issue based on status transitions.
+
+ :param regscale_models.Issue issue: The issue to create milestones for
+ :param IntegrationFinding finding: The finding data
+ :param Optional[regscale_models.Issue] existing_issue: Existing issue for comparison
+ """
+ if not (ScannerVariables.useMilestones and issue.id):
+ return
+
+ if self._should_create_reopened_milestone(existing_issue, issue):
+ self._create_milestone_safe(
+ issue, finding, "Issue reopened from", get_current_datetime(), "reopened milestone"
+ )
+ elif self._should_create_closed_milestone(existing_issue, issue):
+ self._create_milestone_safe(issue, finding, "Issue closed from", issue.dateCompleted, "closed milestone")
+ elif not existing_issue:
+ self._create_milestone_safe(issue, finding, "Issue created from", self.scan_date, "new issue milestone")
+ else:
+ logger.debug("No milestone created for issue %s from finding %s", issue.id, finding.external_id)
+
+ def _should_create_reopened_milestone(
+ self, existing_issue: Optional[regscale_models.Issue], issue: regscale_models.Issue
+ ) -> bool:
+ """
+ Check if a reopened milestone should be created.
+
+ :param Optional[regscale_models.Issue] existing_issue: The existing issue
+ :param regscale_models.Issue issue: The current issue
+ :return: True if reopened milestone should be created
+ :rtype: bool
+ """
+ return (
+ existing_issue
+ and existing_issue.status == regscale_models.IssueStatus.Closed
+ and issue.status == regscale_models.IssueStatus.Open
+ )
+
+ def _should_create_closed_milestone(
+ self, existing_issue: Optional[regscale_models.Issue], issue: regscale_models.Issue
+ ) -> bool:
+ """
+ Check if a closed milestone should be created.
+
+ :param Optional[regscale_models.Issue] existing_issue: The existing issue
+ :param regscale_models.Issue issue: The current issue
+ :return: True if closed milestone should be created
+ :rtype: bool
+ """
+ return (
+ existing_issue
+ and existing_issue.status == regscale_models.IssueStatus.Open
+ and issue.status == regscale_models.IssueStatus.Closed
+ )
+
+ def _create_milestone_safe(
+ self,
+ issue: regscale_models.Issue,
+ finding: IntegrationFinding,
+ title_prefix: str,
+ milestone_date: str,
+ milestone_type: str,
+ ) -> None:
+ """
+ Safely create a milestone with error handling.
+
+ :param regscale_models.Issue issue: The issue to create milestone for
+ :param IntegrationFinding finding: The finding data
+ :param str title_prefix: Prefix for milestone title
+ :param str milestone_date: Date for the milestone
+ :param str milestone_type: Description for logging purposes
+ """
+ try:
+ regscale_models.Milestone(
+ title=f"{title_prefix} {self.title} scan",
+ milestoneDate=milestone_date,
+ responsiblePersonId=self.assessor_id,
+ parentID=issue.id,
+ parentModule="issues",
+ ).create_or_update()
+ logger.debug("Added milestone for issue %s from finding %s", issue.id, finding.external_id)
+ except Exception as e:
+ logger.warning("Failed to create %s: %s", milestone_type, str(e))

@staticmethod
def extra_data_to_properties(finding: IntegrationFinding, issue_id: int) -> None:
@@ -1870,13 +2378,17 @@ class ScannerIntegration(ABC):
:rtype: str
"""
delimiter = "\n"
+
+ # Use issue_asset_identifier_value if available (e.g., providerUniqueId from Wiz)
+ # This provides more meaningful asset identification for eMASS exports
+ current_asset_identifier = finding.issue_asset_identifier_value or finding.asset_identifier
if not existing_issue or ScannerVariables.issueCreation.lower() == "perasset":
- return
+ return current_asset_identifier

# Get existing asset identifiers
existing_asset_identifiers = set((existing_issue.assetIdentifier or "").split(delimiter))
- if
- existing_asset_identifiers.add(
+ if current_asset_identifier not in existing_asset_identifiers:
+ existing_asset_identifiers.add(current_asset_identifier)

return delimiter.join(existing_asset_identifiers)

@@ -1907,16 +2419,14 @@ class ScannerIntegration(ABC):
"""
Determine if the cve is part of the published CISA KEV list

+ Note: Due date handling is now managed by DueDateHandler. This method only sets kevList field.
+
:param str cve: The CVE to lookup in CISAs KEV list
- :param regscale_models.Issue issue: The issue to update kevList field
+ :param regscale_models.Issue issue: The issue to update kevList field
:param Optional[ThreadSafeDict[str, Any]] cisa_kevs: The CISA KEV data to search the findings
:return: The updated issue
:rtype: regscale_models.Issue
"""
- from datetime import datetime
-
- from regscale.core.app.utils.app_utils import convert_datetime_to_regscale_string
-
issue.kevList = "No"

if cisa_kevs:
@@ -1929,14 +2439,6 @@ class ScannerIntegration(ABC):
None,
)
if kev_data:
- # If kev due date is before the issue date created, add the difference to the date created
- calculated_due_date = ScannerIntegration._calculate_kev_due_date(kev_data, issue.dateCreated)
- if calculated_due_date:
- issue.dueDate = calculated_due_date
- else:
- issue.dueDate = convert_datetime_to_regscale_string(
- datetime.strptime(kev_data["dueDate"], "%Y-%m-%d")
- )
issue.kevList = "Yes"

return issue
@@ -2245,6 +2747,9 @@ class ScannerIntegration(ABC):
# Finalize processing
self._finalize_finding_processing(scan_history, current_vulnerabilities)

+ # Complete the finding progress bar
+ self._complete_finding_progress(loading_findings, processed_findings_count)
+
return processed_findings_count

def _setup_finding_progress(self):
@@ -2298,6 +2803,15 @@ class ScannerIntegration(ABC):
)
self.finding_progress.advance(loading_findings, 1)

+ def _complete_finding_progress(self, loading_findings, processed_count):
+ """Complete the finding progress bar with final status."""
+ self.finding_progress.update(
+ loading_findings,
+ completed=processed_count,
+ total=max(processed_count, self.num_findings_to_process or processed_count),
+ description=f"[green] Completed processing {processed_count} finding(s) from {self.title}",
+ )
+
def _process_findings_in_batches(
self, findings: Iterator[IntegrationFinding], process_finding_with_progress
) -> int:
@@ -2584,7 +3098,7 @@ class ScannerIntegration(ABC):
logger.debug(f"Creating vulnerability for finding {finding.external_id} (attempt {attempt + 1})")
vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
finding.vulnerability_id = vulnerability.id
- logger.
+ logger.debug(f"Successfully created vulnerability {vulnerability.id} for finding {finding.external_id}")

if ScannerVariables.vulnerabilityCreation.lower() != "noissue":
# Handle associated issue
@@ -2797,6 +3311,14 @@ class ScannerIntegration(ABC):
logger.info("Skipping closing outdated issues.")
return 0

+ # Check global preventAutoClose setting
+ from regscale.core.app.application import Application
+
+ app = Application()
+ if app.config.get("preventAutoClose", False):
+ logger.info("Skipping closing outdated issues due to global preventAutoClose setting.")
+ return 0
+
closed_count = 0
affected_control_ids = set()
count_lock = threading.Lock()
@@ -2854,14 +3376,17 @@ class ScannerIntegration(ABC):
issue.save()

if ScannerVariables.useMilestones and issue.id:
- [8 removed lines not shown in the diff view]
+ try:
+ regscale_models.Milestone(
+ title=f"Issue closed from {self.title} scan",
+ milestoneDate=issue.dateCompleted,
+ responsiblePersonId=self.assessor_id,
+ completed=True,
+ parentID=issue.id,
+ parentModule="issues",
+ ).create_or_update()
+ except Exception as e:
+ logger.warning("Failed to create closed issue milestone: %s", str(e))
logger.debug("Created milestone for issue %s from %s tool", issue.id, self.title)

with count_lock:
@@ -2929,6 +3454,41 @@ class ScannerIntegration(ABC):
self.control_implementation_map[control_id] = control_implementation.save()
logger.info("Updated control implementation %d status to %s", control_id, new_status)

+ def is_issue_protected_from_auto_close(self, issue: regscale_models.Issue) -> bool:
+ """
+ Check if an issue is protected from automatic closure.
+
+ :param regscale_models.Issue issue: The issue to check
+ :return: True if the issue should not be auto-closed
+ :rtype: bool
+ """
+ try:
+ # Check global configuration setting
+ app = Application()
+ if app.config.get("preventAutoClose", False):
+ logger.debug(f"Issue {issue.id} is protected from auto-closure by global preventAutoClose setting")
+ return True
+
+ # Check for protection property
+ properties = Property.get_all_by_parent(parent_id=issue.id, parent_module="issues")
+
+ for prop in properties:
+ if prop.key == "PREVENT_AUTO_CLOSE" and prop.value.lower() == "true":
+ logger.debug(f"Issue {issue.id} is protected from auto-closure by PREVENT_AUTO_CLOSE property")
+ return True
+
+ # Check for manual reopen indicators in changes
+ if issue.changes and "manually reopened" in issue.changes.lower():
+ logger.debug(f"Issue {issue.id} is protected from auto-closure due to manual reopen indicator")
+ return True
+
+ return False
+
+ except Exception as e:
+ # If we can't check, err on the side of caution and protect the issue
+ logger.warning(f"Could not check protection status for issue {issue.id}: {e}")
+ return True
+
def should_close_issue(self, issue: regscale_models.Issue, current_vulnerabilities: Dict[int, Set[int]]) -> bool:
"""
Determines if an issue should be closed based on current vulnerabilities.
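The new is_issue_protected_from_auto_close method above checks three signals before an issue may be auto-closed: the global preventAutoClose config flag, a PREVENT_AUTO_CLOSE issue property, and a "manually reopened" note in the issue's change history. A self-contained sketch of that decision, using plain dictionaries in place of the RegScale config and Property records (illustrative only; the real method also protects the issue when the check itself fails):

from typing import Dict, List


def is_protected_from_auto_close(config: Dict, properties: List[Dict], changes: str) -> bool:
    """Return True when any of the three protection signals from the diff is present."""
    if config.get("preventAutoClose", False):
        return True
    if any(p.get("key") == "PREVENT_AUTO_CLOSE" and p.get("value", "").lower() == "true" for p in properties):
        return True
    if changes and "manually reopened" in changes.lower():
        return True
    return False


print(is_protected_from_auto_close({"preventAutoClose": False}, [], "Status set by analyst"))   # False
print(is_protected_from_auto_close({}, [{"key": "PREVENT_AUTO_CLOSE", "value": "True"}], ""))   # True
print(is_protected_from_auto_close({}, [], "Issue manually reopened by ISSO on 2024-05-02"))    # True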
@@ -2946,6 +3506,11 @@ class ScannerIntegration(ABC):
)
return False

+ # Check if the issue is protected from auto-closure
+ if self.is_issue_protected_from_auto_close(issue):
+ logger.debug(f"Issue {issue.id} is protected from automatic closure")
+ return False
+
# If the issue has a vulnerability ID, check if it's still current for any asset
if issue.vulnerabilityId:
# Get vulnerability mappings for this issue
@@ -3316,11 +3881,11 @@ class ScannerIntegration(ABC):
:return: None
:rtype: None
"""
- finding.due_date =
+ finding.due_date = self.due_date_handler.calculate_due_date(
severity=finding.severity,
created_date=finding.date_created or self.scan_date,
-
-
+ cve=finding.cve,
+ title=finding.title or self.title,
)

def _update_last_seen_date(self, finding: IntegrationFinding) -> None: