regscale-cli 6.27.3.0__py3-none-any.whl → 6.28.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/_version.py +1 -1
- regscale/core/app/utils/app_utils.py +11 -2
- regscale/dev/cli.py +26 -0
- regscale/dev/version.py +72 -0
- regscale/integrations/commercial/__init__.py +15 -1
- regscale/integrations/commercial/amazon/amazon/__init__.py +0 -0
- regscale/integrations/commercial/amazon/amazon/common.py +204 -0
- regscale/integrations/commercial/amazon/common.py +48 -58
- regscale/integrations/commercial/aws/audit_manager_compliance.py +2671 -0
- regscale/integrations/commercial/aws/cli.py +3093 -55
- regscale/integrations/commercial/aws/cloudtrail_control_mappings.py +333 -0
- regscale/integrations/commercial/aws/cloudtrail_evidence.py +501 -0
- regscale/integrations/commercial/aws/cloudwatch_control_mappings.py +357 -0
- regscale/integrations/commercial/aws/cloudwatch_evidence.py +490 -0
- regscale/integrations/commercial/aws/config_compliance.py +914 -0
- regscale/integrations/commercial/aws/conformance_pack_mappings.py +198 -0
- regscale/integrations/commercial/aws/evidence_generator.py +283 -0
- regscale/integrations/commercial/aws/guardduty_control_mappings.py +340 -0
- regscale/integrations/commercial/aws/guardduty_evidence.py +1053 -0
- regscale/integrations/commercial/aws/iam_control_mappings.py +368 -0
- regscale/integrations/commercial/aws/iam_evidence.py +574 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +223 -22
- regscale/integrations/commercial/aws/inventory/base.py +107 -5
- regscale/integrations/commercial/aws/inventory/resources/audit_manager.py +513 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail.py +315 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail_logs_metadata.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudwatch.py +191 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +66 -9
- regscale/integrations/commercial/aws/inventory/resources/config.py +464 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +74 -9
- regscale/integrations/commercial/aws/inventory/resources/database.py +106 -31
- regscale/integrations/commercial/aws/inventory/resources/guardduty.py +286 -0
- regscale/integrations/commercial/aws/inventory/resources/iam.py +470 -0
- regscale/integrations/commercial/aws/inventory/resources/inspector.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +175 -61
- regscale/integrations/commercial/aws/inventory/resources/kms.py +447 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +103 -67
- regscale/integrations/commercial/aws/inventory/resources/s3.py +394 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +268 -72
- regscale/integrations/commercial/aws/inventory/resources/securityhub.py +473 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +53 -29
- regscale/integrations/commercial/aws/inventory/resources/systems_manager.py +657 -0
- regscale/integrations/commercial/aws/inventory/resources/vpc.py +655 -0
- regscale/integrations/commercial/aws/kms_control_mappings.py +288 -0
- regscale/integrations/commercial/aws/kms_evidence.py +879 -0
- regscale/integrations/commercial/aws/ocsf/__init__.py +7 -0
- regscale/integrations/commercial/aws/ocsf/constants.py +115 -0
- regscale/integrations/commercial/aws/ocsf/mapper.py +435 -0
- regscale/integrations/commercial/aws/org_control_mappings.py +286 -0
- regscale/integrations/commercial/aws/org_evidence.py +666 -0
- regscale/integrations/commercial/aws/s3_control_mappings.py +356 -0
- regscale/integrations/commercial/aws/s3_evidence.py +632 -0
- regscale/integrations/commercial/aws/scanner.py +851 -206
- regscale/integrations/commercial/aws/security_hub.py +319 -0
- regscale/integrations/commercial/aws/session_manager.py +282 -0
- regscale/integrations/commercial/aws/ssm_control_mappings.py +291 -0
- regscale/integrations/commercial/aws/ssm_evidence.py +492 -0
- regscale/integrations/commercial/synqly/ticketing.py +27 -0
- regscale/integrations/compliance_integration.py +308 -38
- regscale/integrations/due_date_handler.py +3 -0
- regscale/integrations/scanner_integration.py +399 -84
- regscale/models/integration_models/cisa_kev_data.json +65 -5
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +17 -9
- regscale/models/regscale_models/assessment.py +2 -1
- regscale/models/regscale_models/control_objective.py +74 -5
- regscale/models/regscale_models/file.py +2 -0
- regscale/models/regscale_models/issue.py +2 -5
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/RECORD +113 -34
- tests/regscale/integrations/commercial/aws/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_compliance.py +1304 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_evidence_aggregation.py +341 -0
- tests/regscale/integrations/commercial/aws/test_aws_audit_manager_collector.py +1155 -0
- tests/regscale/integrations/commercial/aws/test_aws_cloudtrail_collector.py +534 -0
- tests/regscale/integrations/commercial/aws/test_aws_config_collector.py +400 -0
- tests/regscale/integrations/commercial/aws/test_aws_guardduty_collector.py +315 -0
- tests/regscale/integrations/commercial/aws/test_aws_iam_collector.py +458 -0
- tests/regscale/integrations/commercial/aws/test_aws_inspector_collector.py +353 -0
- tests/regscale/integrations/commercial/aws/test_aws_inventory_integration.py +530 -0
- tests/regscale/integrations/commercial/aws/test_aws_kms_collector.py +919 -0
- tests/regscale/integrations/commercial/aws/test_aws_s3_collector.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_scanner_integration.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_securityhub_collector.py +792 -0
- tests/regscale/integrations/commercial/aws/test_aws_systems_manager_collector.py +918 -0
- tests/regscale/integrations/commercial/aws/test_aws_vpc_collector.py +996 -0
- tests/regscale/integrations/commercial/aws/test_cli_evidence.py +431 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_control_mappings.py +452 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_evidence.py +788 -0
- tests/regscale/integrations/commercial/aws/test_config_compliance.py +298 -0
- tests/regscale/integrations/commercial/aws/test_conformance_pack_mappings.py +200 -0
- tests/regscale/integrations/commercial/aws/test_evidence_generator.py +386 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_control_mappings.py +564 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_evidence.py +1041 -0
- tests/regscale/integrations/commercial/aws/test_iam_control_mappings.py +718 -0
- tests/regscale/integrations/commercial/aws/test_iam_evidence.py +1375 -0
- tests/regscale/integrations/commercial/aws/test_kms_control_mappings.py +656 -0
- tests/regscale/integrations/commercial/aws/test_kms_evidence.py +1163 -0
- tests/regscale/integrations/commercial/aws/test_ocsf_mapper.py +370 -0
- tests/regscale/integrations/commercial/aws/test_org_control_mappings.py +546 -0
- tests/regscale/integrations/commercial/aws/test_org_evidence.py +1240 -0
- tests/regscale/integrations/commercial/aws/test_s3_control_mappings.py +672 -0
- tests/regscale/integrations/commercial/aws/test_s3_evidence.py +987 -0
- tests/regscale/integrations/commercial/aws/test_scanner_evidence.py +373 -0
- tests/regscale/integrations/commercial/aws/test_security_hub_config_filtering.py +539 -0
- tests/regscale/integrations/commercial/aws/test_session_manager.py +516 -0
- tests/regscale/integrations/commercial/aws/test_ssm_control_mappings.py +588 -0
- tests/regscale/integrations/commercial/aws/test_ssm_evidence.py +735 -0
- tests/regscale/integrations/commercial/test_aws.py +55 -56
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/top_level.txt +0 -0
@@ -22,7 +22,7 @@ from regscale.core.app.application import Application
 from regscale.core.app.utils.api_handler import APIHandler
 from regscale.core.app.utils.app_utils import create_progress_object, get_current_datetime
 from regscale.core.app.utils.catalog_utils.common import objective_to_control_dot
-from regscale.core.utils.date import date_obj, date_str, datetime_str
+from regscale.core.utils.date import date_obj, date_str, datetime_str
 from regscale.integrations.commercial.durosuite.process_devices import scan_durosuite_devices
 from regscale.integrations.commercial.durosuite.variables import DuroSuiteVariables
 from regscale.integrations.commercial.stig_mapper_integration.mapping_engine import StigMappingEngine as STIGMapper
@@ -649,6 +649,9 @@ class ScannerIntegration(ABC):
     # Error suppression options
     suppress_asset_not_found_errors = False
 
+    # CCI mapping flag - set to False for integrations that don't use CCI references
+    enable_cci_mapping = True
+
     def __init__(self, plan_id: int, tenant_id: int = 1, is_component: bool = False, **kwargs):
         """
         Initialize the ScannerIntegration.
@@ -658,6 +661,7 @@ class ScannerIntegration(ABC):
         :param bool is_component: Whether this is a component integration
         :param kwargs: Additional keyword arguments
             - suppress_asset_not_found_errors (bool): If True, suppress "Asset not found" error messages
+            - import_all_findings (bool): If True, import findings even if they are not associated to an asset
         """
         self.app = Application()
         self.alerted_assets: Set[str] = set()
@@ -669,6 +673,7 @@ class ScannerIntegration(ABC):
 
         # Set configuration options from kwargs
         self.suppress_asset_not_found_errors = kwargs.get("suppress_asset_not_found_errors", False)
+        self.import_all_findings = kwargs.get("import_all_findings", False)
 
         # Initialize due date handler for this integration
         self.due_date_handler = DueDateHandler(self.title, config=self.app.config)
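The two hunks above add an `enable_cci_mapping` class flag and an opt-in `import_all_findings` keyword argument to `ScannerIntegration.__init__`. A minimal, self-contained sketch of how these opt-in flags behave; `DemoIntegration` and the plan ID are illustrative and not part of regscale-cli:

```python
# Illustrative sketch of the opt-in flag pattern used above; not regscale-cli code.
class DemoIntegration:
    # Class-level default; an integration that never emits CCI references would set this to False.
    enable_cci_mapping = True

    def __init__(self, plan_id: int, **kwargs):
        self.plan_id = plan_id
        # Both flags default to False unless the caller opts in explicitly.
        self.suppress_asset_not_found_errors = kwargs.get("suppress_asset_not_found_errors", False)
        self.import_all_findings = kwargs.get("import_all_findings", False)


demo = DemoIntegration(plan_id=123, import_all_findings=True)
assert demo.import_all_findings is True and demo.enable_cci_mapping is True
```

With `import_all_findings` enabled, the later hunks below create checklists and vulnerabilities even when no matching RegScale asset is found.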
@@ -711,6 +716,7 @@ class ScannerIntegration(ABC):
 
         self.cci_to_control_map: ThreadSafeDict[str, set[int]] = ThreadSafeDict()
         self._no_ccis: bool = False
+        self._cci_map_loaded: bool = False
         self.cci_to_control_map_lock: threading.Lock = threading.Lock()
 
         # Lock for thread-safe scan history count updates
@@ -726,6 +732,12 @@ class ScannerIntegration(ABC):
         thread_safe_kev_data.update(kev_data)
         self._kev_data = thread_safe_kev_data
 
+        # Issue lookup cache for performance optimization
+        # Eliminates N+1 API calls by caching issues and indexing by integrationFindingId
+        # Populated lazily on first use during findings processing
+        self._integration_finding_id_cache: Optional[ThreadSafeDict[str, List[regscale_models.Issue]]] = None
+        self._issue_cache_lock: threading.RLock = threading.RLock()
+
     @classmethod
     def _get_lock(cls, key: str) -> threading.RLock:
         """
@@ -864,15 +876,33 @@ class ScannerIntegration(ABC):
         :return: The CCI to control map
         :rtype: ThreadSafeDict[str, set[int]] | dict
         """
+        # If we know there are no CCIs, return immediately
         if self._no_ccis:
             return self.cci_to_control_map
+
+        # If we've already loaded (or attempted to load) the map, return it
+        if self._cci_map_loaded:
+            return self.cci_to_control_map
+
         with self.cci_to_control_map_lock:
-
+            # Double-check inside the lock
+            if self._cci_map_loaded:
                 return self.cci_to_control_map
-
-
-
+
+            logger.debug("Loading CCI to control map...")
+            try:
+                loaded_map = regscale_models.map_ccis_to_control_ids(parent_id=self.plan_id)  # type: ignore
+                if loaded_map:
+                    self.cci_to_control_map.update(loaded_map)
+                else:
+                    self._no_ccis = True
+            except Exception as e:
+                logger.debug(f"Could not load CCI to control map: {e}")
                 self._no_ccis = True
+            finally:
+                # Mark as loaded regardless of success/failure to prevent repeated attempts
+                self._cci_map_loaded = True
+
         return self.cci_to_control_map
 
     def get_control_to_cci_map(self) -> dict[int, set[str]]:
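The rewritten `get_cci_to_control_map` above becomes a load-once lookup: a `_cci_map_loaded` flag plus double-checked locking ensure the map is fetched at most once, and a failed or empty load is never retried. A self-contained sketch of the same pattern, with a plain callable standing in for `regscale_models.map_ccis_to_control_ids`:

```python
import threading
from typing import Callable, Dict, Optional, Set

Loader = Callable[[], Optional[Dict[str, Set[int]]]]


class LazyMap:
    """Load-once cache using double-checked locking, mirroring the hunk above."""

    def __init__(self, loader: Loader):
        self._loader = loader
        self._map: Dict[str, Set[int]] = {}
        self._loaded = False
        self._lock = threading.Lock()

    def get(self) -> Dict[str, Set[int]]:
        if self._loaded:  # fast path: no lock once a load has been attempted
            return self._map
        with self._lock:
            if self._loaded:  # double-check inside the lock
                return self._map
            try:
                self._map = self._loader() or {}
            except Exception:
                self._map = {}  # treat a failed load the same as "no data"
            finally:
                self._loaded = True  # never retry, even on failure
        return self._map


cci_map = LazyMap(lambda: {"CCI-000001": {1, 2}})
assert cci_map.get() is cci_map.get()
```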
@@ -1905,16 +1935,74 @@ class ScannerIntegration(ABC):
 
         return None
 
+    def _populate_issue_lookup_cache(self) -> None:
+        """
+        Populate the issue lookup cache by fetching all issues for the plan and indexing by integrationFindingId.
+
+        This eliminates N+1 API calls during findings processing by creating an in-memory index.
+        Thread-safe for concurrent access.
+        """
+        with self._issue_cache_lock:
+            # Double-check locking pattern - check if cache already populated
+            if self._integration_finding_id_cache is not None:
+                return
+
+            module_str = "component" if self.is_component else "security plan"
+            logger.info(f"Building issue lookup index for {module_str} {self.plan_id}...")
+            start_time = time.time()
+
+            # Fetch all issues for the security plan
+            all_issues = regscale_models.Issue.fetch_issues_by_ssp(app=self.app, ssp_id=self.plan_id)
+
+            # Build index: integrationFindingId -> List[Issue]
+            cache = ThreadSafeDict()
+            indexed_count = 0
+
+            for issue in all_issues:
+                if issue.integrationFindingId:
+                    finding_id = issue.integrationFindingId
+                    if finding_id not in cache:
+                        cache[finding_id] = []
+                    cache[finding_id].append(issue)
+                    indexed_count += 1
+
+            self._integration_finding_id_cache = cache
+
+            elapsed = time.time() - start_time
+            logger.info(
+                f"Issue lookup index built: {indexed_count} issues indexed from {len(all_issues)} total issues "
+                f"({len(cache)} unique finding IDs) in {elapsed:.2f}s"
+            )
+
     def _get_existing_issues_for_finding(
         self, finding_id: str, finding: IntegrationFinding
     ) -> List[regscale_models.Issue]:
-        """
-
+        """
+        Get existing issues for the finding using cached lookup (fast) or API fallback (slow).
+
+        NEW BEHAVIOR:
+        - First lookup uses cache (O(1) dictionary lookup, no API call)
+        - Cache is populated lazily on first call
+        - Falls back to API only if finding not in cache and has external_id
+        """
+        # Populate cache on first use (lazy initialization)
+        if self._integration_finding_id_cache is None:
+            self._populate_issue_lookup_cache()
+
+        # FAST PATH: Check cache first (O(1) lookup, no API call)
+        existing_issues = self._integration_finding_id_cache.get(finding_id, [])
 
-        #
+        # FALLBACK PATH: Only if no issues found in cache AND external_id exists
+        # This handles edge cases where integrationFindingId might be missing but other identifiers exist
         if not existing_issues and finding.external_id:
+            logger.debug(f"Issue not found in cache for finding_id={finding_id}, trying identifier fallback")
             existing_issues = self._find_issues_by_identifier_fallback(finding.external_id)
 
+            # Cache the fallback result to avoid future API lookups
+            if existing_issues:
+                with self._issue_cache_lock:
+                    self._integration_finding_id_cache[finding_id] = existing_issues
+
         return existing_issues
 
     def _find_issue_for_open_status(
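The new `_populate_issue_lookup_cache` and the reworked `_get_existing_issues_for_finding` above replace one issue lookup per finding with a single fetch that is indexed in memory by `integrationFindingId`. A self-contained sketch of that indexing step, with a small dataclass standing in for `regscale_models.Issue`:

```python
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, List, Optional


@dataclass
class Issue:  # stand-in for regscale_models.Issue
    id: int
    integrationFindingId: Optional[str]


def build_finding_index(issues: List[Issue]) -> Dict[str, List[Issue]]:
    """One pass over all issues, then O(1) lookups per finding instead of one API call each."""
    index: Dict[str, List[Issue]] = defaultdict(list)
    for issue in issues:
        if issue.integrationFindingId:  # issues without a finding id are simply not indexed
            index[issue.integrationFindingId].append(issue)
    return dict(index)


issues = [Issue(1, "f-1"), Issue(2, "f-1"), Issue(3, None), Issue(4, "f-2")]
index = build_finding_index(issues)
assert [i.id for i in index["f-1"]] == [1, 2] and index["f-2"][0].id == 4
```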
@@ -2805,11 +2893,29 @@ class ScannerIntegration(ABC):
         scan_history = self.create_scan_history()
         current_vulnerabilities: Dict[int, Set[int]] = defaultdict(set)
         processed_findings_count = 0
+
+        # Convert iterator to list so we can check findings and avoid re-iteration issues
+        findings_list = list(findings)
+
+        # Set the number of findings to process for progress tracking
+        self.num_findings_to_process = len(findings_list)
         loading_findings = self._setup_finding_progress()
 
+        # Pre-load CCI to control map before threading ONLY if:
+        # 1. The integration has CCI mapping enabled (enable_cci_mapping = True)
+        # 2. Findings contain actual CCI references
+        # This avoids expensive unnecessary API calls for integrations that don't use CCIs (e.g., AWS)
+        if self.enable_cci_mapping:
+            has_cci_refs = any(
+                getattr(f, "cci_ref", None) is not None and getattr(f, "cci_ref", None) != "" for f in findings_list
+            )
+            if has_cci_refs:
+                logger.debug("Pre-loading CCI to control map...")
+                _ = self.get_cci_to_control_map()
+
         # Process findings
         processed_findings_count = self._process_findings_with_threading(
-
+            iter(findings_list), scan_history, current_vulnerabilities, loading_findings
         )
 
         # Finalize processing
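The hunk above materializes the findings iterator so the total count is known for progress tracking and the CCI-to-control map is only pre-loaded when at least one finding actually carries a `cci_ref`. A small sketch of that inspect-then-iterate pattern, with plain dicts standing in for `IntegrationFinding` objects:

```python
from typing import Iterable, Tuple


def process(findings: Iterable[dict]) -> Tuple[int, bool]:
    # Materialize once: len() is known up front and the data can be scanned before processing.
    findings_list = list(findings)
    # Only schedule the expensive pre-load when it can actually be used.
    preload_cci_map = any(f.get("cci_ref") for f in findings_list)
    # Hand a fresh iterator to the worker path, as the hunk does with iter(findings_list).
    processed = sum(1 for _ in iter(findings_list))
    return processed, preload_cci_map


assert process(iter([{"cci_ref": None}, {"cci_ref": "CCI-000366"}])) == (2, True)
```

The trade-off is that all findings are held in memory at once, which is what makes both the pre-scan for `cci_ref` values and the accurate progress total possible.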
@@ -2818,6 +2924,8 @@ class ScannerIntegration(ABC):
         # Complete the finding progress bar
         self._complete_finding_progress(loading_findings, processed_findings_count)
 
+        logger.info(f"Successfully processed {processed_findings_count} findings from {self.title}")
+
         return processed_findings_count
 
     def _setup_finding_progress(self):
@@ -3055,10 +3163,12 @@ class ScannerIntegration(ABC):
 
     def _process_checklist_finding(self, finding: IntegrationFinding) -> None:
         """Process a checklist finding."""
-
+        asset = self.get_asset_by_identifier(finding.asset_identifier)
+        if not asset:
             if not getattr(self, "suppress_asset_not_found_errors", False):
                 logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
-
+            if not getattr(self, "import_all_findings", False):
+                return
 
         tool = regscale_models.ChecklistTool.STIGs
         if finding.vulnerability_type == "Vulnerability Scan":
@@ -3073,7 +3183,7 @@ class ScannerIntegration(ABC):
         logger.debug("Create or update checklist for %s", finding.external_id)
         regscale_models.Checklist(
             status=checklist_status_str,
-            assetId=asset.id,
+            assetId=asset.id if asset else None,
             tool=tool,
             baseline=finding.baseline,
             vulnerabilityId=finding.vulnerability_number,
@@ -3108,7 +3218,8 @@ class ScannerIntegration(ABC):
         """Process a vulnerability finding and return whether vulnerability was created."""
         logger.debug(f"Processing vulnerability for finding {finding.external_id} with status {finding.status}")
 
-
+        asset = self.get_asset_by_identifier(finding.asset_identifier)
+        if asset:
             logger.debug(f"Found asset {asset.id} for finding {finding.external_id}")
             if vulnerability_id := self.handle_vulnerability(finding, asset, scan_history):
                 current_vulnerabilities[asset.id].add(vulnerability_id)
@@ -3120,6 +3231,15 @@ class ScannerIntegration(ABC):
                 logger.debug(f"Vulnerability creation failed for finding {finding.external_id}")
         else:
             logger.debug(f"No asset found for finding {finding.external_id} with identifier {finding.asset_identifier}")
+            if getattr(self, "import_all_findings", False):
+                logger.debug("import_all_findings is True, attempting to create vulnerability without asset")
+                if vulnerability_id := self.handle_vulnerability(finding, None, scan_history):
+                    logger.debug(
+                        f"Vulnerability created successfully for finding {finding.external_id} with ID {vulnerability_id}"
+                    )
+                    return True
+                else:
+                    logger.debug(f"Vulnerability creation failed for finding {finding.external_id}")
 
         return False
 
@@ -3140,109 +3260,190 @@ class ScannerIntegration(ABC):
         """
         logger.debug(f"Processing vulnerability for finding: {finding.external_id} - {finding.title}")
 
-        #
+        # Validate required fields
+        if not self._has_required_vulnerability_fields(finding):
+            return None
+
+        # Check asset requirements
+        if not self._check_asset_requirements(finding, asset):
+            return None
+
+        if asset:
+            logger.debug(f"Found asset: {asset.id} for finding {finding.external_id}")
+
+        # Create vulnerability with retry logic
+        return self._create_vulnerability_with_retry(finding, asset, scan_history)
+
+    def _has_required_vulnerability_fields(self, finding: IntegrationFinding) -> bool:
+        """Check if finding has required fields (plugin_name or cve)."""
         plugin_name = getattr(finding, "plugin_name", None)
         cve = getattr(finding, "cve", None)
 
         if not plugin_name and not cve:
             logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
             logger.debug(f"Finding plugin_name: {plugin_name}, cve: {cve}")
-            return
-
-        if not asset:
-            if not getattr(self, "suppress_asset_not_found_errors", False):
-                logger.warning(
-                    "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
-                )
-            return None
+            return False
 
-        logger.debug(f"Found asset: {asset.id} for finding {finding.external_id}")
         logger.debug(f"Finding plugin_name: {plugin_name}, cve: {cve}")
+        return True
+
+    def _check_asset_requirements(self, finding: IntegrationFinding, asset: Optional[regscale_models.Asset]) -> bool:
+        """Check if asset requirements are met."""
+        if asset:
+            return True
+
+        if getattr(self, "import_all_findings", False):
+            logger.debug("Asset not found but import_all_findings is True, continuing without asset")
+            return True
+
+        if not getattr(self, "suppress_asset_not_found_errors", False):
+            logger.warning("VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier)
+        return False
 
-
+    def _create_vulnerability_with_retry(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Optional[int]:
+        """Create vulnerability with retry logic."""
         max_retries = 3
         retry_delay = 2  # seconds
 
         for attempt in range(max_retries):
-
-
-
-
-
-
-            if ScannerVariables.vulnerabilityCreation.lower() != "noissue":
-                # Handle associated issue
-                self.create_or_update_issue_from_finding(
-                    title=finding.title,
-                    finding=finding,
-                )
+            vulnerability_id = self._try_create_vulnerability(
+                finding, asset, scan_history, attempt, max_retries, retry_delay
+            )
+            if vulnerability_id is not None:
+                return vulnerability_id
 
-
+            if attempt < max_retries - 1:
+                time.sleep(retry_delay)
+                retry_delay *= 2  # Exponential backoff
 
-
-            if attempt < max_retries - 1:
-                logger.warning(
-                    f"Vulnerability creation failed for finding {finding.external_id} (attempt {attempt + 1}/{max_retries}): {e}. Retrying in {retry_delay} seconds..."
-                )
-                import time
+        return None
 
-
-
-
-
-
-
-
+    def _try_create_vulnerability(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+        attempt: int,
+        max_retries: int,
+        retry_delay: int,
+    ) -> Optional[int]:
+        """Try to create vulnerability for a single attempt."""
+        try:
+            logger.debug(f"Creating vulnerability for finding {finding.external_id} (attempt {attempt + 1})")
+            vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
+            finding.vulnerability_id = vulnerability.id
+            logger.debug(f"Successfully created vulnerability {vulnerability.id} for finding {finding.external_id}")
+
+            self._handle_associated_issue(finding)
+            return vulnerability.id
+
+        except Exception as e:
+            self._handle_vulnerability_creation_error(e, finding, attempt, max_retries, retry_delay)
+            return None
+
+    def _handle_associated_issue(self, finding: IntegrationFinding) -> None:
+        """Handle associated issue creation if needed."""
+        if ScannerVariables.vulnerabilityCreation.lower() != "noissue":
+            self.create_or_update_issue_from_finding(
+                title=finding.title,
+                finding=finding,
+            )
+
+    def _handle_vulnerability_creation_error(
+        self, error: Exception, finding: IntegrationFinding, attempt: int, max_retries: int, retry_delay: int
+    ) -> None:
+        """Handle error during vulnerability creation."""
+        if attempt < max_retries - 1:
+            logger.warning(
+                f"Vulnerability creation failed for finding {finding.external_id} "
+                f"(attempt {attempt + 1}/{max_retries}): {error}. "
+                f"Retrying in {retry_delay} seconds..."
+            )
+        else:
+            logger.error(
+                f"Failed to create vulnerability for finding {finding.external_id} "
+                f"after {max_retries} attempts: {error}"
+            )
 
     def create_vulnerability_from_finding(
-        self,
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
    ) -> regscale_models.Vulnerability:
         """
         Creates a vulnerability from an integration finding.
 
         :param IntegrationFinding finding: The integration finding
-        :param regscale_models.Asset asset: The associated asset
+        :param Optional[regscale_models.Asset] asset: The associated asset (can be None if import_all_findings is True)
         :param regscale_models.ScanHistory scan_history: The scan history
         :return: The created vulnerability
         :rtype: regscale_models.Vulnerability
         """
         logger.debug(f"Creating vulnerability object for finding {finding.external_id}")
 
-
-
-
-
-        logger.debug(f"
+        # Create vulnerability object
+        vulnerability = self._build_vulnerability_object(finding, asset, scan_history)
+
+        # Save vulnerability
+        logger.debug(f"Calling create_or_update for vulnerability with title: {vulnerability.title}")
+        vulnerability = vulnerability.create_or_update()
+        logger.debug(f"Vulnerability created/updated with ID: {vulnerability.id}")
+
+        # Create mapping if asset exists
+        if asset:
+            self._create_vulnerability_mapping(vulnerability, finding, asset, scan_history)
+        else:
+            logger.debug(
+                f"Skipping VulnerabilityMapping creation for vulnerability {vulnerability.id} - no asset provided"
+            )
 
-        vulnerability
+        return vulnerability
+
+    def _build_vulnerability_object(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> regscale_models.Vulnerability:
+        """Build the vulnerability object from finding data."""
+        # Get mapped values
+        severity = self._get_mapped_severity(finding)
+        ip_address = self._get_ip_address(finding, asset)
+        dns = self._get_dns(asset)
+        operating_system = self._get_operating_system(asset)
+
+        return regscale_models.Vulnerability(
             title=finding.title,
             cve=finding.cve,
-            vprScore=(
-
-            ),  # If this is the VPR score, otherwise use a different field
-            cvsSv3BaseScore=finding.cvss_v3_base_score or finding.cvss_v3_score or finding.cvss_score,
+            vprScore=self._get_vpr_score(finding),
+            cvsSv3BaseScore=self._get_cvss_v3_score(finding),
             cvsSv2BaseScore=finding.cvss_v2_score,
             cvsSv3BaseVector=finding.cvss_v3_vector,
             cvsSv2BaseVector=finding.cvss_v2_vector,
             scanId=scan_history.id,
-            severity=
+            severity=severity,
             description=finding.description,
             dateLastUpdated=finding.date_last_updated,
             parentId=self.plan_id,
             parentModule=self.parent_module,
-            dns=
+            dns=dns,
             status=regscale_models.VulnerabilityStatus.Open,
-            ipAddress=
+            ipAddress=ip_address,
             firstSeen=finding.first_seen,
             lastSeen=finding.last_seen,
-            plugInName=finding.cve or finding.plugin_name,
-            plugInId=finding.plugin_id,
-            exploitAvailable=None,
-            plugInText=finding.plugin_text
-
-
-
-            operatingSystem=asset.operatingSystem if hasattr(asset, "operatingSystem") else None,
+            plugInName=finding.cve or finding.plugin_name,
+            plugInId=finding.plugin_id or finding.external_id,
+            exploitAvailable=None,
+            plugInText=finding.plugin_text or finding.observations,
+            port=getattr(finding, "port", None),
+            protocol=getattr(finding, "protocol", None),
+            operatingSystem=operating_system,
             fixedVersions=finding.fixed_versions,
             buildVersion=finding.build_version,
             fixStatus=finding.fix_status,
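The large refactor above splits the old monolithic retry loop into `_try_create_vulnerability`, `_handle_associated_issue`, and `_handle_vulnerability_creation_error`, keeping three attempts with a doubling delay between them. A generic, self-contained sketch of that retry-with-exponential-backoff shape; the `create` callable stands in for `create_vulnerability_from_finding`:

```python
import time
from typing import Callable, Optional, TypeVar

T = TypeVar("T")


def with_retry(create: Callable[[], T], max_retries: int = 3, retry_delay: float = 2.0) -> Optional[T]:
    """Return the first successful result, sleeping retry_delay, 2 * retry_delay, ... between attempts."""
    for attempt in range(max_retries):
        try:
            return create()
        except Exception as exc:  # broad catch on purpose, mirroring the diff
            if attempt < max_retries - 1:
                print(f"attempt {attempt + 1}/{max_retries} failed: {exc}; retrying in {retry_delay}s")
                time.sleep(retry_delay)
                retry_delay *= 2  # exponential backoff
            else:
                print(f"giving up after {max_retries} attempts: {exc}")
    return None


assert with_retry(lambda: "ok") == "ok"
```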
@@ -3253,14 +3454,68 @@ class ScannerIntegration(ABC):
             affectedPackages=finding.affected_packages,
         )
 
-
-
-        logger.debug(f"
+    def _get_mapped_severity(self, finding: IntegrationFinding) -> regscale_models.VulnerabilitySeverity:
+        """Get mapped severity for the finding."""
+        logger.debug(f"Finding severity: '{finding.severity}' (type: {type(finding.severity)})")
+        mapped_severity = self.issue_to_vulnerability_map.get(
+            finding.severity, regscale_models.VulnerabilitySeverity.Low
+        )
+        logger.debug(f"Mapped severity: {mapped_severity}")
+        return mapped_severity
+
+    def _get_ip_address(self, finding: IntegrationFinding, asset: Optional[regscale_models.Asset]) -> str:
+        """Get IP address from finding or asset."""
+        if finding.ip_address:
+            return finding.ip_address
+        if asset and hasattr(asset, "ipAddress") and asset.ipAddress:
+            return asset.ipAddress
+        return ""
+
+    def _get_dns(self, asset: Optional[regscale_models.Asset]) -> str:
+        """Get DNS from asset."""
+        if asset and hasattr(asset, "fqdn") and asset.fqdn:
+            return asset.fqdn
+        return "unknown"
+
+    def _get_operating_system(self, asset: Optional[regscale_models.Asset]) -> Optional[str]:
+        """Get operating system from asset."""
+        if asset and hasattr(asset, "operatingSystem"):
+            return asset.operatingSystem
+        return None
 
+    def _get_vpr_score(self, finding: IntegrationFinding) -> Optional[float]:
+        """Get VPR score from finding."""
+        if hasattr(finding, "vprScore"):
+            return finding.vpr_score
+        return None
+
+    def _get_cvss_v3_score(self, finding: IntegrationFinding) -> Optional[float]:
+        """Get CVSS v3 score from finding."""
+        return finding.cvss_v3_base_score or finding.cvss_v3_score or finding.cvss_score
+
+    def _create_vulnerability_mapping(
+        self,
+        vulnerability: regscale_models.Vulnerability,
+        finding: IntegrationFinding,
+        asset: regscale_models.Asset,
+        scan_history: regscale_models.ScanHistory,
+    ) -> None:
+        """Create vulnerability mapping with retry logic."""
         logger.debug(f"Creating vulnerability mapping for vulnerability {vulnerability.id}")
         logger.debug(f"Scan History ID: {scan_history.id}, Asset ID: {asset.id}, Plan ID: {self.plan_id}")
 
-
+        mapping = self._build_vulnerability_mapping(vulnerability, finding, asset, scan_history)
+        self._create_mapping_with_retry(mapping, vulnerability.id)
+
+    def _build_vulnerability_mapping(
+        self,
+        vulnerability: regscale_models.Vulnerability,
+        finding: IntegrationFinding,
+        asset: regscale_models.Asset,
+        scan_history: regscale_models.ScanHistory,
+    ) -> regscale_models.VulnerabilityMapping:
+        """Build vulnerability mapping object."""
+        return regscale_models.VulnerabilityMapping(
             vulnerabilityId=vulnerability.id,
             assetId=asset.id,
             scanId=scan_history.id,
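The extracted `_get_ip_address`, `_get_dns`, and `_get_operating_system` helpers above all follow the same shape: prefer the finding's value, fall back to the asset when it carries the attribute, and otherwise return a fixed default. A compact sketch of that fallback chain; the `Finding` and `Asset` dataclasses are stand-ins, not the RegScale models:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Finding:  # stand-in for IntegrationFinding
    ip_address: Optional[str] = None


@dataclass
class Asset:  # stand-in for regscale_models.Asset
    ipAddress: Optional[str] = None
    fqdn: Optional[str] = None


def ip_for(finding: Finding, asset: Optional[Asset]) -> str:
    """Prefer the finding's IP, then the asset's, then an empty string."""
    if finding.ip_address:
        return finding.ip_address
    return getattr(asset, "ipAddress", None) or ""


def dns_for(asset: Optional[Asset]) -> str:
    """Prefer the asset's FQDN, else the literal 'unknown' default used above."""
    return getattr(asset, "fqdn", None) or "unknown"


assert ip_for(Finding(), Asset(ipAddress="10.0.0.5")) == "10.0.0.5"
assert dns_for(None) == "unknown"
```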
@@ -3275,15 +3530,75 @@ class ScannerIntegration(ABC):
             dateLastUpdated=get_current_datetime(),
         )
 
-
-
-
-        )
+    def _create_mapping_with_retry(self, mapping: regscale_models.VulnerabilityMapping, vulnerability_id: int) -> None:
+        """Create vulnerability mapping with retry logic."""
+        import logging
 
-
-
+        max_retries = 3
+        retry_delay = 0.5
+        regscale_logger = logging.getLogger("regscale")
+        original_level = regscale_logger.level
 
-
+        for attempt in range(max_retries):
+            if self._try_create_mapping(
+                mapping, vulnerability_id, attempt, max_retries, regscale_logger, original_level
+            ):
+                break
+
+            if attempt < max_retries - 1:
+                time.sleep(retry_delay)
+                retry_delay *= 2  # Exponential backoff
+
+    def _try_create_mapping(
+        self,
+        mapping: regscale_models.VulnerabilityMapping,
+        vulnerability_id: int,
+        attempt: int,
+        max_retries: int,
+        regscale_logger: logging.Logger,
+        original_level: int,
+    ) -> bool:
+        """Try to create mapping for a single attempt."""
+        try:
+            # Suppress error logging during retry attempts (but not the final attempt)
+            if attempt < max_retries - 1:
+                regscale_logger.setLevel(logging.CRITICAL)
+
+            mapping.create_unique()
+
+            # Restore original log level
+            regscale_logger.setLevel(original_level)
+
+            if attempt > 0:
+                logger.info(
+                    f"VulnerabilityMapping created successfully on attempt {attempt + 1} for vulnerability {vulnerability_id}"
+                )
+            else:
+                logger.debug(f"Vulnerability mapping created for vulnerability {vulnerability_id}")
+            return True
+
+        except Exception as mapping_error:
+            # Restore original log level before handling the exception
+            regscale_logger.setLevel(original_level)
+            return self._handle_mapping_error(mapping_error, attempt, max_retries)
+
+    def _handle_mapping_error(self, error: Exception, attempt: int, max_retries: int) -> bool:
+        """Handle error during mapping creation."""
+        if attempt >= max_retries - 1:
+            logger.error(f"Failed to create VulnerabilityMapping after {max_retries} attempts: {error}")
+            # Convert to a more specific exception type
+            raise RuntimeError(f"VulnerabilityMapping creation failed after {max_retries} attempts") from error
+
+        # Check if it's a reference error
+        error_str = str(error)
+        if "400" in error_str and "Object reference" in error_str:
+            logger.debug(
+                f"VulnerabilityMapping creation failed due to reference error (attempt {attempt + 1}/{max_retries}). Retrying..."
+            )
+            return False
+
+        # Different error, re-raise with more context
+        raise RuntimeError(f"Unexpected error during VulnerabilityMapping creation: {error}") from error
 
     def _filter_vulns_open_by_other_tools(
         self, all_vulns: list[regscale_models.Vulnerability]
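`_try_create_mapping` above temporarily silences the `regscale` logger during non-final retry attempts by saving and restoring its level by hand. The same effect can be written as a small context manager; this is only an alternative formulation, not how regscale-cli implements it:

```python
import logging
from contextlib import contextmanager
from typing import Iterator


@contextmanager
def quiet_logger(name: str, level: int = logging.CRITICAL) -> Iterator[None]:
    """Temporarily raise a logger's threshold, restoring it even if the body raises."""
    log = logging.getLogger(name)
    original = log.level
    log.setLevel(level)
    try:
        yield
    finally:
        log.setLevel(original)


with quiet_logger("regscale"):
    logging.getLogger("regscale").error("this error is suppressed, as during the retry attempts above")
```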
|