regscale-cli 6.21.2.2__py3-none-any.whl → 6.22.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli might be problematic. Click here for more details.

Files changed (32) hide show
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +3 -0
  3. regscale/core/app/utils/app_utils.py +31 -0
  4. regscale/integrations/commercial/jira.py +27 -5
  5. regscale/integrations/commercial/qualys/__init__.py +160 -60
  6. regscale/integrations/commercial/qualys/scanner.py +300 -39
  7. regscale/integrations/commercial/synqly/edr.py +2 -8
  8. regscale/integrations/commercial/wizv2/async_client.py +4 -0
  9. regscale/integrations/commercial/wizv2/scanner.py +50 -24
  10. regscale/integrations/public/__init__.py +13 -0
  11. regscale/integrations/public/csam/__init__.py +0 -0
  12. regscale/integrations/public/csam/csam.py +1129 -0
  13. regscale/integrations/public/fedramp/fedramp_cis_crm.py +175 -51
  14. regscale/integrations/scanner_integration.py +513 -145
  15. regscale/models/integration_models/cisa_kev_data.json +64 -3
  16. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  17. regscale/models/regscale_models/__init__.py +2 -0
  18. regscale/models/regscale_models/catalog.py +1 -1
  19. regscale/models/regscale_models/control_implementation.py +8 -8
  20. regscale/models/regscale_models/form_field_value.py +5 -3
  21. regscale/models/regscale_models/inheritance.py +44 -0
  22. regscale/regscale.py +2 -0
  23. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/METADATA +1 -1
  24. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/RECORD +29 -29
  25. tests/regscale/models/test_tenable_integrations.py +811 -105
  26. regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +0 -7388
  27. regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +0 -9605
  28. regscale/integrations/public/fedramp/parts_mapper.py +0 -107
  29. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/LICENSE +0 -0
  30. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/WHEEL +0 -0
  31. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/entry_points.txt +0 -0
  32. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/top_level.txt +0 -0
@@ -1211,7 +1211,6 @@ class ScannerIntegration(ABC):
1211
1211
  name=software_name,
1212
1212
  parentHardwareAssetId=new_asset.id,
1213
1213
  version=software_version,
1214
- # references=software.get("references", []),
1215
1214
  )
1216
1215
  )
1217
1216
  else:
@@ -1615,7 +1614,40 @@ class ScannerIntegration(ABC):
1615
1614
  # Get consolidated asset identifier
1616
1615
  asset_identifier = self.get_consolidated_asset_identifier(finding, existing_issue)
1617
1616
 
1618
- # Update all fields
1617
+ # Set basic issue fields
1618
+ self._set_basic_issue_fields(issue, finding, issue_status, issue_title, asset_identifier)
1619
+
1620
+ # Set due date
1621
+ self._set_issue_due_date(issue, finding)
1622
+
1623
+ # Set additional issue fields
1624
+ self._set_additional_issue_fields(issue, finding, description, remediation_description)
1625
+
1626
+ # Set control-related fields
1627
+ self._set_control_fields(issue, finding)
1628
+
1629
+ # Set risk and operational fields
1630
+ self._set_risk_and_operational_fields(issue, finding, is_poam)
1631
+
1632
+ # Update KEV data if CVE exists
1633
+ if finding.cve:
1634
+ issue = self.lookup_kev_and_update_issue(cve=finding.cve, issue=issue, cisa_kevs=self._kev_data)
1635
+
1636
+ # Save or create the issue
1637
+ self._save_or_create_issue(issue, finding, existing_issue, is_poam)
1638
+
1639
+ self._handle_property_and_milestone_creation(issue, finding, existing_issue)
1640
+ return issue
1641
+
1642
+ def _set_basic_issue_fields(
1643
+ self,
1644
+ issue: regscale_models.Issue,
1645
+ finding: IntegrationFinding,
1646
+ issue_status: regscale_models.IssueStatus,
1647
+ issue_title: str,
1648
+ asset_identifier: str,
1649
+ ) -> None:
1650
+ """Set basic fields for the issue."""
1619
1651
  issue.parentId = self.plan_id
1620
1652
  issue.parentModule = self.parent_module
1621
1653
  issue.vulnerabilityId = finding.vulnerability_id
@@ -1632,7 +1664,10 @@ class ScannerIntegration(ABC):
1632
1664
  issue.securityPlanId = self.plan_id if not self.is_component else None
1633
1665
  issue.identification = finding.identification
1634
1666
  issue.dateFirstDetected = finding.first_seen
1635
- # Ensure a due date is always set using configured policy defaults (e.g., FedRAMP)
1667
+ issue.assetIdentifier = asset_identifier
1668
+
1669
+ def _set_issue_due_date(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
1670
+ """Set the due date for the issue."""
1636
1671
  if not finding.due_date:
1637
1672
  try:
1638
1673
  base_created = finding.date_created or issue.dateCreated
@@ -1650,22 +1685,27 @@ class ScannerIntegration(ABC):
1650
1685
  title=self.title,
1651
1686
  )
1652
1687
  issue.dueDate = finding.due_date
1688
+
1689
+ def _set_additional_issue_fields(
1690
+ self, issue: regscale_models.Issue, finding: IntegrationFinding, description: str, remediation_description: str
1691
+ ) -> None:
1692
+ """Set additional fields for the issue."""
1653
1693
  issue.description = description
1654
1694
  issue.sourceReport = finding.source_report or self.title
1655
1695
  issue.recommendedActions = finding.recommendation_for_mitigation
1656
- issue.assetIdentifier = asset_identifier
1657
1696
  issue.securityChecks = finding.security_check or finding.external_id
1658
1697
  issue.remediationDescription = remediation_description
1659
1698
  issue.integrationFindingId = self.get_finding_identifier(finding)
1660
1699
  issue.poamComments = finding.poam_comments
1661
1700
  issue.cve = finding.cve
1662
1701
  issue.assessmentId = finding.assessment_id
1702
+
1703
+ def _set_control_fields(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
1704
+ """Set control-related fields for the issue."""
1663
1705
  control_id = self.get_control_implementation_id_for_cci(finding.cci_ref) if finding.cci_ref else None
1664
- issue.controlId = control_id # TODO REMOVE
1665
- # Add the control implementation ids and the cci ref if it exists
1666
- # Get control implementation ID for CCI if it exists
1667
- # Only add CCI control ID if it exists
1706
+ # Note: controlId is deprecated, using controlImplementationIds instead
1668
1707
  cci_control_ids = [control_id] if control_id is not None else []
1708
+
1669
1709
  # Ensure failed control labels (e.g., AC-4(21)) are present in affectedControls
1670
1710
  if finding.affected_controls:
1671
1711
  issue.affectedControls = finding.affected_controls
@@ -1673,15 +1713,17 @@ class ScannerIntegration(ABC):
1673
1713
  issue.affectedControls = ", ".join(sorted({cl for cl in finding.control_labels if cl}))
1674
1714
 
1675
1715
  issue.controlImplementationIds = list(set(finding._control_implementation_ids + cci_control_ids)) # noqa
1676
- # Always ensure isPoam reflects current settings, even when updating existing issues
1716
+
1717
+ def _set_risk_and_operational_fields(
1718
+ self, issue: regscale_models.Issue, finding: IntegrationFinding, is_poam: bool
1719
+ ) -> None:
1720
+ """Set risk and operational fields for the issue."""
1677
1721
  issue.isPoam = is_poam
1678
1722
  issue.basisForAdjustment = (
1679
1723
  finding.basis_for_adjustment if finding.basis_for_adjustment else f"{self.title} import"
1680
1724
  )
1681
1725
  issue.pluginId = finding.plugin_id
1682
1726
  issue.originalRiskRating = regscale_models.Issue.assign_risk_rating(finding.severity)
1683
- # Current: changes
1684
- # Planned: planned changes
1685
1727
  issue.changes = "<p>Current: {}</p><p>Planned: {}</p>".format(
1686
1728
  finding.milestone_changes, finding.planned_milestone_changes
1687
1729
  )
@@ -1690,17 +1732,20 @@ class ScannerIntegration(ABC):
1690
1732
  issue.operationalRequirement = finding.operational_requirements
1691
1733
  issue.deviationRationale = finding.deviation_rationale
1692
1734
  issue.dateLastUpdated = get_current_datetime()
1693
- ## set affected controls if they exist
1694
1735
  issue.affectedControls = finding.affected_controls
1695
1736
 
1696
- if finding.cve:
1697
- issue = self.lookup_kev_and_update_issue(cve=finding.cve, issue=issue, cisa_kevs=self._kev_data)
1698
-
1737
+ def _save_or_create_issue(
1738
+ self,
1739
+ issue: regscale_models.Issue,
1740
+ finding: IntegrationFinding,
1741
+ existing_issue: Optional[regscale_models.Issue],
1742
+ is_poam: bool,
1743
+ ) -> None:
1744
+ """Save or create the issue."""
1699
1745
  if existing_issue:
1700
1746
  logger.debug("Saving Old Issue: %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
1701
1747
  issue.save(bulk=True)
1702
1748
  logger.debug("Saved existing issue %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
1703
-
1704
1749
  else:
1705
1750
  issue = issue.create_or_update(
1706
1751
  bulk_update=True, defaults={"otherIdentifier": self._get_other_identifier(finding, is_poam)}
@@ -2190,33 +2235,44 @@ class ScannerIntegration(ABC):
2190
2235
  scan_history = self.create_scan_history()
2191
2236
  current_vulnerabilities: Dict[int, Set[int]] = defaultdict(set)
2192
2237
  processed_findings_count = 0
2193
- loading_findings = self.finding_progress.add_task(
2238
+ loading_findings = self._setup_finding_progress()
2239
+
2240
+ # Process findings
2241
+ processed_findings_count = self._process_findings_with_threading(
2242
+ findings, scan_history, current_vulnerabilities, loading_findings
2243
+ )
2244
+
2245
+ # Finalize processing
2246
+ self._finalize_finding_processing(scan_history, current_vulnerabilities)
2247
+
2248
+ return processed_findings_count
2249
+
2250
+ def _setup_finding_progress(self):
2251
+ """Setup progress tracking for findings processing."""
2252
+ return self.finding_progress.add_task(
2194
2253
  f"[#f8b737]Processing {f'{self.num_findings_to_process} ' if self.num_findings_to_process else ''}finding(s) from {self.title}",
2195
2254
  total=self.num_findings_to_process if self.num_findings_to_process else None,
2196
2255
  )
2197
2256
 
2198
- # Locks for thread-safe operations
2257
+ def _process_findings_with_threading(
2258
+ self,
2259
+ findings: Iterator[IntegrationFinding],
2260
+ scan_history: regscale_models.ScanHistory,
2261
+ current_vulnerabilities: Dict[int, Set[int]],
2262
+ loading_findings,
2263
+ ) -> int:
2264
+ """Process findings using threading or sequential processing."""
2265
+ processed_findings_count = 0
2199
2266
  count_lock = threading.RLock()
2200
2267
 
2201
2268
  def process_finding_with_progress(finding_to_process: IntegrationFinding) -> None:
2202
- """
2203
- Process a single finding and update progress.
2204
-
2205
- :param IntegrationFinding finding_to_process: The finding to process
2206
- :rtype: None
2207
- """
2269
+ """Process a single finding and update progress."""
2208
2270
  nonlocal processed_findings_count
2209
2271
  try:
2210
2272
  self.process_finding(finding_to_process, scan_history, current_vulnerabilities)
2211
2273
  with count_lock:
2212
2274
  processed_findings_count += 1
2213
- if self.num_findings_to_process:
2214
- self.finding_progress.update(
2215
- loading_findings,
2216
- total=self.num_findings_to_process,
2217
- description=f"[#f8b737]Processing {self.num_findings_to_process} findings from {self.title}.",
2218
- )
2219
- self.finding_progress.advance(loading_findings, 1)
2275
+ self._update_finding_progress(loading_findings)
2220
2276
  except Exception as exc:
2221
2277
  self.log_error(
2222
2278
  "An error occurred when processing finding %s: %s",
@@ -2228,30 +2284,70 @@ class ScannerIntegration(ABC):
2228
2284
  for finding in findings:
2229
2285
  process_finding_with_progress(finding)
2230
2286
  else:
2231
- # Process findings in batches to control memory usage
2232
- batch_size = get_thread_workers_max() * 2 # Set batch size based on thread count
2233
- with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
2234
- batch = []
2235
- for finding in findings:
2236
- batch.append(finding)
2237
- if len(batch) >= batch_size:
2238
- # Process this batch
2239
- list(executor.map(process_finding_with_progress, batch))
2240
- # Clear the batch
2241
- batch = []
2242
-
2243
- # Process any remaining items
2244
- if batch:
2287
+ processed_findings_count = self._process_findings_in_batches(findings, process_finding_with_progress)
2288
+
2289
+ return processed_findings_count
2290
+
2291
+ def _update_finding_progress(self, loading_findings):
2292
+ """Update the finding progress bar."""
2293
+ if self.num_findings_to_process:
2294
+ self.finding_progress.update(
2295
+ loading_findings,
2296
+ total=self.num_findings_to_process,
2297
+ description=f"[#f8b737]Processing {self.num_findings_to_process} findings from {self.title}.",
2298
+ )
2299
+ self.finding_progress.advance(loading_findings, 1)
2300
+
2301
+ def _process_findings_in_batches(
2302
+ self, findings: Iterator[IntegrationFinding], process_finding_with_progress
2303
+ ) -> int:
2304
+ """Process findings in batches using thread pool executor."""
2305
+ processed_findings_count = 0
2306
+ batch_size = get_thread_workers_max() * 2
2307
+
2308
+ with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
2309
+ batch = []
2310
+ for finding in findings:
2311
+ batch.append(finding)
2312
+ if len(batch) >= batch_size:
2313
+ # Process this batch
2245
2314
  list(executor.map(process_finding_with_progress, batch))
2315
+ processed_findings_count += len(batch)
2316
+ # Clear the batch
2317
+ batch = []
2318
+
2319
+ # Process any remaining items
2320
+ if batch:
2321
+ list(executor.map(process_finding_with_progress, batch))
2322
+ processed_findings_count += len(batch)
2323
+
2324
+ return processed_findings_count
2246
2325
 
2247
- # Close outdated issues
2248
- self._results["scan_history"] = scan_history.save()
2326
+ def _finalize_finding_processing(
2327
+ self, scan_history: regscale_models.ScanHistory, current_vulnerabilities: Dict[int, Set[int]]
2328
+ ) -> None:
2329
+ """Finalize the finding processing by saving scan history and closing outdated issues."""
2330
+ logger.info(
2331
+ f"Saving scan history with final counts - Low: {scan_history.vLow}, Medium: {scan_history.vMedium}, High: {scan_history.vHigh}, Critical: {scan_history.vCritical}, Info: {scan_history.vInfo}"
2332
+ )
2333
+
2334
+ # Ensure scan history is properly saved with updated counts
2335
+ try:
2336
+ scan_history.save()
2337
+ except Exception as e:
2338
+ logger.error(f"Error saving scan history: {e}")
2339
+ # Try to save again with a fresh fetch
2340
+ try:
2341
+ scan_history.fetch()
2342
+ scan_history.save()
2343
+ except Exception as e2:
2344
+ logger.error(f"Failed to save scan history after retry: {e2}")
2345
+
2346
+ self._results["scan_history"] = scan_history
2249
2347
  self.update_result_counts("issues", regscale_models.Issue.bulk_save(progress_context=self.finding_progress))
2250
2348
  self.close_outdated_issues(current_vulnerabilities)
2251
2349
  self._perform_batch_operations(self.finding_progress)
2252
2350
 
2253
- return processed_findings_count
2254
-
2255
2351
  @staticmethod
2256
2352
  def parse_poam_id(poam_identifier: str) -> Optional[int]:
2257
2353
  """
@@ -2285,15 +2381,15 @@ class ScannerIntegration(ABC):
2285
2381
  parent_id=self.plan_id,
2286
2382
  parent_module=self.parent_module,
2287
2383
  )
2288
- self._max_poam_id = max(
2289
- (
2290
- parsed_id
2291
- for issue in issues
2292
- if issue.otherIdentifier
2293
- and (parsed_id := self.parse_poam_id(issue.otherIdentifier)) is not None
2294
- ),
2295
- default=0,
2296
- )
2384
+ # Extract parsed IDs for valid identifiers
2385
+ parsed_ids = []
2386
+ for issue in issues:
2387
+ if issue.otherIdentifier:
2388
+ parsed_id = self.parse_poam_id(issue.otherIdentifier)
2389
+ if parsed_id is not None:
2390
+ parsed_ids.append(parsed_id)
2391
+
2392
+ self._max_poam_id = max(parsed_ids, default=0)
2297
2393
 
2298
2394
  # Increment the cached max ID and store it
2299
2395
  self._max_poam_id = (self._max_poam_id or 0) + 1
@@ -2351,57 +2447,168 @@ class ScannerIntegration(ABC):
2351
2447
 
2352
2448
  # Process checklist if applicable
2353
2449
  if self.type == ScannerIntegrationType.CHECKLIST:
2354
- if not (asset := self.get_asset_by_identifier(finding.asset_identifier)):
2355
- if not getattr(self, "suppress_asset_not_found_errors", False):
2356
- logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
2357
- return
2358
-
2359
- tool = regscale_models.ChecklistTool.STIGs
2360
- if finding.vulnerability_type == "Vulnerability Scan":
2361
- tool = regscale_models.ChecklistTool.VulnerabilityScanner
2362
-
2363
- if not finding.cci_ref:
2364
- finding.cci_ref = "CCI-000366"
2365
-
2366
- # Convert checklist status to string
2367
- checklist_status_str = str(finding.checklist_status.value)
2368
-
2369
- logger.debug("Create or update checklist for %s", finding.external_id)
2370
- regscale_models.Checklist(
2371
- status=checklist_status_str,
2372
- assetId=asset.id,
2373
- tool=tool,
2374
- baseline=finding.baseline,
2375
- vulnerabilityId=finding.vulnerability_number,
2376
- results=finding.results,
2377
- check=finding.title,
2378
- cci=finding.cci_ref,
2379
- ruleId=finding.rule_id,
2380
- version=finding.rule_version,
2381
- comments=finding.comments,
2382
- datePerformed=finding.date_created,
2383
- ).create_or_update()
2384
-
2385
- # For failing findings, handle control implementation updates
2386
- if finding.status != regscale_models.IssueStatus.Closed:
2387
- logger.debug("Handling failing checklist for %s", finding.external_id)
2388
- if self.type == ScannerIntegrationType.CHECKLIST:
2389
- self.handle_failing_checklist(finding=finding, plan_id=self.plan_id)
2390
- else:
2391
- logger.debug("Handling passing checklist for %s", finding.external_id)
2392
- self.handle_passing_checklist(finding=finding, plan_id=self.plan_id)
2450
+ self._process_checklist_finding(finding)
2393
2451
 
2394
2452
  # Process vulnerability if applicable
2395
2453
  if finding.status != regscale_models.IssueStatus.Closed or ScannerVariables.ingestClosedIssues:
2396
- if asset := self.get_asset_by_identifier(finding.asset_identifier):
2397
- if vulnerability_id := self.handle_vulnerability(finding, asset, scan_history):
2398
- current_vulnerabilities[asset.id].add(vulnerability_id)
2454
+ vulnerability_created = self._process_vulnerability_finding(finding, scan_history, current_vulnerabilities)
2455
+
2399
2456
  self.handle_failing_finding(
2400
2457
  issue_title=finding.issue_title or finding.title,
2401
2458
  finding=finding,
2402
2459
  )
2403
- # Update scan history severity counts
2404
- self.set_severity_count_for_scan(finding.severity, scan_history)
2460
+
2461
+ # Update scan history severity counts only if vulnerability was successfully created
2462
+ if vulnerability_created:
2463
+ logger.debug(
2464
+ f"Updating severity count for successfully created vulnerability with severity: {finding.severity}"
2465
+ )
2466
+ self.set_severity_count_for_scan(finding.severity, scan_history)
2467
+ else:
2468
+ logger.debug(
2469
+ f"Skipping severity count update for finding {finding.external_id} - no vulnerability created"
2470
+ )
2471
+
2472
+ def _process_checklist_finding(self, finding: IntegrationFinding) -> None:
2473
+ """Process a checklist finding."""
2474
+ if not (asset := self.get_asset_by_identifier(finding.asset_identifier)):
2475
+ if not getattr(self, "suppress_asset_not_found_errors", False):
2476
+ logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
2477
+ return
2478
+
2479
+ tool = regscale_models.ChecklistTool.STIGs
2480
+ if finding.vulnerability_type == "Vulnerability Scan":
2481
+ tool = regscale_models.ChecklistTool.VulnerabilityScanner
2482
+
2483
+ if not finding.cci_ref:
2484
+ finding.cci_ref = "CCI-000366"
2485
+
2486
+ # Convert checklist status to string
2487
+ checklist_status_str = str(finding.checklist_status.value)
2488
+
2489
+ logger.debug("Create or update checklist for %s", finding.external_id)
2490
+ regscale_models.Checklist(
2491
+ status=checklist_status_str,
2492
+ assetId=asset.id,
2493
+ tool=tool,
2494
+ baseline=finding.baseline,
2495
+ vulnerabilityId=finding.vulnerability_number,
2496
+ results=finding.results,
2497
+ check=finding.title,
2498
+ cci=finding.cci_ref,
2499
+ ruleId=finding.rule_id,
2500
+ version=finding.rule_version,
2501
+ comments=finding.comments,
2502
+ datePerformed=finding.date_created,
2503
+ ).create_or_update()
2504
+
2505
+ # Handle checklist status
2506
+ self._handle_checklist_status(finding)
2507
+
2508
+ def _handle_checklist_status(self, finding: IntegrationFinding) -> None:
2509
+ """Handle the status of a checklist finding."""
2510
+ if finding.status != regscale_models.IssueStatus.Closed:
2511
+ logger.debug("Handling failing checklist for %s", finding.external_id)
2512
+ if self.type == ScannerIntegrationType.CHECKLIST:
2513
+ self.handle_failing_checklist(finding=finding, plan_id=self.plan_id)
2514
+ else:
2515
+ logger.debug("Handling passing checklist for %s", finding.external_id)
2516
+ self.handle_passing_checklist(finding=finding, plan_id=self.plan_id)
2517
+
2518
+ def _process_vulnerability_finding(
2519
+ self,
2520
+ finding: IntegrationFinding,
2521
+ scan_history: regscale_models.ScanHistory,
2522
+ current_vulnerabilities: Dict[int, Set[int]],
2523
+ ) -> bool:
2524
+ """Process a vulnerability finding and return whether vulnerability was created."""
2525
+ logger.debug(f"Processing vulnerability for finding {finding.external_id} with status {finding.status}")
2526
+
2527
+ if asset := self.get_asset_by_identifier(finding.asset_identifier):
2528
+ logger.debug(f"Found asset {asset.id} for finding {finding.external_id}")
2529
+ if vulnerability_id := self.handle_vulnerability(finding, asset, scan_history):
2530
+ current_vulnerabilities[asset.id].add(vulnerability_id)
2531
+ logger.debug(
2532
+ f"Vulnerability created successfully for finding {finding.external_id} with ID {vulnerability_id}"
2533
+ )
2534
+ return True
2535
+ else:
2536
+ logger.debug(f"Vulnerability creation failed for finding {finding.external_id}")
2537
+ else:
2538
+ logger.debug(f"No asset found for finding {finding.external_id} with identifier {finding.asset_identifier}")
2539
+
2540
+ return False
2541
+
2542
+ def handle_vulnerability(
2543
+ self,
2544
+ finding: IntegrationFinding,
2545
+ asset: Optional[regscale_models.Asset],
2546
+ scan_history: regscale_models.ScanHistory,
2547
+ ) -> Optional[int]:
2548
+ """
2549
+ Handles the vulnerabilities for a finding.
2550
+
2551
+ :param IntegrationFinding finding: The integration finding
2552
+ :param Optional[regscale_models.Asset] asset: The associated asset
2553
+ :param regscale_models.ScanHistory scan_history: The scan history
2554
+ :rtype: Optional[int]
2555
+ :return: The vulnerability ID
2556
+ """
2557
+ logger.debug(f"Processing vulnerability for finding: {finding.external_id} - {finding.title}")
2558
+
2559
+ # Check for required fields - either plugin_name or cve must be present
2560
+ plugin_name = getattr(finding, "plugin_name", None)
2561
+ cve = getattr(finding, "cve", None)
2562
+
2563
+ if not plugin_name and not cve:
2564
+ logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
2565
+ logger.debug(f"Finding plugin_name: {plugin_name}, cve: {cve}")
2566
+ return None
2567
+
2568
+ if not asset:
2569
+ if not getattr(self, "suppress_asset_not_found_errors", False):
2570
+ logger.warning(
2571
+ "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
2572
+ )
2573
+ return None
2574
+
2575
+ logger.debug(f"Found asset: {asset.id} for finding {finding.external_id}")
2576
+ logger.debug(f"Finding plugin_name: {plugin_name}, cve: {cve}")
2577
+
2578
+ # Add retry logic for vulnerability creation
2579
+ max_retries = 3
2580
+ retry_delay = 2 # seconds
2581
+
2582
+ for attempt in range(max_retries):
2583
+ try:
2584
+ logger.debug(f"Creating vulnerability for finding {finding.external_id} (attempt {attempt + 1})")
2585
+ vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
2586
+ finding.vulnerability_id = vulnerability.id
2587
+ logger.info(f"Successfully created vulnerability {vulnerability.id} for finding {finding.external_id}")
2588
+
2589
+ if ScannerVariables.vulnerabilityCreation.lower() != "noissue":
2590
+ # Handle associated issue
2591
+ self.create_or_update_issue_from_finding(
2592
+ title=finding.title,
2593
+ finding=finding,
2594
+ )
2595
+
2596
+ return vulnerability.id
2597
+
2598
+ except Exception as e:
2599
+ if attempt < max_retries - 1:
2600
+ logger.warning(
2601
+ f"Vulnerability creation failed for finding {finding.external_id} (attempt {attempt + 1}/{max_retries}): {e}. Retrying in {retry_delay} seconds..."
2602
+ )
2603
+ import time
2604
+
2605
+ time.sleep(retry_delay)
2606
+ retry_delay *= 2 # Exponential backoff
2607
+ else:
2608
+ logger.error(
2609
+ f"Failed to create vulnerability for finding {finding.external_id} after {max_retries} attempts: {e}"
2610
+ )
2611
+ return None
2405
2612
 
2406
2613
  def create_vulnerability_from_finding(
2407
2614
  self, finding: IntegrationFinding, asset: regscale_models.Asset, scan_history: regscale_models.ScanHistory
@@ -2415,6 +2622,14 @@ class ScannerIntegration(ABC):
2415
2622
  :return: The created vulnerability
2416
2623
  :rtype: regscale_models.Vulnerability
2417
2624
  """
2625
+ logger.debug(f"Creating vulnerability object for finding {finding.external_id}")
2626
+
2627
+ logger.debug(f"Finding severity: '{finding.severity}' (type: {type(finding.severity)})")
2628
+ mapped_severity = self.issue_to_vulnerability_map.get(
2629
+ finding.severity, regscale_models.VulnerabilitySeverity.Low
2630
+ )
2631
+ logger.debug(f"Mapped severity: {mapped_severity}")
2632
+
2418
2633
  vulnerability = regscale_models.Vulnerability(
2419
2634
  title=finding.title,
2420
2635
  cve=finding.cve,
@@ -2426,7 +2641,7 @@ class ScannerIntegration(ABC):
2426
2641
  cvsSv3BaseVector=finding.cvss_v3_vector,
2427
2642
  cvsSv2BaseVector=finding.cvss_v2_vector,
2428
2643
  scanId=scan_history.id,
2429
- severity=self.issue_to_vulnerability_map.get(finding.severity, regscale_models.VulnerabilitySeverity.Low),
2644
+ severity=mapped_severity,
2430
2645
  description=finding.description,
2431
2646
  dateLastUpdated=finding.date_last_updated,
2432
2647
  parentId=self.plan_id,
@@ -2454,8 +2669,14 @@ class ScannerIntegration(ABC):
2454
2669
  affectedPackages=finding.affected_packages,
2455
2670
  )
2456
2671
 
2672
+ logger.debug(f"Calling create_or_update for vulnerability with title: {vulnerability.title}")
2457
2673
  vulnerability = vulnerability.create_or_update()
2458
- regscale_models.VulnerabilityMapping(
2674
+ logger.debug(f"Vulnerability created/updated with ID: {vulnerability.id}")
2675
+
2676
+ logger.debug(f"Creating vulnerability mapping for vulnerability {vulnerability.id}")
2677
+ logger.debug(f"Scan History ID: {scan_history.id}, Asset ID: {asset.id}, Plan ID: {self.plan_id}")
2678
+
2679
+ vulnerability_mapping = regscale_models.VulnerabilityMapping(
2459
2680
  vulnerabilityId=vulnerability.id,
2460
2681
  assetId=asset.id,
2461
2682
  scanId=scan_history.id,
@@ -2468,46 +2689,17 @@ class ScannerIntegration(ABC):
2468
2689
  lastSeen=finding.last_seen,
2469
2690
  status=finding.status,
2470
2691
  dateLastUpdated=get_current_datetime(),
2471
- ).create_unique()
2472
- return vulnerability
2473
-
2474
- def handle_vulnerability(
2475
- self,
2476
- finding: IntegrationFinding,
2477
- asset: Optional[regscale_models.Asset],
2478
- scan_history: regscale_models.ScanHistory,
2479
- ) -> Optional[int]:
2480
- """
2481
- Handles the vulnerabilities for a finding.
2482
-
2483
- :param IntegrationFinding finding: The integration finding
2484
- :param Optional[regscale_models.Asset] asset: The associated asset
2485
- :param regscale_models.ScanHistory scan_history: The scan history
2486
- :rtype: Optional[int]
2487
- :return: The vulnerability ID
2488
- """
2489
- if not (finding.plugin_name or finding.cve):
2490
- logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
2491
- return None
2492
-
2493
- if not asset:
2494
- if not getattr(self, "suppress_asset_not_found_errors", False):
2495
- logger.warning(
2496
- "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
2497
- )
2498
- return None
2692
+ )
2499
2693
 
2500
- vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
2501
- finding.vulnerability_id = vulnerability.id
2694
+ logger.debug(
2695
+ f"Vulnerability mapping payload: vulnerabilityId={vulnerability_mapping.vulnerabilityId}, "
2696
+ f"assetId={vulnerability_mapping.assetId}, scanId={vulnerability_mapping.scanId}"
2697
+ )
2502
2698
 
2503
- if ScannerVariables.vulnerabilityCreation.lower() != "noissue":
2504
- # Handle associated issue
2505
- self.create_or_update_issue_from_finding(
2506
- title=finding.title,
2507
- finding=finding,
2508
- )
2699
+ vulnerability_mapping.create_unique()
2700
+ logger.debug(f"Vulnerability mapping created for vulnerability {vulnerability.id}")
2509
2701
 
2510
- return vulnerability.id
2702
+ return vulnerability
2511
2703
 
2512
2704
  def _filter_vulns_open_by_other_tools(
2513
2705
  self, all_vulns: list[regscale_models.Vulnerability]
@@ -2786,16 +2978,44 @@ class ScannerIntegration(ABC):
2786
2978
  :param regscale_models.ScanHistory scan_history: Scan history object
2787
2979
  :rtype: None
2788
2980
  """
2789
- if severity == regscale_models.IssueSeverity.Low:
2981
+ logger.debug(f"Setting severity count for scan {scan_history.id}: severity='{severity}'")
2982
+ logger.debug(
2983
+ f"Current counts - Low: {scan_history.vLow}, Medium: {scan_history.vMedium}, High: {scan_history.vHigh}, Critical: {scan_history.vCritical}, Info: {scan_history.vInfo}"
2984
+ )
2985
+
2986
+ if severity == regscale_models.IssueSeverity.Low.value:
2790
2987
  scan_history.vLow += 1
2791
- elif severity == regscale_models.IssueSeverity.Moderate:
2988
+ logger.debug(f"Incremented vLow count to {scan_history.vLow}")
2989
+ elif severity == regscale_models.IssueSeverity.Moderate.value:
2792
2990
  scan_history.vMedium += 1
2793
- elif severity == regscale_models.IssueSeverity.High:
2991
+ logger.debug(f"Incremented vMedium count to {scan_history.vMedium}")
2992
+ elif severity == regscale_models.IssueSeverity.High.value:
2794
2993
  scan_history.vHigh += 1
2795
- elif severity == regscale_models.IssueSeverity.Critical:
2994
+ logger.debug(f"Incremented vHigh count to {scan_history.vHigh}")
2995
+ elif severity == regscale_models.IssueSeverity.Critical.value:
2796
2996
  scan_history.vCritical += 1
2997
+ logger.debug(f"Incremented vCritical count to {scan_history.vCritical}")
2797
2998
  else:
2798
2999
  scan_history.vInfo += 1
3000
+ logger.debug(f"Incremented vInfo count to {scan_history.vInfo}")
3001
+
3002
+ logger.debug(
3003
+ f"Final counts - Low: {scan_history.vLow}, Medium: {scan_history.vMedium}, High: {scan_history.vHigh}, Critical: {scan_history.vCritical}, Info: {scan_history.vInfo}"
3004
+ )
3005
+
3006
+ # Save the scan history immediately to persist the count changes
3007
+ try:
3008
+ scan_history.save()
3009
+ logger.debug(f"Successfully saved scan history {scan_history.id} with updated counts")
3010
+ except Exception as e:
3011
+ logger.error(f"Error saving scan history {scan_history.id} after updating counts: {e}")
3012
+ # Try to save again with a fresh fetch
3013
+ try:
3014
+ scan_history.fetch()
3015
+ scan_history.save()
3016
+ logger.debug(f"Successfully saved scan history {scan_history.id} after retry")
3017
+ except Exception as e2:
3018
+ logger.error(f"Failed to save scan history {scan_history.id} after retry: {e2}")
2799
3019
 
2800
3020
  @classmethod
2801
3021
  def cci_assessment(cls, plan_id: int) -> None:
@@ -3309,3 +3529,151 @@ class ScannerIntegration(ABC):
3309
3529
  datetime.strptime(issue_date_created, "%Y-%m-%d %H:%M:%S") + diff
3310
3530
  )
3311
3531
  return None
3532
+
3533
def create_vulnerabilities_bulk(
    self,
    findings: List[IntegrationFinding],
    assets: Dict[str, regscale_models.Asset],
    scan_history: regscale_models.ScanHistory,
) -> Dict[str, int]:
    """
    Create vulnerabilities in bulk to improve performance and reduce API calls.

    Prepares one vulnerability object per eligible finding, then submits them
    in a single batch, falling back to individual creation on failure.

    :param List[IntegrationFinding] findings: List of findings to create vulnerabilities for
    :param Dict[str, regscale_models.Asset] assets: Dictionary of assets by identifier
    :param regscale_models.ScanHistory scan_history: The scan history
    :return: Dictionary mapping finding external_id to vulnerability_id
    :rtype: Dict[str, int]
    """
    to_create, vuln_by_finding = self._prepare_vulnerabilities_for_bulk(findings, assets, scan_history)

    # Nothing eligible — skip the batch call entirely.
    if not to_create:
        logger.warning("No vulnerabilities to create in bulk")
        return {}

    return self._execute_bulk_vulnerability_creation(to_create, vuln_by_finding, findings, assets, scan_history)
3559
+
3560
def _prepare_vulnerabilities_for_bulk(
    self,
    findings: List[IntegrationFinding],
    assets: Dict[str, regscale_models.Asset],
    scan_history: regscale_models.ScanHistory,
) -> tuple[List, Dict]:
    """
    Prepare vulnerability objects for bulk creation.

    Skips findings without a plugin name/CVE or without a matching asset, and
    returns both the prepared objects and a finding external_id -> object map.
    """
    pending: List = []
    vuln_by_external_id: Dict = {}

    for candidate in findings:
        # Guard clauses: both checks log their own reason for skipping.
        if not self._is_finding_valid_for_vulnerability(candidate):
            continue
        matched_asset = assets.get(candidate.asset_identifier)
        if not self._is_asset_valid(matched_asset, candidate):
            continue

        vuln = self._create_vulnerability_object(candidate, matched_asset, scan_history)
        if vuln:
            pending.append(vuln)
            vuln_by_external_id[candidate.external_id] = vuln

    return pending, vuln_by_external_id
3584
+
3585
def _is_finding_valid_for_vulnerability(self, finding: IntegrationFinding) -> bool:
    """Return True when the finding carries a plugin name or CVE; warn and return False otherwise."""
    if finding.plugin_name or finding.cve:
        return True
    logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
    return False
3591
+
3592
def _is_asset_valid(self, asset: Optional[regscale_models.Asset], finding: IntegrationFinding) -> bool:
    """Return True when an asset was resolved for the finding; otherwise optionally warn and return False."""
    if asset:
        return True
    # The warning can be muted via the optional suppress_asset_not_found_errors attribute.
    if not getattr(self, "suppress_asset_not_found_errors", False):
        logger.warning(
            "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
        )
    return False
3601
+
3602
def _create_vulnerability_object(
    self,
    finding: IntegrationFinding,
    asset: regscale_models.Asset,
    scan_history: regscale_models.ScanHistory,
) -> Optional[regscale_models.Vulnerability]:
    """Create a vulnerability object from a finding, returning None (after logging) on any error."""
    vulnerability = None
    try:
        vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
    except Exception as exc:  # per-item guard: one bad finding must not abort the batch
        logger.error(f"Failed to prepare vulnerability for finding {finding.external_id}: {exc}")
    return vulnerability
3614
+
3615
def _execute_bulk_vulnerability_creation(
    self,
    vulnerabilities_to_create: List,
    finding_to_vuln_map: Dict,
    findings: List[IntegrationFinding],
    assets: Dict[str, regscale_models.Asset],
    scan_history: regscale_models.ScanHistory,
) -> Dict[str, int]:
    """
    Execute bulk vulnerability creation with fallback to individual creation.

    Any failure during the batch call or the follow-up mapping triggers the
    slower one-at-a-time path so no findings are silently dropped.
    """
    try:
        batch_result = regscale_models.Vulnerability.batch_create(
            vulnerabilities_to_create, progress_context=self.finding_progress
        )
        mapped = self._map_vulnerabilities_to_findings(
            batch_result, vulnerabilities_to_create, finding_to_vuln_map
        )
        logger.info(f"Successfully created {len(batch_result)} vulnerabilities in bulk")
        return mapped
    except Exception as exc:
        logger.error(f"Bulk vulnerability creation failed: {exc}")
        logger.info("Falling back to individual vulnerability creation...")
        return self._create_vulnerabilities_individual(findings, assets, scan_history)
3640
+
3641
+ def _map_vulnerabilities_to_findings(
3642
+ self,
3643
+ created_vulnerabilities: List,
3644
+ vulnerabilities_to_create: List,
3645
+ finding_to_vuln_map: Dict,
3646
+ ) -> Dict[str, int]:
3647
+ """Map created vulnerabilities back to findings."""
3648
+ result = {}
3649
+ for i, created_vuln in enumerate(created_vulnerabilities):
3650
+ if i < len(vulnerabilities_to_create):
3651
+ original_vuln = vulnerabilities_to_create[i]
3652
+ # Find the finding that corresponds to this vulnerability
3653
+ for finding_id, vuln in finding_to_vuln_map.items():
3654
+ if vuln == original_vuln:
3655
+ result[finding_id] = created_vuln.id
3656
+ break
3657
+ return result
3658
+
3659
def _create_vulnerabilities_individual(
    self,
    findings: List[IntegrationFinding],
    assets: Dict[str, regscale_models.Asset],
    scan_history: regscale_models.ScanHistory,
) -> Dict[str, int]:
    """
    Create vulnerabilities individually as fallback.

    :param List[IntegrationFinding] findings: List of findings
    :param Dict[str, regscale_models.Asset] assets: Dictionary of assets
    :param regscale_models.ScanHistory scan_history: The scan history
    :return: Dictionary mapping finding external_id to vulnerability_id
    :rtype: Dict[str, int]
    """
    created: Dict[str, int] = {}
    for item in findings:
        vuln_id = self.handle_vulnerability(item, assets.get(item.asset_identifier), scan_history)
        if vuln_id:
            created[item.external_id] = vuln_id
    return created