regscale-cli 6.21.2.2-py3-none-any.whl → 6.22.0.1-py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

This version of regscale-cli might be problematic.

Files changed (32)
  1. regscale/_version.py +1 -1
  2. regscale/core/app/application.py +3 -0
  3. regscale/core/app/utils/app_utils.py +31 -0
  4. regscale/integrations/commercial/jira.py +27 -5
  5. regscale/integrations/commercial/qualys/__init__.py +160 -60
  6. regscale/integrations/commercial/qualys/scanner.py +300 -39
  7. regscale/integrations/commercial/synqly/edr.py +2 -8
  8. regscale/integrations/commercial/wizv2/async_client.py +4 -0
  9. regscale/integrations/commercial/wizv2/scanner.py +50 -24
  10. regscale/integrations/public/__init__.py +13 -0
  11. regscale/integrations/public/csam/__init__.py +0 -0
  12. regscale/integrations/public/csam/csam.py +1129 -0
  13. regscale/integrations/public/fedramp/fedramp_cis_crm.py +175 -51
  14. regscale/integrations/scanner_integration.py +513 -145
  15. regscale/models/integration_models/cisa_kev_data.json +64 -3
  16. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  17. regscale/models/regscale_models/__init__.py +2 -0
  18. regscale/models/regscale_models/catalog.py +1 -1
  19. regscale/models/regscale_models/control_implementation.py +8 -8
  20. regscale/models/regscale_models/form_field_value.py +5 -3
  21. regscale/models/regscale_models/inheritance.py +44 -0
  22. regscale/regscale.py +2 -0
  23. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/METADATA +1 -1
  24. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/RECORD +29 -29
  25. tests/regscale/models/test_tenable_integrations.py +811 -105
  26. regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +0 -7388
  27. regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +0 -9605
  28. regscale/integrations/public/fedramp/parts_mapper.py +0 -107
  29. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/LICENSE +0 -0
  30. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/WHEEL +0 -0
  31. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/entry_points.txt +0 -0
  32. {regscale_cli-6.21.2.2.dist-info → regscale_cli-6.22.0.1.dist-info}/top_level.txt +0 -0
@@ -27,18 +27,20 @@ from regscale.integrations.scanner_integration import (
 )
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import AssetStatus, IssueSeverity, IssueStatus
+from regscale import models as regscale_models

 logger = logging.getLogger("regscale")

 NO_RESULTS = "No results available"
 NO_DESCRIPTION = "No description available"
 NO_REMEDIATION = "No remediation information available"
+SCANNING_TOOL_NAME = "Qualys Total Cloud"


 class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
     """Class for handling Qualys Total Cloud scanner integration using JSONL."""

-    title: str = "Qualys Total Cloud"
+    title: str = SCANNING_TOOL_NAME
     asset_identifier_field: str = "qualysId"
     finding_severity_map: Dict[str, Any] = {
         "0": IssueSeverity.NotAssigned.value,
@@ -73,34 +75,52 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         self.xml_data = kwargs.pop("xml_data", None)
         self.containers = kwargs.pop("containers", None)
         self.is_component = kwargs.get("is_component", False)
-        # Setting a dummy file path to avoid validation errors
+
+        self._setup_file_path(kwargs)
+        self._apply_vulnerability_creation_setting(kwargs)
+        self._apply_ssl_verification_setting(kwargs)
+        self._apply_thread_workers_setting(kwargs)
+
+        super().__init__(*args, **kwargs)
+        # No need to initialize clients, they are inherited from the parent class
+
+    def _setup_file_path(self, kwargs: Dict[str, Any]) -> None:
+        """Setup file path for XML data processing."""
         if self.xml_data and "file_path" not in kwargs:
             kwargs["file_path"] = None

-        # Apply ScannerVariables settings
-        if not kwargs.get("vulnerability_creation"):
-            # Check QualysVariables-specific override first
-            if hasattr(QualysVariables, "vulnerabilityCreation") and QualysVariables.vulnerabilityCreation:
-                kwargs["vulnerability_creation"] = QualysVariables.vulnerabilityCreation
-                logger.info(f"Using Qualys-specific vulnerability creation mode: {kwargs['vulnerability_creation']}")
-            # Use global ScannerVariables if no Qualys-specific setting
-            elif hasattr(ScannerVariables, "vulnerabilityCreation"):
-                kwargs["vulnerability_creation"] = ScannerVariables.vulnerabilityCreation
-                logger.info(f"Using global vulnerability creation mode: {kwargs['vulnerability_creation']}")
-
-        # Apply SSL verification setting from ScannerVariables
+    def _apply_vulnerability_creation_setting(self, kwargs: Dict[str, Any]) -> None:
+        """Apply vulnerability creation setting from variables."""
+        if kwargs.get("vulnerability_creation"):
+            return
+
+        if self._has_qualys_vulnerability_creation():
+            kwargs["vulnerability_creation"] = QualysVariables.vulnerabilityCreation
+            logger.info(f"Using Qualys-specific vulnerability creation mode: {kwargs['vulnerability_creation']}")
+        elif self._has_scanner_vulnerability_creation():
+            kwargs["vulnerability_creation"] = ScannerVariables.vulnerabilityCreation
+            logger.info(f"Using global vulnerability creation mode: {kwargs['vulnerability_creation']}")
+
+    def _has_qualys_vulnerability_creation(self) -> bool:
+        """Check if QualysVariables has vulnerability creation setting."""
+        return hasattr(QualysVariables, "vulnerabilityCreation") and QualysVariables.vulnerabilityCreation
+
+    def _has_scanner_vulnerability_creation(self) -> bool:
+        """Check if ScannerVariables has vulnerability creation setting."""
+        return hasattr(ScannerVariables, "vulnerabilityCreation")
+
+    def _apply_ssl_verification_setting(self, kwargs: Dict[str, Any]) -> None:
+        """Apply SSL verification setting from ScannerVariables."""
         if not kwargs.get("ssl_verify") and hasattr(ScannerVariables, "sslVerify"):
             kwargs["ssl_verify"] = ScannerVariables.sslVerify
             logger.debug(f"Using SSL verification setting: {kwargs['ssl_verify']}")

-        # Apply ScannerVariables.threadMaxWorkers if available
+    def _apply_thread_workers_setting(self, kwargs: Dict[str, Any]) -> None:
+        """Apply thread max workers setting from ScannerVariables."""
        if not kwargs.get("max_workers") and hasattr(ScannerVariables, "threadMaxWorkers"):
            kwargs["max_workers"] = ScannerVariables.threadMaxWorkers
            logger.debug(f"Using thread max workers: {kwargs['max_workers']}")

-        super().__init__(*args, **kwargs)
-        # No need to initialize clients, they are inherited from the parent class
-
     def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
         """
         Check if the XML data is valid for Qualys Total Cloud.
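For context, the refactored constructor above resolves each setting with a fixed precedence: an explicit keyword argument wins, then the Qualys-specific variable, then the global scanner variable. A minimal sketch of that precedence chain, using stand-in classes and made-up setting values rather than the real QualysVariables/ScannerVariables objects:

# Stand-in classes with hypothetical values; only the precedence logic mirrors the diff above.
class QualysVariables:
    vulnerabilityCreation = "qualys-specific-mode"


class ScannerVariables:
    vulnerabilityCreation = "global-mode"


def resolve_vulnerability_creation(kwargs: dict) -> None:
    if kwargs.get("vulnerability_creation"):
        return  # explicit kwarg wins
    if getattr(QualysVariables, "vulnerabilityCreation", None):
        kwargs["vulnerability_creation"] = QualysVariables.vulnerabilityCreation
    elif hasattr(ScannerVariables, "vulnerabilityCreation"):
        kwargs["vulnerability_creation"] = ScannerVariables.vulnerabilityCreation


settings = {}
resolve_vulnerability_creation(settings)
print(settings["vulnerability_creation"])  # "qualys-specific-mode"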
@@ -230,6 +250,12 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         # Extract host information
         host_info = self._extract_host_information(processed_host)

+        # Log asset creation for debugging
+        logger.debug(f"Creating asset for host ID: {host_info['host_id']}")
+        logger.debug(f"Asset name: {host_info['name']}")
+        logger.debug(f"Plan ID: {self.plan_id}, Parent Module: {self.parent_module}")
+        logger.debug(f"Is Component: {self.is_component}")
+
         # Create and return the asset
         return IntegrationAsset(
             name=host_info["name"],
@@ -448,6 +474,23 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):

         current_time = self.scan_date or get_current_datetime()

+        # Extract CVSS scores and convert to float if possible
+        cvss_v3_score = detection.get("CVSS3_BASE")
+        cvss_v2_score = detection.get("CVSS_BASE")
+
+        # Convert CVSS scores to float if they're strings
+        if cvss_v3_score and isinstance(cvss_v3_score, str):
+            try:
+                cvss_v3_score = float(cvss_v3_score)
+            except (ValueError, TypeError):
+                cvss_v3_score = None
+
+        if cvss_v2_score and isinstance(cvss_v2_score, str):
+            try:
+                cvss_v2_score = float(cvss_v2_score)
+            except (ValueError, TypeError):
+                cvss_v2_score = None
+
         return {
             "qid": detection.get("QID", "Unknown"),
             "severity": detection.get("SEVERITY", "0"),
@@ -456,9 +499,9 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
             "last_found": detection.get("LAST_FOUND_DATETIME", current_time),
             "unique_id": detection.get("UNIQUE_VULN_ID", f"QID-{detection.get('QID', 'Unknown')}"),
             "results": detection.get("RESULTS", NO_RESULTS),
-            "cvss_v3_score": detection.get("CVSS3_BASE"),
+            "cvss_v3_score": cvss_v3_score,
             "cvss_v3_vector": detection.get("CVSS3_VECTOR", ""),
-            "cvss_v2_score": detection.get("CVSS_BASE"),
+            "cvss_v2_score": cvss_v2_score,
             "cvss_v2_vector": detection.get("CVSS_VECTOR", ""),
         }

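The two hunks above push the raw CVSS3_BASE/CVSS_BASE strings through a defensive string-to-float conversion before they land in the finding dictionary, since Qualys reports scores as strings and unparseable values are dropped. A standalone sketch of the same pattern; the helper name is illustrative, not part of the package:

from typing import Optional, Union


def coerce_cvss(value: Union[str, float, None]) -> Optional[float]:
    # Return a float score, or None when the value is missing or unparseable.
    if value in (None, ""):
        return None
    try:
        return float(value)
    except (ValueError, TypeError):
        return None


assert coerce_cvss("7.5") == 7.5
assert coerce_cvss("N/A") is None
assert coerce_cvss(None) is None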
@@ -504,29 +547,79 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         if not detection:
             return ""

-        cve_id = ""
         try:
-            cve_list = detection.get("CVE_ID_LIST", {})
-            if not cve_list:
-                return ""
-
-            if not isinstance(cve_list, dict):
-                logger.warning(f"Expected dictionary for CVE_ID_LIST, got {type(cve_list)}")
-                return ""
-
-            if "CVE_ID" in cve_list:
-                cve_data = cve_list.get("CVE_ID", [])
-                if isinstance(cve_data, list) and cve_data:
-                    cve_id = str(cve_data[0]) if cve_data[0] else ""
-                elif isinstance(cve_data, str):
-                    cve_id = cve_data
-                elif cve_data:
-                    # Try to convert to string if it's something else
-                    cve_id = str(cve_data)
+            # Try to extract CVE from CVE_ID_LIST first
+            cve_id = self._extract_cve_from_cve_list(detection)
+            if cve_id:
+                return cve_id
+
+            # Try direct CVE fields if CVE_ID_LIST didn't work
+            cve_id = self._extract_cve_from_direct_fields(detection)
+            if cve_id:
+                return cve_id
+
         except Exception as e:
             logger.warning(f"Error extracting CVE_ID: {str(e)}")

-        return cve_id
+        return ""
+
+    def _extract_cve_from_cve_list(self, detection: Dict[str, Any]) -> str:
+        """
+        Extract CVE ID from CVE_ID_LIST field.
+
+        :param Dict[str, Any] detection: Detection data
+        :return: CVE ID string
+        :rtype: str
+        """
+        cve_list = detection.get("CVE_ID_LIST", {})
+        if not cve_list:
+            return ""
+
+        if not isinstance(cve_list, dict):
+            logger.warning(f"Expected dictionary for CVE_ID_LIST, got {type(cve_list)}")
+            return ""
+
+        if "CVE_ID" not in cve_list:
+            return ""
+
+        cve_data = cve_list.get("CVE_ID", [])
+        return self._convert_cve_data_to_string(cve_data)
+
+    def _extract_cve_from_direct_fields(self, detection: Dict[str, Any]) -> str:
+        """
+        Extract CVE ID from direct CVE fields.
+
+        :param Dict[str, Any] detection: Detection data
+        :return: CVE ID string
+        :rtype: str
+        """
+        # Try CVE field directly
+        cve_id = detection.get("CVE", "")
+        if cve_id:
+            return str(cve_id)
+
+        # Try CVE_ID field directly
+        cve_id = detection.get("CVE_ID", "")
+        if cve_id:
+            return str(cve_id)
+
+        return ""
+
+    def _convert_cve_data_to_string(self, cve_data: Any) -> str:
+        """
+        Convert CVE data to string format.
+
+        :param Any cve_data: CVE data to convert
+        :return: CVE ID string
+        :rtype: str
+        """
+        if isinstance(cve_data, list) and cve_data:
+            return str(cve_data[0]) if cve_data[0] else ""
+        elif isinstance(cve_data, str):
+            return cve_data
+        elif cve_data:
+            return str(cve_data)
+        return ""

     def _extract_cve_id_from_xml(self, detection: Optional[Union[Dict[str, Any], ET.Element]]) -> str:
         """
@@ -648,6 +741,12 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):
         current_time = self.scan_date or get_current_datetime()
         qid = finding_data.get("qid", "Unknown")

+        # Log the finding data for debugging
+        logger.debug(f"Creating finding for QID {qid}, host {host_id}")
+        logger.debug(f"CVE: {finding_data.get('cve_id', 'None')}")
+        logger.debug(f"CVSS V3 Score: {finding_data.get('cvss_v3_score', 'None')}")
+        logger.debug(f"CVSS V2 Score: {finding_data.get('cvss_v2_score', 'None')}")
+
         return IntegrationFinding(
             title=finding_data.get("title", f"Qualys Vulnerability QID-{qid}"),
             description=finding_data.get("diagnosis", NO_DESCRIPTION),
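The CVE value logged above comes from the extraction helpers two hunks earlier, which look for a CVE in a fixed order: CVE_ID_LIST.CVE_ID (which may arrive as a list or a single string), then the direct CVE and CVE_ID fields. A compact sketch of that lookup order on sample detection payloads; the field names come from the diff, but the function itself is illustrative only:

from typing import Any, Dict


def first_cve(detection: Dict[str, Any]) -> str:
    # Prefer CVE_ID_LIST.CVE_ID, then fall back to the direct CVE / CVE_ID fields.
    cve_list = detection.get("CVE_ID_LIST") or {}
    if isinstance(cve_list, dict):
        cve_data = cve_list.get("CVE_ID", [])
        if isinstance(cve_data, list) and cve_data:
            return str(cve_data[0])
        if isinstance(cve_data, str) and cve_data:
            return cve_data
    return str(detection.get("CVE") or detection.get("CVE_ID") or "")


print(first_cve({"CVE_ID_LIST": {"CVE_ID": ["CVE-2024-0001", "CVE-2024-0002"]}}))  # CVE-2024-0001
print(first_cve({"CVE_ID": "CVE-2023-9999"}))  # CVE-2023-9999
print(first_cve({}))  # empty string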
@@ -1376,3 +1475,165 @@ class QualysTotalCloudJSONLIntegration(JSONLScannerIntegration):

         # Default to Open for any unknown status
         return IssueStatus.Open
+
+    def create_vulnerability_from_finding(
+        self, finding: IntegrationFinding, asset: regscale_models.Asset, scan_history: regscale_models.ScanHistory
+    ) -> regscale_models.Vulnerability:
+        """
+        Override the parent method to add better debugging and ensure proper vulnerability mapping creation.
+
+        :param IntegrationFinding finding: The integration finding
+        :param regscale_models.Asset asset: The associated asset
+        :param regscale_models.ScanHistory scan_history: The scan history
+        :return: The created vulnerability
+        :rtype: regscale_models.Vulnerability
+        """
+        logger.debug(f"Creating vulnerability from finding: {finding.title}")
+        logger.debug(f"Asset ID: {asset.id}, Asset Name: {asset.name}")
+        logger.debug(f"Scan History ID: {scan_history.id}")
+        logger.debug(f"Plan ID: {self.plan_id}, Parent Module: {self.parent_module}")
+        logger.debug(f"Is Component: {self.is_component}")
+
+        # Call the parent method
+        vulnerability = super().create_vulnerability_from_finding(finding, asset, scan_history)
+
+        logger.debug(f"Created vulnerability with ID: {vulnerability.id}")
+        logger.debug(f"Vulnerability parentId: {vulnerability.parentId}")
+        logger.debug(f"Vulnerability parentModule: {vulnerability.parentModule}")
+
+        # Verify the vulnerability mapping was created
+        try:
+            mappings = regscale_models.VulnerabilityMapping.find_by_vulnerability(vulnerability.id)
+            logger.debug(f"Found {len(mappings)} vulnerability mappings for vulnerability {vulnerability.id}")
+            for mapping in mappings:
+                logger.debug(
+                    f"Mapping - Asset ID: {mapping.assetId}, Scan ID: {mapping.scanId}, Security Plan ID: {mapping.securityPlanId}"
+                )
+        except Exception as e:
+            logger.warning(f"Error checking vulnerability mappings: {e}")
+
+        return vulnerability
+
+    def handle_vulnerability(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Optional[int]:
+        """
+        Override parent method to ensure Qualys findings always create vulnerabilities.
+        This ensures that Qualys vulnerabilities are properly populated in RegScale.
+
+        :param IntegrationFinding finding: The integration finding
+        :param Optional[regscale_models.Asset] asset: The associated asset
+        :param regscale_models.ScanHistory scan_history: The scan history
+        :rtype: Optional[int]
+        :return: The vulnerability ID
+        """
+        # Check for required fields - either plugin_name or cve must be present
+        if not (finding.plugin_name or finding.cve):
+            logger.warning(
+                f"Qualys: Skipping vulnerability creation - missing plugin_name and cve for finding {finding.external_id}"
+            )
+            return None
+
+        # Ensure vulnerability creation is enabled for Qualys
+        logger.debug(f"Qualys: Vulnerability creation setting: {self.vulnerability_creation}")
+        if self.vulnerability_creation == "NoIssue":
+            logger.debug(f"Qualys: Vulnerability creation disabled, skipping finding {finding.external_id}")
+            return None
+
+        # Create vulnerability using parent method
+        logger.debug(f"Qualys: Calling parent handle_vulnerability for finding {finding.external_id}")
+        vulnerability_id = super().handle_vulnerability(finding, asset, scan_history)
+
+        if vulnerability_id:
+            logger.debug(f"Qualys: Created vulnerability {vulnerability_id} for finding {finding.external_id}")
+        else:
+            logger.warning(f"Qualys: Failed to create vulnerability for finding {finding.external_id}")
+
+        return vulnerability_id
+
+    def set_severity_count_for_scan(self, severity: str, scan_history: regscale_models.ScanHistory) -> None:
+        """
+        Override parent method to ensure Qualys scan history severity counts are properly updated.
+        This ensures that the vulnerability counts are accurately reflected in the scan history.
+
+        :param str severity: Severity of the vulnerability
+        :param regscale_models.ScanHistory scan_history: Scan history object
+        :rtype: None
+        """
+        # Use parent method to update severity counts
+        super().set_severity_count_for_scan(severity, scan_history)
+
+    def create_scan_history(self) -> regscale_models.ScanHistory:
+        """
+        Override parent method to ensure Qualys scan history is properly created.
+        This ensures that the scanning tool name is correctly set for Qualys scans.
+        Also reuses existing scan history records for the same day and tool to avoid duplicates.
+
+        :return: A newly created or reused ScanHistory object
+        :rtype: regscale_models.ScanHistory
+        """
+        logger.debug(f"Creating scan history for plan {self.plan_id}, module {self.parent_module}")
+
+        try:
+            # Load existing scans for the plan/module
+            existing_scans = regscale_models.ScanHistory.get_all_by_parent(
+                parent_id=self.plan_id, parent_module=self.parent_module
+            )
+
+            # Normalize target date to date component only
+            target_dt = self.scan_date if self.scan_date else get_current_datetime()
+            target_date_only = target_dt.split("T")[0] if isinstance(target_dt, str) else str(target_dt)[:10]
+
+            # Find an existing scan for today and this tool
+            for scan in existing_scans:
+                try:
+                    if getattr(scan, "scanningTool", None) == SCANNING_TOOL_NAME and getattr(scan, "scanDate", None):
+                        scan_date = str(scan.scanDate)
+                        scan_date_only = scan_date.split("T")[0]
+                        if scan_date_only == target_date_only:
+                            # Reuse this scan history; refresh last updated
+                            logger.debug(f"Reusing existing scan history {scan.id} for {target_date_only}")
+                            scan.dateLastUpdated = get_current_datetime()
+                            scan.lastUpdatedById = self.assessor_id
+                            scan.save()
+                            return scan
+                except Exception:
+                    # Skip any malformed scan records
+                    continue

+            # No existing same-day scan found, create new
+            logger.debug("No existing scan history found for today, creating new one")
+            scan_history = regscale_models.ScanHistory(
+                parentId=self.plan_id,
+                parentModule=self.parent_module,
+                scanningTool=SCANNING_TOOL_NAME,  # Ensure proper scanning tool name
+                scanDate=self.scan_date if self.scan_date else get_current_datetime(),
+                createdById=self.assessor_id,
+                lastUpdatedById=self.assessor_id,
+                tenantsId=self.tenant_id,
+                vLow=0,
+                vMedium=0,
+                vHigh=0,
+                vCritical=0,
+            ).create()
+
+            logger.debug(f"Created new scan history with ID: {scan_history.id}")
+
+            # Ensure the scan history is properly created and cached
+            count = 0
+            regscale_models.ScanHistory.delete_object_cache(scan_history)
+            while not regscale_models.ScanHistory.get_object(object_id=scan_history.id) or count > 10:
+                logger.info("Waiting for ScanHistory to be created...")
+                time.sleep(1)
+                count += 1
+                regscale_models.ScanHistory.delete_object_cache(scan_history)
+
+            return scan_history
+
+        except Exception as e:
+            logger.error(f"Error in create_scan_history: {e}")
+            # Fallback: create new scan history using parent method
+            return super().create_scan_history()
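The create_scan_history override above dedupes scan history by comparing only the date component of ISO-8601 timestamps for the same scanning tool. A minimal, model-free sketch of that matching rule; ScanRecord is a stand-in, not a RegScale model:

from dataclasses import dataclass
from typing import Iterable, Optional


@dataclass
class ScanRecord:
    id: int
    scanningTool: str
    scanDate: str  # ISO-8601, e.g. "2024-05-01T13:45:00"


def find_reusable_scan(scans: Iterable[ScanRecord], tool: str, target_date: str) -> Optional[ScanRecord]:
    # Reuse a scan from the same tool whose date component matches the target date.
    target_day = target_date.split("T")[0]
    for scan in scans:
        if scan.scanningTool == tool and scan.scanDate.split("T")[0] == target_day:
            return scan
    return None


scans = [ScanRecord(1, "Qualys Total Cloud", "2024-05-01T08:00:00")]
print(find_reusable_scan(scans, "Qualys Total Cloud", "2024-05-01T17:30:00"))  # reuses scan 1
print(find_reusable_scan(scans, "Qualys Total Cloud", "2024-05-02T09:00:00"))  # None -> create a new scan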
@@ -73,18 +73,12 @@ def sync_sentinelone(regscale_ssp_id: int, edr_events_url: str) -> None:

 @edr.command(name="sync_sophos")
 @regscale_ssp_id()
-@click.option(
-    "--url",
-    type=click.STRING,
-    help="Base URL for the Sophos Endpoint API.",
-    required=False,
-)
-def sync_sophos(regscale_ssp_id: int, url: str) -> None:
+def sync_sophos(regscale_ssp_id: int) -> None:
     """Sync Edr from Sophos to RegScale."""
     from regscale.models.integration_models.synqly_models.connectors import Edr

     edr_sophos = Edr("sophos")
-    edr_sophos.run_sync(regscale_ssp_id=regscale_ssp_id, url=url)
+    edr_sophos.run_sync(regscale_ssp_id=regscale_ssp_id)


 @edr.command(name="sync_tanium")
@@ -156,6 +156,10 @@ class AsyncWizGraphQLClient:
             nodes = topic_data.get("nodes", [])
             page_info = topic_data.get("pageInfo", {})

+            # Handle case where nodes is explicitly None
+            if nodes is None:
+                nodes = []
+
             all_nodes.extend(nodes)

             has_next_page = page_info.get("hasNextPage", False)
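The explicit None check above matters because dict.get only falls back to its default when the key is absent, not when the API returns a literal null, so nodes can legitimately come back as None:

topic_data = {"nodes": None, "pageInfo": {"hasNextPage": False}}
nodes = topic_data.get("nodes", [])
print(nodes)  # None, not [] -- list.extend(None) would raise TypeError
nodes = [] if nodes is None else nodes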
@@ -5,9 +5,10 @@ import json
 import logging
 import os
 import re
-from typing import Any, Dict, Iterator, List, Optional, Union, Tuple
+from collections.abc import Iterator
+from typing import Any, Dict, List, Optional, Tuple, Union

-from regscale.core.app.utils.app_utils import check_file_path, get_current_datetime, error_and_exit
+from regscale.core.app.utils.app_utils import check_file_path, error_and_exit, get_current_datetime
 from regscale.core.utils import get_base_protocol_from_port
 from regscale.core.utils.date import format_to_regscale_iso
 from regscale.integrations.commercial.wizv2.async_client import run_async_queries
@@ -42,7 +43,11 @@ from regscale.integrations.commercial.wizv2.utils import (
 )
 from regscale.integrations.commercial.wizv2.variables import WizVariables
 from regscale.integrations.commercial.wizv2.wiz_auth import wiz_authenticate
-from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, ScannerIntegration
+from regscale.integrations.scanner_integration import (
+    IntegrationAsset,
+    IntegrationFinding,
+    ScannerIntegration,
+)
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import IssueStatus, regscale_models
 from regscale.models.regscale_models.compliance_settings import ComplianceSettings
@@ -119,7 +124,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
                 # Use async concurrent queries for better performance
                 yield from self.fetch_findings_async(*args, **kwargs)
             except Exception as e:
-                logger.warning(f"Async query failed, falling back to sync: {str(e)}")
+                logger.warning(f"Async query failed, falling back to sync: {e!s}")
                 # Fallback to synchronous method
                 yield from self.fetch_findings_sync(**kwargs)
         else:
@@ -195,8 +200,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
                 progress_tracker=self.finding_progress,
                 max_concurrent=5,
             )
-        else:
-            return self._load_cached_data_with_progress(query_configs)
+        return self._load_cached_data_with_progress(query_configs)

     def _process_query_results(
         self,
@@ -337,7 +341,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             self.finding_progress.advance(main_task, len(query_configs))

         except Exception as e:
-            logger.error(f"Error in async findings fetch: {str(e)}", exc_info=True)
+            logger.error(f"Error in async findings fetch: {e!s}", exc_info=True)
             if "main_task" in locals():
                 self.finding_progress.update(
                     main_task, description=f"[red]✗ Error in concurrent queries: {str(e)[:50]}..."
@@ -396,7 +400,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):

         try:
             if file_path and os.path.exists(file_path):
-                with open(file_path, "r", encoding="utf-8") as file:
+                with open(file_path, encoding="utf-8") as file:
                     nodes = json.load(file)

                 logger.info(f"Loaded {len(nodes)} cached {query_type} findings from {file_path}")
@@ -751,17 +755,16 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             # Route to specific parsing method based on vulnerability type
             if vulnerability_type == WizVulnerabilityType.SECRET_FINDING:
                 return self._parse_secret_finding(node)
-            elif vulnerability_type == WizVulnerabilityType.NETWORK_EXPOSURE_FINDING:
+            if vulnerability_type == WizVulnerabilityType.NETWORK_EXPOSURE_FINDING:
                 return self._parse_network_exposure_finding(node)
-            elif vulnerability_type == WizVulnerabilityType.EXTERNAL_ATTACH_SURFACE:
+            if vulnerability_type == WizVulnerabilityType.EXTERNAL_ATTACH_SURFACE:
                 return self._parse_external_attack_surface_finding(node)
-            elif vulnerability_type == WizVulnerabilityType.EXCESSIVE_ACCESS_FINDING:
+            if vulnerability_type == WizVulnerabilityType.EXCESSIVE_ACCESS_FINDING:
                 return self._parse_excessive_access_finding(node)
-            elif vulnerability_type == WizVulnerabilityType.END_OF_LIFE_FINDING:
+            if vulnerability_type == WizVulnerabilityType.END_OF_LIFE_FINDING:
                 return self._parse_end_of_life_finding(node)
-            else:
-                # Fallback to generic parsing for any other types
-                return self._parse_generic_finding(node, vulnerability_type)
+            # Fallback to generic parsing for any other types
+            return self._parse_generic_finding(node, vulnerability_type)
         except (KeyError, TypeError, ValueError) as e:
             logger.error("Error parsing Wiz finding: %s", str(e), exc_info=True)
             return None
@@ -1183,10 +1186,9 @@ class WizVulnerabilityIntegration(ScannerIntegration):

         if status_lower == "open":
             return IssueStatus.Open
-        elif status_lower in ["resolved", "rejected"]:
+        if status_lower in ["resolved", "rejected"]:
             return IssueStatus.Closed
-        else:
-            return IssueStatus.Open
+        return IssueStatus.Open

     def fetch_assets(self, *args, **kwargs) -> Iterator[IntegrationAsset]:
         """
@@ -1435,6 +1437,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
     def get_software_name(software_name_dict: dict, wiz_entity_properties: dict, node: dict) -> Optional[str]:
         """
         Gets the software name from the software name dictionary or Wiz entity properties.
+        If no software name is present, assigns a name based on the parent asset and assigned component type.

         :param dict software_name_dict: Dictionary containing software name and vendor
         :param dict wiz_entity_properties: Properties of the Wiz entity
@@ -1442,9 +1445,32 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         :return: Software name
         :rtype: Optional[str]
         """
-        if map_category(node) == regscale_models.AssetCategory.Software:
-            return software_name_dict.get("software_name") or wiz_entity_properties.get("nativeType")
-        return None
+        if map_category(node) != regscale_models.AssetCategory.Software:
+            return None
+
+        # First try CPE-derived software name
+        if software_name := software_name_dict.get("software_name"):
+            return software_name
+
+        # Then try nativeType if it exists and looks meaningful
+        native_type = wiz_entity_properties.get("nativeType")
+        if native_type and not native_type.startswith(("Microsoft.", "AWS::", "Google.")):
+            return native_type
+
+        # Finally, generate a name based on parent asset and component type
+        parent_name = node.get("name", "")
+        component_type = node.get("type", "").replace("_", " ").title()
+
+        if not parent_name:
+            return component_type
+
+        # Clean up parent name for better readability by removing
+        # common prefixes/suffixes that aren't meaningful
+        cleaned_parent = parent_name
+        for prefix in ["1-", "temp-", "test-"]:
+            if cleaned_parent.lower().startswith(prefix):
+                cleaned_parent = cleaned_parent[len(prefix) :]
+        return f"{cleaned_parent} - {component_type}" if cleaned_parent else component_type

     # Pre-compiled regex for better performance (ReDoS-safe pattern)
     _PACKAGE_PATTERN = re.compile(r"([^()]+) \(([^()]+)\)")
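The new naming fallback in get_software_name only applies to software assets: the CPE-derived name wins, then a nativeType that does not look like a cloud resource type, then a name built from the parent asset and component type. A sketch of just that fallback with simplified, hypothetical node shapes (not real Wiz payloads):

def fallback_software_name(native_type, node):
    # Mirror of the fallback order above, with simplified inputs.
    if native_type and not native_type.startswith(("Microsoft.", "AWS::", "Google.")):
        return native_type
    parent_name = node.get("name", "")
    component_type = node.get("type", "").replace("_", " ").title()
    if not parent_name:
        return component_type
    for prefix in ("1-", "temp-", "test-"):
        if parent_name.lower().startswith(prefix):
            parent_name = parent_name[len(prefix):]
    return f"{parent_name} - {component_type}" if parent_name else component_type


print(fallback_software_name("nginx", {}))  # "nginx"
print(fallback_software_name("Microsoft.Web/sites", {"name": "temp-api", "type": "WEB_APP"}))  # "api - Web App"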
@@ -1489,7 +1515,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         file_mod_time = datetime.datetime.fromtimestamp(os.path.getmtime(file_path))
         if current_time - file_mod_time < fetch_interval:
             logger.info("File %s is newer than %s hours. Using cached data...", file_path, fetch_interval)
-            with open(file_path, "r", encoding="utf-8") as file:
+            with open(file_path, encoding="utf-8") as file:
                 return json.load(file)
         else:
             logger.info("File %s is older than %s hours. Fetching new data...", file_path, fetch_interval)
@@ -1583,7 +1609,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
                 asset_info = self._search_single_file(identifier, file_path)
                 if asset_info:
                     return asset_info, file_path
-            except (json.JSONDecodeError, IOError) as e:
+            except (OSError, json.JSONDecodeError) as e:
                 logger.debug("Error reading %s: %s", file_path, e)
                 continue

@@ -1600,7 +1626,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
         """
         logger.debug("Searching for asset %s in %s", identifier, file_path)

-        with open(file_path, "r", encoding="utf-8") as f:
+        with open(file_path, encoding="utf-8") as f:
            data = json.load(f)

        if not isinstance(data, list):
@@ -25,6 +25,19 @@ def fedramp():
     pass


+@click.group(
+    cls=LazyGroup,
+    lazy_subcommands={
+        "import_ssp": "regscale.integrations.public.csam.csam.import_ssp",
+        "import_poam": "regscale.integrations.public.csam.csam.import_poam",
+    },
+    name="csam",
+)
+def csam():
+    """[BETA] Integration with DoJ's CSAM GRC Tool."""
+    pass
+
+
 @click.group(
     cls=LazyGroup,
     lazy_subcommands={
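The new csam group uses the same LazyGroup pattern as the existing fedramp group: subcommands are declared as dotted import paths and only imported when invoked, which keeps CLI startup fast. A generic sketch of how such a lazy click group can be built; this follows the standard click lazy-loading idiom and is not necessarily RegScale's own LazyGroup implementation:

import importlib

import click


class LazyGroup(click.Group):
    # Generic lazy-loading group: subcommands map to "module.path.attribute" strings.
    def __init__(self, *args, lazy_subcommands=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.lazy_subcommands = lazy_subcommands or {}

    def list_commands(self, ctx):
        return sorted(super().list_commands(ctx) + list(self.lazy_subcommands))

    def get_command(self, ctx, cmd_name):
        if cmd_name in self.lazy_subcommands:
            module_path, attr = self.lazy_subcommands[cmd_name].rsplit(".", 1)
            return getattr(importlib.import_module(module_path), attr)
        return super().get_command(ctx, cmd_name)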