regscale-cli 6.16.0.0__py3-none-any.whl → 6.16.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (65)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/application.py +1 -0
  3. regscale/core/app/internal/login.py +1 -1
  4. regscale/core/app/internal/poam_editor.py +1 -1
  5. regscale/core/app/utils/app_utils.py +1 -1
  6. regscale/core/app/utils/parser_utils.py +2 -2
  7. regscale/integrations/commercial/__init__.py +2 -2
  8. regscale/integrations/commercial/ad.py +1 -1
  9. regscale/integrations/commercial/azure/intune.py +1 -0
  10. regscale/integrations/commercial/grype/__init__.py +3 -0
  11. regscale/integrations/commercial/grype/commands.py +72 -0
  12. regscale/integrations/commercial/grype/scanner.py +390 -0
  13. regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
  14. regscale/integrations/commercial/nessus/scanner.py +3 -0
  15. regscale/integrations/commercial/opentext/__init__.py +6 -0
  16. regscale/integrations/commercial/opentext/commands.py +77 -0
  17. regscale/integrations/commercial/opentext/scanner.py +449 -85
  18. regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
  19. regscale/integrations/commercial/sap/tenable/click.py +1 -1
  20. regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
  21. regscale/integrations/commercial/tenablev2/click.py +39 -16
  22. regscale/integrations/commercial/trivy/__init__.py +5 -0
  23. regscale/integrations/commercial/trivy/commands.py +74 -0
  24. regscale/integrations/commercial/trivy/scanner.py +276 -0
  25. regscale/integrations/commercial/wizv2/click.py +9 -21
  26. regscale/integrations/commercial/wizv2/scanner.py +2 -1
  27. regscale/integrations/commercial/wizv2/utils.py +146 -70
  28. regscale/integrations/jsonl_scanner_integration.py +869 -0
  29. regscale/integrations/public/fedramp/fedramp_common.py +4 -4
  30. regscale/integrations/public/fedramp/import_workbook.py +1 -1
  31. regscale/integrations/public/fedramp/inventory_items.py +3 -3
  32. regscale/integrations/public/fedramp/poam/scanner.py +51 -44
  33. regscale/integrations/public/fedramp/ssp_logger.py +6 -6
  34. regscale/integrations/scanner_integration.py +268 -64
  35. regscale/models/app_models/mapping.py +3 -3
  36. regscale/models/integration_models/amazon_models/inspector.py +15 -17
  37. regscale/models/integration_models/aqua.py +1 -5
  38. regscale/models/integration_models/cisa_kev_data.json +100 -10
  39. regscale/models/integration_models/ecr_models/ecr.py +2 -6
  40. regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +7 -4
  41. regscale/models/integration_models/grype_import.py +3 -3
  42. regscale/models/integration_models/prisma.py +3 -3
  43. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  44. regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
  45. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
  46. regscale/models/integration_models/tenable_models/integration.py +46 -10
  47. regscale/models/integration_models/trivy_import.py +1 -1
  48. regscale/models/integration_models/xray.py +1 -1
  49. regscale/models/regscale_models/__init__.py +2 -0
  50. regscale/models/regscale_models/control_implementation.py +18 -44
  51. regscale/models/regscale_models/inherited_control.py +61 -0
  52. regscale/models/regscale_models/issue.py +3 -2
  53. regscale/models/regscale_models/mixins/parent_cache.py +1 -1
  54. regscale/models/regscale_models/regscale_model.py +73 -7
  55. regscale/models/regscale_models/vulnerability.py +61 -8
  56. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/METADATA +3 -3
  57. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/RECORD +62 -56
  58. tests/regscale/core/test_logz.py +8 -0
  59. regscale/integrations/commercial/grype.py +0 -165
  60. regscale/integrations/commercial/opentext/click.py +0 -99
  61. regscale/integrations/commercial/trivy.py +0 -162
  62. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/LICENSE +0 -0
  63. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/WHEEL +0 -0
  64. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/entry_points.txt +0 -0
  65. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/top_level.txt +0 -0

regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py

@@ -74,9 +74,11 @@ class SAPConcurSysDigScanner(ScannerIntegration):
             raise ValueError("Path is required")
 
         logger.info(f"Fetching assets from {path}")
+        self.num_assets_to_process = 0
         with open(path, "r", newline="") as csvfile:
             reader = csv.DictReader(csvfile)
             for row in reader:
+                self.num_assets_to_process += 1
                 yield self.parse_assets(row)
 
     def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
@@ -94,9 +96,11 @@ class SAPConcurSysDigScanner(ScannerIntegration):
 
         logger.info(f"Fetching findings from {path}")
 
+        self.num_findings_to_process = 0
         with open(path, "r", newline="") as csvfile:
             reader = csv.DictReader(csvfile)
             for row in reader:
+                self.num_findings_to_process += 1
                 yield from self.parse_findings(finding=row, kwargs=kwargs)
 
     def parse_findings(self, finding: Dict[str, Any], **kwargs: dict) -> Iterator[IntegrationFinding]:
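Both fetch methods now count rows while streaming them, so the base ScannerIntegration knows the totals (e.g., for progress reporting) without loading the whole CSV into memory. A minimal standalone sketch of the same count-while-yielding pattern (illustrative names, not the RegScale API); note the counter is only final once the generator has been fully consumed:

    import csv
    from typing import Dict, Iterator


    class CountingFetcher:
        """Illustrative: counts CSV rows while still yielding them lazily."""

        def __init__(self) -> None:
            self.num_assets_to_process = 0

        def fetch_assets(self, path: str) -> Iterator[Dict[str, str]]:
            self.num_assets_to_process = 0  # reset so a rerun does not double-count
            with open(path, "r", newline="") as csvfile:
                for row in csv.DictReader(csvfile):
                    self.num_assets_to_process += 1
                    yield row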

regscale/integrations/commercial/sap/tenable/click.py

@@ -16,7 +16,7 @@ logger = logging.getLogger("regscale")
 @regscale_ssp_id()
 @click.option(
     "--path",
-    type=click.STRING,
+    type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
     help="Path to the CSV file containing the SAP Concur data.",
     required=True,
 )
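Moving the option from click.STRING to click.Path shifts validation to parse time: a missing or unreadable file is now rejected with a usage error before the command body ever runs, instead of surfacing later as an open() exception. A quick illustration with a hypothetical command, using click's real test runner:

    import click
    from click.testing import CliRunner


    @click.command()
    @click.option(
        "--path",
        type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
        required=True,
    )
    def ingest(path: str) -> None:
        click.echo(f"processing {path}")


    result = CliRunner().invoke(ingest, ["--path", "does-not-exist.csv"])
    print(result.exit_code)  # 2: click rejects the value before ingest() runs
    print(result.output)     # usage error noting the path does not exist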

regscale/integrations/commercial/sap/tenable/scanner.py

@@ -30,7 +30,8 @@ class SAPConcurScanner(ScannerIntegration):
         "low": regscale_models.IssueSeverity.Low,
     }
 
-    def parse_assets(self, asset: Dict[str, Any]) -> IntegrationAsset:
+    @staticmethod
+    def parse_assets(asset: Dict[str, Any]) -> IntegrationAsset:
         """
         Parse a single asset from the vulnerability data.
 
@@ -88,9 +89,11 @@ class SAPConcurScanner(ScannerIntegration):
             raise ValueError("Path is required")
 
         logger.info(f"Fetching assets from {path}")
+        self.num_assets_to_process = 0
         with open(path, "r", newline="") as csvfile:
             reader = csv.DictReader(csvfile)
             for row in reader:
+                self.num_assets_to_process += 1
                 yield self.parse_assets(row)
 
     def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
@@ -108,9 +111,11 @@ class SAPConcurScanner(ScannerIntegration):
 
         logger.info(f"Fetching findings from {path}")
 
+        self.num_findings_to_process = 0
         with open(path, "r", newline="") as csvfile:
             reader = csv.DictReader(csvfile)
             for row in reader:
+                self.num_assets_to_process += 1
                 yield from self.parse_findings(row)
 
     def parse_findings(self, finding: Dict[str, Any]) -> Iterator[IntegrationFinding]:
@@ -132,8 +137,9 @@ class SAPConcurScanner(ScannerIntegration):
         for cve in cves:
            yield self._create_finding(finding, severity, cve.strip())
 
+    @staticmethod
     def _create_finding(
-        self, finding: Dict[str, Any], severity: regscale_models.IssueSeverity, cve: str = ""
+        finding: Dict[str, Any], severity: regscale_models.IssueSeverity, cve: str = ""
     ) -> IntegrationFinding:
         """
         Create an IntegrationFinding object from the given data.
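parse_assets and _create_finding never used self, so the @staticmethod conversion is behavior-neutral: Python resolves static methods through instances as well, so the existing self._create_finding(...) call site keeps working. Note also that fetch_findings here increments num_assets_to_process where the SysDig counterpart above increments num_findings_to_process, which appears to be a slip carried into the release. A minimal sketch of the static-method point:

    class Example:
        @staticmethod
        def shout(text: str) -> str:
            return text.upper()


    print(Example.shout("hi"))    # callable on the class
    print(Example().shout("hi"))  # and still callable through an instance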

regscale/models/integration_models/tenable_models/integration.py

@@ -3,6 +3,7 @@
 """Tenable integration for RegScale CLI"""
 
 import queue
+from concurrent.futures import wait
 from typing import TYPE_CHECKING, Any
 
 from regscale.integrations.integration_override import IntegrationOverride

@@ -723,16 +724,20 @@ def fetch_vulns(query_id: int = 0, regscale_ssp_id: int = 0):
     # make sure folder exists
     with tempfile.TemporaryDirectory() as temp_dir:
         logger.info("Saving Tenable SC data to disk...%s", temp_dir)
-        consume_iterator_to_file(iterator=vulns, dir_path=Path(temp_dir), scanner=sc)
+        num_assets_processed, num_findings_to_process = consume_iterator_to_file(
+            iterator=vulns, dir_path=Path(temp_dir), scanner=sc
+        )
        iterables = tenable_dir_to_tuple_generator(Path(temp_dir))
        try:
            sc.sync_assets(
                plan_id=regscale_ssp_id,
                integration_assets=(asset for sublist in iterables[0] for asset in sublist),
+               asset_count=num_assets_processed,
            )
            sc.sync_findings(
                plan_id=regscale_ssp_id,
                integration_findings=(finding for sublist in iterables[1] for finding in sublist),
+               finding_count=num_findings_to_process,
            )
        except IndexError as ex:
            logger.error("Error processing Tenable SC data: %s", ex)

@@ -754,23 +759,26 @@ def tenable_dir_to_tuple_generator(dir_path: Path):
     return assets_gen, findings_gen
 
 
-def consume_iterator_to_file(iterator: AnalysisResultsIterator, dir_path: Path, scanner: SCIntegration) -> int:
+def consume_iterator_to_file(iterator: AnalysisResultsIterator, dir_path: Path, scanner: SCIntegration) -> tuple:
     """
     Consume an iterator and write the results to a file
 
     :param AnalysisResultsIterator iterator: Tenable SC iterator
     :param Path dir_path: The directory to save the pickled files
     :param SCIntegration scanner: Tenable SC Integration object
-    :rtype: int
-    :return: The total count of items processed
+    :rtype: tuple
+    :return: The total count of assets and findings processed
     """
     app = Application()
     logger.info("Consuming Tenable SC iterator...")
     override = IntegrationOverride(app)
+    asset_count = 0
+    findings_count = 0
     total_count = ThreadSafeCounter()
     page_number = ThreadSafeCounter()
     rec_count = ThreadSafeCounter()
     process_list = queue.Queue()
+    futures_lst = []
     with ThreadPoolExecutor(max_workers=5) as executor:
         for dat in iterator:
             total_count.increment()

@@ -778,19 +786,32 @@ def consume_iterator_to_file(iterator: AnalysisResultsIterator, dir_path: Path,
             rec_count.increment()
             if rec_count.value == len(iterator.page):
                 page_number.increment()
-                executor.submit(
-                    process_sc_chunk,
-                    app=app,
-                    vulns=pop_queue(queue=process_list, queue_len=len(iterator.page)),
-                    page=page_number.value,
-                    dir_path=dir_path,
-                    sc=scanner,
-                    override=override,
+                futures_lst.append(
+                    executor.submit(
+                        process_sc_chunk,
+                        app=app,
+                        vulns=pop_queue(queue=process_list, queue_len=len(iterator.page)),
+                        page=page_number.value,
+                        dir_path=dir_path,
+                        sc=scanner,
+                        override=override,
+                    )
                 )
                 rec_count.set(0)
+    # Collect results from all threads
+    asset_count = 0
+    findings_count = 0
+    # Wait for completion
+    wait(futures_lst)
+
+    for future in futures_lst:
+        findings, assets = future.result()
+        asset_count += assets
+        findings_count += findings
+
     if total_count.value == 0:
         logger.warning("No Tenable SC data found.")
-    return total_count.value
+    return asset_count, findings_count
 
 
 def pop_queue(queue: queue.Queue, queue_len: int) -> list:
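Where consume_iterator_to_file previously fired executor.submit() and discarded the result, it now keeps every future so the per-chunk (findings, assets) tuples can be summed after wait(). A condensed standalone sketch of that pattern, with stand-in work and illustrative names:

    from concurrent.futures import ThreadPoolExecutor, wait


    def process_chunk(chunk: list) -> tuple:
        # stand-in for process_sc_chunk: returns (num_findings, num_assets)
        return len(chunk), len({item["host"] for item in chunk})


    chunks = [[{"host": "a"}, {"host": "b"}], [{"host": "a"}]]
    futures = []
    with ThreadPoolExecutor(max_workers=5) as executor:
        for chunk in chunks:
            futures.append(executor.submit(process_chunk, chunk))

    wait(futures)  # explicit rather than necessary: the with-block already joined
    findings_count = asset_count = 0
    for future in futures:
        findings, assets = future.result()
        findings_count += findings
        asset_count += assets
    print(findings_count, asset_count)  # 3 3

As in the diff, calling wait() after the executor context exits is belt-and-braces: leaving the with-block already blocks until all submitted work finishes.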

@@ -816,12 +837,13 @@ def pop_queue(queue: queue.Queue, queue_len: int) -> list:
     return retrieved_items
 
 
-def process_sc_chunk(**kwargs) -> None:
+def process_sc_chunk(**kwargs) -> tuple:
     """
     Process Tenable SC chunk
 
     :param kwargs: Keyword arguments
-    :rtype: None
+    :rtype: tuple
+    :return: Tuple of findings and assets
     """
     # iterator.page, iterator.page_count, file_path, query_id, ssp_id
     integration_mapping = kwargs.get("override")

@@ -833,7 +855,7 @@ def process_sc_chunk(**kwargs) -> None:
     tenable_sc: SCIntegration = kwargs.get("sc")
     thread = current_thread()
     if not len(vulns):
-        return
+        return (0, 0)
     # I can't add a to-do thanks to sonarlint, but we need to add CVE lookup from plugin id
     # append file to path
     # Process to RegScale

@@ -857,6 +879,7 @@ def process_sc_chunk(**kwargs) -> None:
         kwargs.get("page"),
     )
     logger.debug(f"Completed thread: name={thread.name}, idnet={get_ident()}, id={get_native_id()}")
+    return (len(findings), len(assets))
 
 
 def get_last_pull_epoch(regscale_ssp_id: int) -> int:

regscale/integrations/commercial/trivy/__init__.py (new file)

@@ -0,0 +1,5 @@
+"""
+Module for processing Trivy scan results and loading them into RegScale.
+"""
+
+from .commands import import_scans

regscale/integrations/commercial/trivy/commands.py (new file)

@@ -0,0 +1,74 @@
+"""
+This module contains the command line interface for the Trivy scanner integration.
+"""
+
+from datetime import datetime
+from typing import Optional
+
+import click
+from pathlib import Path
+
+from regscale.models.integration_models.flat_file_importer import FlatFileImporter
+
+
+@click.group()
+def trivy():
+    """Performs actions from the Trivy scanner integration."""
+    pass
+
+
+@trivy.command("import_scans")
+@FlatFileImporter.common_scanner_options(
+    message="File path to the folder containing JFrog XRay .json files to process to RegScale.",
+    prompt="File path for Grype files",
+    import_name="grype",
+)
+@click.option(
+    "--destination",
+    "-d",
+    help="Path to download the files to. If not provided, files will be downloaded to the temporary directory.",
+    type=click.Path(exists=True, dir_okay=True),
+    required=False,
+)
+@click.option(
+    "--file_pattern",
+    "-fp",
+    help="[Optional] File pattern to match (e.g., '*.json')",
+    required=False,
+)
+def import_scans(
+    destination: Optional[Path],
+    file_pattern: str,
+    folder_path: Path,
+    regscale_ssp_id: int,
+    scan_date: datetime,
+    mappings_path: Path,
+    disable_mapping: bool,
+    s3_bucket: str,
+    s3_prefix: str,
+    aws_profile: str,
+    upload_file: bool,
+) -> None:
+    """
+    Process Trivy scan results from a folder containing Trivy scan files and load into RegScale.
+    """
+    from regscale.integrations.commercial.trivy.scanner import TrivyIntegration
+
+    if s3_bucket and not folder_path:
+        folder_path = s3_bucket
+    ti = TrivyIntegration(
+        plan_id=regscale_ssp_id,
+        file_path=str(folder_path) if folder_path else None,
+        s3_bucket=s3_bucket,
+        s3_prefix=s3_prefix,
+        aws_profile=aws_profile,
+        scan_date=scan_date,
+        mappings_path=str(mappings_path) if mappings_path else None,
+        disable_mapping=disable_mapping,
+        download_destination=destination,
+        file_pattern=file_pattern,
+        read_files_only=True,
+        upload_file=upload_file,
+    )
+
+    ti.sync_assets_and_findings()
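The group is registered as trivy with a single import_scans subcommand. Most of its options come from FlatFileImporter.common_scanner_options, and note that the message, prompt, and import_name values above still read "JFrog XRay"/"grype", apparently carried over from the module this file was adapted from. One safe way to inspect the assembled interface in-process is click's test runner (a sketch, assuming regscale is importable):

    from click.testing import CliRunner

    from regscale.integrations.commercial.trivy.commands import trivy

    result = CliRunner().invoke(trivy, ["import_scans", "--help"])
    print(result.output)  # lists --destination, --file_pattern, plus the common scanner options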

regscale/integrations/commercial/trivy/scanner.py (new file)

@@ -0,0 +1,276 @@
+"""
+Trivy Scanner Integration for RegScale.
+
+This module provides integration between Trivy scanner and RegScale,
+allowing you to import Trivy scan results into RegScale as assets and findings.
+"""
+
+import logging
+import os
+from typing import Any, Dict, List, Optional, Union, Tuple, TypeVar
+
+from pathlib import Path
+
+from regscale.core.app.utils.parser_utils import safe_datetime_str
+from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.models import IssueSeverity, AssetStatus, IssueStatus
+
+logger = logging.getLogger("regscale")
+
+# Define generic types for items that can be written to file
+T = TypeVar("T")
+ItemType = TypeVar("ItemType", IntegrationAsset, IntegrationFinding)
+
+
+class TrivyIntegration(JSONLScannerIntegration):
+    """Class for handling Trivy scanner integration."""
+
+    title: str = "Trivy"
+    asset_identifier_field: str = "otherTrackingNumber"
+    finding_severity_map: Dict[str, Any] = {
+        "CRITICAL": IssueSeverity.Critical.value,
+        "HIGH": IssueSeverity.High.value,
+        "MEDIUM": IssueSeverity.Moderate.value,
+        "LOW": IssueSeverity.Low.value,
+        "UNKNOWN": IssueSeverity.High.value,
+        "NEGLIGIBLE": IssueSeverity.High.value,
+    }
+
+    # Constants for file paths
+    ASSETS_FILE = "./artifacts/trivy_assets.jsonl"
+    FINDINGS_FILE = "./artifacts/trivy_findings.jsonl"
+
+    def __init__(self, *args, **kwargs):
+        """
+        Initialize the TrivyIntegration object.
+
+        :param Any kwargs: Keyword arguments
+        """
+        kwargs["read_files_only"] = True
+        kwargs["file_pattern"] = "*.json"
+        self.disable_mapping = kwargs["disable_mapping"] = True
+        super().__init__(*args, **kwargs)
+
+    def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
+        """
+        Check if the provided data is a valid Trivy scan result.
+
+        Validates that the data is from a Trivy JSON file with the required structure.
+        Logs a warning with the file path and returns (False, None) if invalid.
+
+        :param Any data: Data parsed from the file (string content when read_files_only is True, or file path otherwise)
+        :param Union[Path, str] file_path: Path to the file being processed
+        :return: Tuple of (is_valid, validated_data) where validated_data is the parsed JSON if valid
+        :rtype: Tuple[bool, Optional[Dict[str, Any]]]
+        """
+
+        # Check Trivy-specific structure
+        if not isinstance(data, dict):
+            logger.warning(f"File {file_path} is not a dict, skipping")
+            return False, None
+
+        if "Results" not in data:
+            logger.warning(f"File {file_path} has no 'Results' key, skipping")
+            return False, None
+
+        if not isinstance(data.get("Results"), list):
+            logger.warning(f"File {file_path} 'Results' is not a list, skipping")
+            return False, None
+
+        if "Metadata" not in data:
+            logger.warning(f"File {file_path} has no 'Metadata' key, skipping")
+            return False, None
+
+        return True, data
+
+    def parse_asset(self, file_path: Union[Path, str], data: Dict[str, Any]) -> IntegrationAsset:
+        """
+        Parse a single asset from Trivy scan data.
+
+        :param Union[Path, str] file_path: Path to the file containing the asset data
+        :param Dict[str, Any] data: The parsed JSON data
+        :return: IntegrationAsset object
+        :rtype: IntegrationAsset
+        """
+        # Convert path to string if it's not already
+        file_path_str = str(file_path)
+
+        # Get metadata and OS information
+        metadata = data.get("Metadata", {})
+        os_data = metadata.get("OS", {})
+
+        # Determine identifier from file name or data
+        if "sha256-" in file_path_str:
+            # Extract the sha256 from the filename
+            base_name = os.path.basename(file_path_str)
+            identifier = "sha256-" + base_name.split("sha256-")[1].split(".json")[0]
+        else:
+            identifier = metadata.get("ImageID", "Unknown")
+
+        # Get artifact name for other tracking number and fqdn
+        artifact_name = data.get("ArtifactName", identifier)
+
+        # Create and return the asset
+        return IntegrationAsset(
+            identifier=identifier,
+            name=identifier,
+            ip_address="0.0.0.0",
+            cpu=0,
+            ram=0,
+            status=AssetStatus.Active,
+            asset_type="Other",
+            asset_category="Software",
+            operating_system=f"{os_data.get('Family', '')} {os_data.get('Name', '')}",
+            notes=f"{os.path.basename(file_path_str)}",
+            other_tracking_number=artifact_name,
+            parent_id=self.plan_id,
+            parent_module="securityplans",
+            fqdn=artifact_name,
+        )
+
+    def _get_findings_data_from_file(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
+        """
+        Extract findings data from Trivy file data.
+
+        :param Dict[str, Any] data: The data from the Trivy file
+        :return: List of finding items
+        :rtype: List[Dict[str, Any]]
+        """
+        if not data or not isinstance(data, dict):
+            return []
+
+        findings = []
+
+        # Process all results
+        for result in data.get("Results", []):
+            if not isinstance(result, dict):
+                continue
+
+            # Extract vulnerabilities from the result
+            vulnerabilities = result.get("Vulnerabilities", [])
+            if not isinstance(vulnerabilities, list):
+                continue
+
+            # Add each vulnerability to the findings list
+            findings.extend(vulnerabilities)
+
+        return findings
+
+    def parse_finding(self, asset_identifier: str, data: Dict[str, Any], item: Dict[str, Any]) -> IntegrationFinding:
+        """
+        Parse a single finding from Trivy scan data.
+
+        :param str asset_identifier: The identifier of the asset this finding belongs to
+        :param Dict[str, Any] data: The parsed JSON data (for metadata)
+        :param Dict[str, Any] item: The finding data
+        :return: IntegrationFinding object
+        :rtype: IntegrationFinding
+        """
+        # Get scan date from the finding or use current time
+        scan_date = safe_datetime_str(data.get("CreatedAt"))
+
+        # Process severity
+        severity_str = item.get("Severity", "UNKNOWN")
+        severity_value = self.finding_severity_map.get(severity_str.upper(), IssueSeverity.High.value)
+        try:
+            severity = IssueSeverity(severity_value)
+        except ValueError:
+            severity = IssueSeverity.High
+
+        # Get CVSS fields
+        cvss_fields = self._get_cvss_score(item)
+
+        # Get data source information
+        data_source = item.get("DataSource", {})
+        plugin_name = data_source.get("Name", self.title)
+        plugin_id = data_source.get("ID", self.title)
+
+        metadata = data.get("Metadata", {})
+        os_family = metadata.get("OS", {}).get("Family", "")
+        os_name = metadata.get("OS", {}).get("Name", "")
+        if os_family and os_name == "unknown":
+            affected_os = "unknown"
+        else:
+            affected_os = f"{os_family} {os_name}"
+
+        # Set image digest from artifact name
+        artifact_name = data.get("ArtifactName", "")
+        image_digest = ""
+        if "@" in artifact_name:
+            image_digest = artifact_name.split("@")[1]
+
+        build_version = (
+            metadata.get("ImageConfig", {}).get("config", {}).get("Labels", {}).get("io.buildah.version", "")
+        )
+        pkg_name = item.get("PkgName", "")
+        cve = item.get("VulnerabilityID", "")
+
+        # Create and return the finding
+        return IntegrationFinding(
+            title=f"{cve}: {pkg_name}" if cve else pkg_name,
+            description=item.get("Description", "No description available"),
+            severity=severity,
+            status=IssueStatus.Open,
+            cvss_v3_score=cvss_fields.get("V3Score"),
+            cvss_v3_vector=cvss_fields.get("V3Vector") or "",
+            cvss_v2_score=cvss_fields.get("V2Score"),
+            cvss_v2_vector=cvss_fields.get("V2Vector") or "",
+            plugin_name=plugin_name,
+            plugin_id=plugin_id,
+            asset_identifier=asset_identifier,
+            cve=cve,
+            first_seen=scan_date,
+            last_seen=scan_date,
+            scan_date=scan_date,
+            date_created=scan_date,
+            category="Software",
+            control_labels=[],
+            installed_versions=item.get("InstalledVersion", ""),
+            affected_os=affected_os,
+            affected_packages=item.get("PkgID", ""),
+            image_digest=image_digest,
+            package_path=item.get("PkgIdentifier", {}).get("PURL", ""),
+            build_version=build_version,
+            fixed_versions=item.get("FixedVersion", ""),
+            fix_status=item.get("Status", ""),
+        )
+
+    @staticmethod
+    def _get_cvss_score(finding: Dict) -> dict:
+        """
+        Get the CVSS v3 and v2 scores and vectors from the cvss data.
+
+        :param Dict finding: The cvss data
+        :return: The CVSS fields
+        :rtype: dict
+        """
+        values = {
+            "V3Score": None,
+            "V2Score": None,
+            "V3Vector": None,
+            "V2Vector": None,
+        }
+
+        if cvs := finding.get("CVSS"):
+            if nvd := cvs.get("nvd"):
+                values["V3Score"] = nvd.get("V3Score", None)
+                values["V3Vector"] = nvd.get("V3Vector", None)
+                values["V2Score"] = nvd.get("V2Score", None)
+                values["V2Vector"] = nvd.get("V2Vector", None)
+            elif redhat := cvs.get("redhat"):
+                values["V3Score"] = redhat.get("V3Score", None)
+                values["V3Vector"] = redhat.get("V3Vector", None)
+                values["V2Score"] = redhat.get("V2Score", None)
+                values["V2Vector"] = redhat.get("V2Vector", None)
+            elif ghsa := cvs.get("ghsa"):
+                values["V3Score"] = ghsa.get("V3Score", None)
+                values["V3Vector"] = ghsa.get("V3Vector", None)
+                values["V2Score"] = ghsa.get("V2Score", None)
+                values["V2Vector"] = ghsa.get("V2Vector", None)
+            elif bitnami := cvs.get("bitnami"):
+                values["V3Score"] = bitnami.get("V3Score", None)
+                values["V3Vector"] = bitnami.get("V3Vector", None)
+                values["V2Score"] = bitnami.get("V2Score", None)
+                values["V2Vector"] = bitnami.get("V2Vector", None)
+        return values
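Taken together, is_valid_file and _get_cvss_score imply the minimal input shape: a JSON object carrying Metadata plus a Results list whose entries hold Vulnerabilities, each optionally scored under CVSS per source, with nvd taking precedence over redhat, ghsa, and bitnami. A minimal document that would pass validation, with the precedence mirrored in a standalone helper (field values are assumed examples, not real scan output):

    minimal_report = {
        "ArtifactName": "registry.example/app@sha256:abc123",  # assumed
        "CreatedAt": "2025-01-01T00:00:00Z",
        "Metadata": {"OS": {"Family": "debian", "Name": "12"}},
        "Results": [
            {
                "Vulnerabilities": [
                    {
                        "VulnerabilityID": "CVE-2024-0001",
                        "PkgName": "openssl",
                        "Severity": "HIGH",
                        "CVSS": {
                            "nvd": {"V3Score": 7.5, "V3Vector": "CVSS:3.1/AV:N/..."},
                            "redhat": {"V3Score": 7.1, "V3Vector": "CVSS:3.1/AV:N/..."},
                        },
                    }
                ]
            }
        ],
    }


    def pick_cvss(cvss: dict) -> dict:
        # mirrors _get_cvss_score's source precedence: nvd > redhat > ghsa > bitnami
        for source in ("nvd", "redhat", "ghsa", "bitnami"):
            if entry := cvss.get(source):
                return entry
        return {}


    vuln = minimal_report["Results"][0]["Vulnerabilities"][0]
    print(pick_cvss(vuln["CVSS"])["V3Score"])  # 7.5 -- nvd wins over redhat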

regscale/integrations/commercial/wizv2/click.py

@@ -394,13 +394,6 @@ def add_report_evidence(
     default="NIST800-53R5",
     required=True,
 )
-@click.option(  # type: ignore
-    "--include_not_implemented",
-    "-n",
-    is_flag=True,
-    help="Include not implemented controls",
-    default=False,
-)
 def sync_compliance(
     wiz_project_id,
     regscale_id,

@@ -409,21 +402,16 @@ def sync_compliance(
     client_secret,
     catalog_id,
     framework,
-    include_not_implemented,
 ):
     """Sync compliance posture from Wiz to RegScale"""
-    from regscale.core.app.utils.app_utils import create_progress_object
     from regscale.integrations.commercial.wizv2.utils import _sync_compliance
 
-    compliance_job_progress = create_progress_object()
-    with compliance_job_progress:
-        _sync_compliance(
-            wiz_project_id=wiz_project_id,
-            regscale_id=regscale_id,
-            regscale_module=regscale_module,
-            include_not_implemented=include_not_implemented,
-            client_id=client_id,
-            client_secret=client_secret,
-            catalog_id=catalog_id,
-            framework=framework,
-        )
+    _sync_compliance(
+        wiz_project_id=wiz_project_id,
+        regscale_id=regscale_id,
+        regscale_module=regscale_module,
+        client_id=client_id,
+        client_secret=client_secret,
+        catalog_id=catalog_id,
+        framework=framework,
+    )
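The progress wrapper was removed from the CLI layer rather than dropped outright; presumably _sync_compliance now manages its own progress display (its utils.py changes, +146 -70, are not shown in these hunks). For context, the deleted wrapper followed the usual rich progress pattern, roughly as below (create_progress_object is assumed to return a configured rich Progress):

    from rich.progress import Progress

    with Progress() as progress:
        task = progress.add_task("Syncing compliance posture...", total=1)
        # ... perform the sync work here ...
        progress.update(task, advance=1)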

regscale/integrations/commercial/wizv2/scanner.py

@@ -101,6 +101,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
             raise ValueError("Wiz project ID is required")
 
         logger.info("Fetching Wiz findings...")
+        self.num_findings_to_process = 0
 
         for wiz_vulnerability_type in self.get_query_types(project_id=project_id):
             logger.info("Fetching Wiz findings for %s...", wiz_vulnerability_type["type"])

@@ -114,8 +115,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
                 topic_key=wiz_vulnerability_type["topic_key"],
                 file_path=wiz_vulnerability_type["file_path"],
             )
+            self.num_findings_to_process += len(nodes)
             yield from self.parse_findings(nodes, wiz_vulnerability_type["type"])
-
         logger.info("Finished fetching Wiz findings.")
 
     def parse_findings(