regscale-cli 6.16.0.0__py3-none-any.whl → 6.16.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli has been flagged as potentially problematic; consult the package registry's advisory page for details.

Files changed (45)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/application.py +1 -0
  3. regscale/core/app/utils/app_utils.py +1 -1
  4. regscale/core/app/utils/parser_utils.py +2 -2
  5. regscale/integrations/commercial/azure/intune.py +1 -0
  6. regscale/integrations/commercial/nessus/scanner.py +3 -0
  7. regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
  8. regscale/integrations/commercial/sap/tenable/click.py +1 -1
  9. regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
  10. regscale/integrations/commercial/tenablev2/click.py +39 -16
  11. regscale/integrations/commercial/wizv2/click.py +9 -21
  12. regscale/integrations/commercial/wizv2/scanner.py +2 -1
  13. regscale/integrations/commercial/wizv2/utils.py +145 -69
  14. regscale/integrations/public/fedramp/import_workbook.py +1 -1
  15. regscale/integrations/public/fedramp/poam/scanner.py +51 -44
  16. regscale/integrations/public/fedramp/ssp_logger.py +6 -6
  17. regscale/integrations/scanner_integration.py +96 -23
  18. regscale/models/app_models/mapping.py +3 -3
  19. regscale/models/integration_models/amazon_models/inspector.py +15 -17
  20. regscale/models/integration_models/aqua.py +1 -5
  21. regscale/models/integration_models/cisa_kev_data.json +85 -10
  22. regscale/models/integration_models/ecr_models/ecr.py +2 -6
  23. regscale/models/integration_models/flat_file_importer.py +7 -4
  24. regscale/models/integration_models/grype_import.py +3 -3
  25. regscale/models/integration_models/prisma.py +3 -3
  26. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  27. regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
  28. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
  29. regscale/models/integration_models/tenable_models/integration.py +4 -3
  30. regscale/models/integration_models/trivy_import.py +1 -1
  31. regscale/models/integration_models/xray.py +1 -1
  32. regscale/models/regscale_models/__init__.py +2 -0
  33. regscale/models/regscale_models/control_implementation.py +18 -44
  34. regscale/models/regscale_models/inherited_control.py +61 -0
  35. regscale/models/regscale_models/issue.py +3 -2
  36. regscale/models/regscale_models/mixins/parent_cache.py +1 -1
  37. regscale/models/regscale_models/regscale_model.py +72 -6
  38. regscale/models/regscale_models/vulnerability.py +40 -8
  39. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/METADATA +1 -1
  40. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/RECORD +45 -44
  41. tests/regscale/core/test_logz.py +8 -0
  42. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/LICENSE +0 -0
  43. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/WHEEL +0 -0
  44. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/entry_points.txt +0 -0
  45. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "6.16.0.0"
1
+ __version__ = "6.16.1.0"
@@ -86,6 +86,7 @@ class Application(metaclass=Singleton):
86
86
  "dependabotRepo": "<myGithubRepoNameGoesHere>",
87
87
  "dependabotToken": "<myGithubPersonalAccessTokenGoesHere>",
88
88
  "domain": "https://regscale.yourcompany.com/",
89
+ "disableCache": False,
89
90
  "evidenceFolder": "./evidence",
90
91
  "passScore": 80,
91
92
  "failScore": 30,
@@ -642,7 +642,7 @@ def save_to_json(file: Path, data: Any, output_log: bool) -> None:
642
642
  with open(file, "w", encoding="utf-8") as outfile:
643
643
  outfile.write(str(data))
644
644
  if output_log:
645
- logger.info("Data successfully saved to %s", file.name)
645
+ logger.info("Data successfully saved to %s", file.absolute())
646
646
 
647
647
 
648
648
  def save_data_to(file: Path, data: Any, output_log: bool = True, transpose_data: bool = True) -> None:
@@ -23,7 +23,7 @@ def safe_float(value: Any, default: float = 0.0, field_name: str = "value") -> f
23
23
  try:
24
24
  return float(value)
25
25
  except (ValueError, TypeError):
26
- logger.warning(f"Invalid float {field_name}: {value}. Defaulting to {default}")
26
+ logger.debug(f"Invalid float {field_name}: {value}. Defaulting to {default}")
27
27
  return default
28
28
 
29
29
 
@@ -43,7 +43,7 @@ def safe_int(value: Any, default: int = 0, field_name: str = "value") -> int:
43
43
  try:
44
44
  return int(value)
45
45
  except (ValueError, TypeError):
46
- logger.warning(f"Invalid integer {field_name}: {value}. Defaulting to {default}")
46
+ logger.debug(f"Invalid integer {field_name}: {value}. Defaulting to {default}")
47
47
  return default
48
48
 
49
49
 
@@ -73,6 +73,7 @@ def sync_intune(regscale_id: int, regscale_module: str):
73
73
  )
74
74
  in_scan.sync_findings(
75
75
  plan_id=regscale_id,
76
+ finding_count=len(devices),
76
77
  integration_findings=fetch_intune_findings(devices=devices),
77
78
  )
78
79
  else:
@@ -79,6 +79,7 @@ class NessusIntegration(ScannerIntegration):
79
79
  self.log_file_warning_and_exit(path)
80
80
  if not self.check_collection(file_collection, path):
81
81
  return
82
+ self.num_findings_to_process = 0
82
83
  for file in iterate_files(file_collection):
83
84
  content = read_file(file)
84
85
  root = ET.fromstring(content)
@@ -87,6 +88,7 @@ class NessusIntegration(ScannerIntegration):
87
88
  for nessus_vulnerability in root.iterfind(f"./Report/ReportHost[@name='{asset_name}']/ReportItem"):
88
89
  parsed_vulnerability = self.parse_finding(nessus_vulnerability, asset_name)
89
90
  if parsed_vulnerability:
91
+ self.num_findings_to_process += 1
90
92
  yield parsed_vulnerability
91
93
  self.move_files(file_collection)
92
94
 
@@ -214,6 +216,7 @@ class NessusIntegration(ScannerIntegration):
214
216
  tree = ElementTree(root)
215
217
  assets = nfr.scan.report_hosts(root)
216
218
  cpe_items = cpe_xml_to_dict(tree) # type: ignore
219
+ self.num_assets_to_process = len(assets)
217
220
  for asset in assets:
218
221
  asset_properties = self.get_asset_properties(root, cpe_items, asset)
219
222
  parsed_asset = self.parse_asset(asset_properties)
@@ -74,9 +74,11 @@ class SAPConcurSysDigScanner(ScannerIntegration):
74
74
  raise ValueError("Path is required")
75
75
 
76
76
  logger.info(f"Fetching assets from {path}")
77
+ self.num_assets_to_process = 0
77
78
  with open(path, "r", newline="") as csvfile:
78
79
  reader = csv.DictReader(csvfile)
79
80
  for row in reader:
81
+ self.num_assets_to_process += 1
80
82
  yield self.parse_assets(row)
81
83
 
82
84
  def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
@@ -94,9 +96,11 @@ class SAPConcurSysDigScanner(ScannerIntegration):
94
96
 
95
97
  logger.info(f"Fetching findings from {path}")
96
98
 
99
+ self.num_findings_to_process = 0
97
100
  with open(path, "r", newline="") as csvfile:
98
101
  reader = csv.DictReader(csvfile)
99
102
  for row in reader:
103
+ self.num_findings_to_process += 1
100
104
  yield from self.parse_findings(finding=row, kwargs=kwargs)
101
105
 
102
106
  def parse_findings(self, finding: Dict[str, Any], **kwargs: dict) -> Iterator[IntegrationFinding]:
@@ -16,7 +16,7 @@ logger = logging.getLogger("regscale")
16
16
  @regscale_ssp_id()
17
17
  @click.option(
18
18
  "--path",
19
- type=click.STRING,
19
+ type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
20
20
  help="Path to the CSV file containing the SAP Concur data.",
21
21
  required=True,
22
22
  )
@@ -30,7 +30,8 @@ class SAPConcurScanner(ScannerIntegration):
30
30
  "low": regscale_models.IssueSeverity.Low,
31
31
  }
32
32
 
33
- def parse_assets(self, asset: Dict[str, Any]) -> IntegrationAsset:
33
+ @staticmethod
34
+ def parse_assets(asset: Dict[str, Any]) -> IntegrationAsset:
34
35
  """
35
36
  Parse a single asset from the vulnerability data.
36
37
 
@@ -88,9 +89,11 @@ class SAPConcurScanner(ScannerIntegration):
88
89
  raise ValueError("Path is required")
89
90
 
90
91
  logger.info(f"Fetching assets from {path}")
92
+ self.num_assets_to_process = 0
91
93
  with open(path, "r", newline="") as csvfile:
92
94
  reader = csv.DictReader(csvfile)
93
95
  for row in reader:
96
+ self.num_assets_to_process += 1
94
97
  yield self.parse_assets(row)
95
98
 
96
99
  def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
@@ -108,9 +111,11 @@ class SAPConcurScanner(ScannerIntegration):
108
111
 
109
112
  logger.info(f"Fetching findings from {path}")
110
113
 
114
+ self.num_findings_to_process = 0
111
115
  with open(path, "r", newline="") as csvfile:
112
116
  reader = csv.DictReader(csvfile)
113
117
  for row in reader:
118
+ self.num_assets_to_process += 1
114
119
  yield from self.parse_findings(row)
115
120
 
116
121
  def parse_findings(self, finding: Dict[str, Any]) -> Iterator[IntegrationFinding]:
@@ -132,8 +137,9 @@ class SAPConcurScanner(ScannerIntegration):
132
137
  for cve in cves:
133
138
  yield self._create_finding(finding, severity, cve.strip())
134
139
 
140
+ @staticmethod
135
141
  def _create_finding(
136
- self, finding: Dict[str, Any], severity: regscale_models.IssueSeverity, cve: str = ""
142
+ finding: Dict[str, Any], severity: regscale_models.IssueSeverity, cve: str = ""
137
143
  ) -> IntegrationFinding:
138
144
  """
139
145
  Create an IntegrationFinding object from the given data.
@@ -3,6 +3,7 @@
3
3
  """Tenable integration for RegScale CLI"""
4
4
 
5
5
  import queue
6
+ from concurrent.futures import wait
6
7
  from typing import TYPE_CHECKING, Any
7
8
 
8
9
  from regscale.integrations.integration_override import IntegrationOverride
@@ -723,16 +724,20 @@ def fetch_vulns(query_id: int = 0, regscale_ssp_id: int = 0):
723
724
  # make sure folder exists
724
725
  with tempfile.TemporaryDirectory() as temp_dir:
725
726
  logger.info("Saving Tenable SC data to disk...%s", temp_dir)
726
- consume_iterator_to_file(iterator=vulns, dir_path=Path(temp_dir), scanner=sc)
727
+ num_assets_processed, num_findings_to_process = consume_iterator_to_file(
728
+ iterator=vulns, dir_path=Path(temp_dir), scanner=sc
729
+ )
727
730
  iterables = tenable_dir_to_tuple_generator(Path(temp_dir))
728
731
  try:
729
732
  sc.sync_assets(
730
733
  plan_id=regscale_ssp_id,
731
734
  integration_assets=(asset for sublist in iterables[0] for asset in sublist),
735
+ asset_count=num_assets_processed,
732
736
  )
733
737
  sc.sync_findings(
734
738
  plan_id=regscale_ssp_id,
735
739
  integration_findings=(finding for sublist in iterables[1] for finding in sublist),
740
+ finding_count=num_findings_to_process,
736
741
  )
737
742
  except IndexError as ex:
738
743
  logger.error("Error processing Tenable SC data: %s", ex)
@@ -754,23 +759,26 @@ def tenable_dir_to_tuple_generator(dir_path: Path):
754
759
  return assets_gen, findings_gen
755
760
 
756
761
 
757
- def consume_iterator_to_file(iterator: AnalysisResultsIterator, dir_path: Path, scanner: SCIntegration) -> int:
762
+ def consume_iterator_to_file(iterator: AnalysisResultsIterator, dir_path: Path, scanner: SCIntegration) -> tuple:
758
763
  """
759
764
  Consume an iterator and write the results to a file
760
765
 
761
766
  :param AnalysisResultsIterator iterator: Tenable SC iterator
762
767
  :param Path dir_path: The directory to save the pickled files
763
768
  :param SCIntegration scanner: Tenable SC Integration object
764
- :rtype: int
765
- :return: The total count of items processed
769
+ :rtype: tuple
770
+ :return: The total count of assets and findings processed
766
771
  """
767
772
  app = Application()
768
773
  logger.info("Consuming Tenable SC iterator...")
769
774
  override = IntegrationOverride(app)
775
+ asset_count = 0
776
+ findings_count = 0
770
777
  total_count = ThreadSafeCounter()
771
778
  page_number = ThreadSafeCounter()
772
779
  rec_count = ThreadSafeCounter()
773
780
  process_list = queue.Queue()
781
+ futures_lst = []
774
782
  with ThreadPoolExecutor(max_workers=5) as executor:
775
783
  for dat in iterator:
776
784
  total_count.increment()
@@ -778,19 +786,32 @@ def consume_iterator_to_file(iterator: AnalysisResultsIterator, dir_path: Path,
778
786
  rec_count.increment()
779
787
  if rec_count.value == len(iterator.page):
780
788
  page_number.increment()
781
- executor.submit(
782
- process_sc_chunk,
783
- app=app,
784
- vulns=pop_queue(queue=process_list, queue_len=len(iterator.page)),
785
- page=page_number.value,
786
- dir_path=dir_path,
787
- sc=scanner,
788
- override=override,
789
+ futures_lst.append(
790
+ executor.submit(
791
+ process_sc_chunk,
792
+ app=app,
793
+ vulns=pop_queue(queue=process_list, queue_len=len(iterator.page)),
794
+ page=page_number.value,
795
+ dir_path=dir_path,
796
+ sc=scanner,
797
+ override=override,
798
+ )
789
799
  )
790
800
  rec_count.set(0)
801
+ # Collect results from all threads
802
+ asset_count = 0
803
+ findings_count = 0
804
+ # Wait for completion
805
+ wait(futures_lst)
806
+
807
+ for future in futures_lst:
808
+ findings, assets = future.result()
809
+ asset_count += assets
810
+ findings_count += findings
811
+
791
812
  if total_count.value == 0:
792
813
  logger.warning("No Tenable SC data found.")
793
- return total_count.value
814
+ return asset_count, findings_count
794
815
 
795
816
 
796
817
  def pop_queue(queue: queue.Queue, queue_len: int) -> list:
@@ -816,12 +837,13 @@ def pop_queue(queue: queue.Queue, queue_len: int) -> list:
816
837
  return retrieved_items
817
838
 
818
839
 
819
- def process_sc_chunk(**kwargs) -> None:
840
+ def process_sc_chunk(**kwargs) -> tuple:
820
841
  """
821
842
  Process Tenable SC chunk
822
843
 
823
844
  :param kwargs: Keyword arguments
824
- :rtype: None
845
+ :rtype: tuple
846
+ :return: Tuple of findings and assets
825
847
  """
826
848
  # iterator.page, iterator.page_count, file_path, query_id, ssp_id
827
849
  integration_mapping = kwargs.get("override")
@@ -833,7 +855,7 @@ def process_sc_chunk(**kwargs) -> None:
833
855
  tenable_sc: SCIntegration = kwargs.get("sc")
834
856
  thread = current_thread()
835
857
  if not len(vulns):
836
- return
858
+ return (0, 0)
837
859
  # I can't add a to-do thanks to sonarlint, but we need to add CVE lookup from plugin id
838
860
  # append file to path
839
861
  # Process to RegScale
@@ -857,6 +879,7 @@ def process_sc_chunk(**kwargs) -> None:
857
879
  kwargs.get("page"),
858
880
  )
859
881
  logger.debug(f"Completed thread: name={thread.name}, idnet={get_ident()}, id={get_native_id()}")
882
+ return (len(findings), len(assets))
860
883
 
861
884
 
862
885
  def get_last_pull_epoch(regscale_ssp_id: int) -> int:
@@ -394,13 +394,6 @@ def add_report_evidence(
394
394
  default="NIST800-53R5",
395
395
  required=True,
396
396
  )
397
- @click.option( # type: ignore
398
- "--include_not_implemented",
399
- "-n",
400
- is_flag=True,
401
- help="Include not implemented controls",
402
- default=False,
403
- )
404
397
  def sync_compliance(
405
398
  wiz_project_id,
406
399
  regscale_id,
@@ -409,21 +402,16 @@ def sync_compliance(
409
402
  client_secret,
410
403
  catalog_id,
411
404
  framework,
412
- include_not_implemented,
413
405
  ):
414
406
  """Sync compliance posture from Wiz to RegScale"""
415
- from regscale.core.app.utils.app_utils import create_progress_object
416
407
  from regscale.integrations.commercial.wizv2.utils import _sync_compliance
417
408
 
418
- compliance_job_progress = create_progress_object()
419
- with compliance_job_progress:
420
- _sync_compliance(
421
- wiz_project_id=wiz_project_id,
422
- regscale_id=regscale_id,
423
- regscale_module=regscale_module,
424
- include_not_implemented=include_not_implemented,
425
- client_id=client_id,
426
- client_secret=client_secret,
427
- catalog_id=catalog_id,
428
- framework=framework,
429
- )
409
+ _sync_compliance(
410
+ wiz_project_id=wiz_project_id,
411
+ regscale_id=regscale_id,
412
+ regscale_module=regscale_module,
413
+ client_id=client_id,
414
+ client_secret=client_secret,
415
+ catalog_id=catalog_id,
416
+ framework=framework,
417
+ )
@@ -101,6 +101,7 @@ class WizVulnerabilityIntegration(ScannerIntegration):
101
101
  raise ValueError("Wiz project ID is required")
102
102
 
103
103
  logger.info("Fetching Wiz findings...")
104
+ self.num_findings_to_process = 0
104
105
 
105
106
  for wiz_vulnerability_type in self.get_query_types(project_id=project_id):
106
107
  logger.info("Fetching Wiz findings for %s...", wiz_vulnerability_type["type"])
@@ -114,8 +115,8 @@ class WizVulnerabilityIntegration(ScannerIntegration):
114
115
  topic_key=wiz_vulnerability_type["topic_key"],
115
116
  file_path=wiz_vulnerability_type["file_path"],
116
117
  )
118
+ self.num_findings_to_process += len(nodes)
117
119
  yield from self.parse_findings(nodes, wiz_vulnerability_type["type"])
118
-
119
120
  logger.info("Finished fetching Wiz findings.")
120
121
 
121
122
  def parse_findings(