regscale_cli-6.16.0.0-py3-none-any.whl → regscale_cli-6.16.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (65)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/application.py +1 -0
  3. regscale/core/app/internal/login.py +1 -1
  4. regscale/core/app/internal/poam_editor.py +1 -1
  5. regscale/core/app/utils/app_utils.py +1 -1
  6. regscale/core/app/utils/parser_utils.py +2 -2
  7. regscale/integrations/commercial/__init__.py +2 -2
  8. regscale/integrations/commercial/ad.py +1 -1
  9. regscale/integrations/commercial/azure/intune.py +1 -0
  10. regscale/integrations/commercial/grype/__init__.py +3 -0
  11. regscale/integrations/commercial/grype/commands.py +72 -0
  12. regscale/integrations/commercial/grype/scanner.py +390 -0
  13. regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
  14. regscale/integrations/commercial/nessus/scanner.py +3 -0
  15. regscale/integrations/commercial/opentext/__init__.py +6 -0
  16. regscale/integrations/commercial/opentext/commands.py +77 -0
  17. regscale/integrations/commercial/opentext/scanner.py +449 -85
  18. regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
  19. regscale/integrations/commercial/sap/tenable/click.py +1 -1
  20. regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
  21. regscale/integrations/commercial/tenablev2/click.py +39 -16
  22. regscale/integrations/commercial/trivy/__init__.py +5 -0
  23. regscale/integrations/commercial/trivy/commands.py +74 -0
  24. regscale/integrations/commercial/trivy/scanner.py +276 -0
  25. regscale/integrations/commercial/wizv2/click.py +9 -21
  26. regscale/integrations/commercial/wizv2/scanner.py +2 -1
  27. regscale/integrations/commercial/wizv2/utils.py +146 -70
  28. regscale/integrations/jsonl_scanner_integration.py +869 -0
  29. regscale/integrations/public/fedramp/fedramp_common.py +4 -4
  30. regscale/integrations/public/fedramp/import_workbook.py +1 -1
  31. regscale/integrations/public/fedramp/inventory_items.py +3 -3
  32. regscale/integrations/public/fedramp/poam/scanner.py +51 -44
  33. regscale/integrations/public/fedramp/ssp_logger.py +6 -6
  34. regscale/integrations/scanner_integration.py +268 -64
  35. regscale/models/app_models/mapping.py +3 -3
  36. regscale/models/integration_models/amazon_models/inspector.py +15 -17
  37. regscale/models/integration_models/aqua.py +1 -5
  38. regscale/models/integration_models/cisa_kev_data.json +100 -10
  39. regscale/models/integration_models/ecr_models/ecr.py +2 -6
  40. regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +7 -4
  41. regscale/models/integration_models/grype_import.py +3 -3
  42. regscale/models/integration_models/prisma.py +3 -3
  43. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  44. regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
  45. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
  46. regscale/models/integration_models/tenable_models/integration.py +46 -10
  47. regscale/models/integration_models/trivy_import.py +1 -1
  48. regscale/models/integration_models/xray.py +1 -1
  49. regscale/models/regscale_models/__init__.py +2 -0
  50. regscale/models/regscale_models/control_implementation.py +18 -44
  51. regscale/models/regscale_models/inherited_control.py +61 -0
  52. regscale/models/regscale_models/issue.py +3 -2
  53. regscale/models/regscale_models/mixins/parent_cache.py +1 -1
  54. regscale/models/regscale_models/regscale_model.py +73 -7
  55. regscale/models/regscale_models/vulnerability.py +61 -8
  56. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/METADATA +3 -3
  57. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/RECORD +62 -56
  58. tests/regscale/core/test_logz.py +8 -0
  59. regscale/integrations/commercial/grype.py +0 -165
  60. regscale/integrations/commercial/opentext/click.py +0 -99
  61. regscale/integrations/commercial/trivy.py +0 -162
  62. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/LICENSE +0 -0
  63. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/WHEEL +0 -0
  64. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/entry_points.txt +0 -0
  65. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/top_level.txt +0 -0
@@ -9,12 +9,12 @@ import enum
 import hashlib
 import json
 import logging
-import re
 import threading
 import time
 from abc import ABC, abstractmethod
 from collections import defaultdict
-from typing import Any, Dict, Generic, Iterator, List, Optional, Set, TypeVar, Union
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any, Dict, Generic, Iterator, List, Optional, Set, TypeVar, Union, Callable

 from rich.progress import Progress, TaskID

@@ -345,6 +345,15 @@ class IntegrationFinding:
     :param Optional[str] remediation: The remediation of the finding, defaults to None.
     :param Optional[str] source_rule_id: The source rule ID of the finding, defaults to None.
     :param Optional[str] poam_id: The POAM ID of the finding, defaults to None.
+    :param Optional[str] cvss_v3_vector: The CVSS v3 vector of the finding, defaults to None.
+    :param Optional[str] cvss_v2_vector: The CVSS v2 vector of the finding, defaults to None.
+    :param Optional[str] affected_os: The affected OS of the finding, defaults to None.
+    :param Optional[str] image_digest: The image digest of the finding, defaults to None.
+    :param Optional[str] affected_packages: The affected packages of the finding, defaults to None.
+    :param Optional[str] installed_versions: The installed versions of the finding, defaults to None.
+    :param Optional[str] fixed_versions: The fixed versions of the finding, defaults to None.
+    :param Optional[str] fix_status: The fix status of the finding, defaults to None.
+    :param Optional[str] build_version: The build version of the finding, defaults to None.
     """

     control_labels: List[str]
@@ -367,6 +376,16 @@ class IntegrationFinding:
     dns: Optional[str] = None
     severity_int: int = 0
     security_check: Optional[str] = None
+    cvss_v3_vector: Optional[str] = None
+    cvss_v2_vector: Optional[str] = None
+    affected_os: Optional[str] = None
+    package_path: Optional[str] = None
+    image_digest: Optional[str] = None
+    affected_packages: Optional[str] = None
+    installed_versions: Optional[str] = None
+    fixed_versions: Optional[str] = None
+    fix_status: Optional[str] = None
+    build_version: Optional[str] = None

     # Issues
     issue_title: str = ""
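
The ten fields added above carry container-scan metadata surfaced by the new Grype and Trivy integrations. A minimal illustration of the kind of data they hold, using a stand-in dataclass because the real IntegrationFinding declares many more fields; every value below is an invented example:

from dataclasses import dataclass
from typing import Optional

# Stand-in mirroring only the fields added in 6.16.2.0; not the real class.
@dataclass
class ContainerFindingExtras:
    cvss_v3_vector: Optional[str] = None
    cvss_v2_vector: Optional[str] = None
    affected_os: Optional[str] = None
    package_path: Optional[str] = None
    image_digest: Optional[str] = None
    affected_packages: Optional[str] = None
    installed_versions: Optional[str] = None
    fixed_versions: Optional[str] = None
    fix_status: Optional[str] = None
    build_version: Optional[str] = None

extras = ContainerFindingExtras(
    cvss_v3_vector="CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",  # invented example
    affected_os="alpine 3.18",
    affected_packages="openssl",
    installed_versions="3.0.8-r0",
    fixed_versions="3.0.8-r1",
    fix_status="fixed",
)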
@@ -537,6 +556,7 @@ class ScannerIntegration(ABC):
     _lock_registry: ThreadSafeDict = ThreadSafeDict()
     _global_lock = threading.Lock()  # Class-level lock
     _kev_data = ThreadSafeDict()  # Class-level lock
+    _results = ThreadSafeDict()

     # Error handling
     errors: List[str] = []
@@ -578,7 +598,7 @@ class ScannerIntegration(ABC):
         self.app = Application()
         self.alerted_assets: Set[str] = set()
         self.regscale_version: str = APIHandler().regscale_version  # noqa
-        logger.info(f"RegScale Version: {self.regscale_version}")
+        logger.debug(f"RegScale Version: {self.regscale_version}")
         self.plan_id: int = plan_id
         self.tenant_id: int = tenant_id
         self.components: ThreadSafeList[Any] = ThreadSafeList()
@@ -801,13 +821,20 @@ class ScannerIntegration(ABC):
         :return: The finding identifier
         :rtype: str
         """
+        # We could have a string truncation error platform side on IntegrationFindingId nvarchar(450)
         prefix = f"{self.plan_id}:"
-        if ScannerVariables.tenableGroupByPlugin and finding.plugin_id:
-            return f"{prefix}{finding.plugin_id}"
+        if (
+            ScannerVariables.tenableGroupByPlugin
+            and finding.plugin_id
+            and "tenable" in (finding.source_report or self.title).lower()
+        ):
+            res = f"{prefix}{finding.plugin_id}"
+            return res[:450]
         prefix += finding.cve or finding.plugin_id or finding.rule_id or self.hash_string(finding.external_id).__str__()
         if ScannerVariables.issueCreation.lower() == "perasset":
-            return f"{prefix}:{finding.asset_identifier}"
-        return prefix
+            res = f"{prefix}:{finding.asset_identifier}"
+            return res[:450]
+        return prefix[:450]

     def get_or_create_assessment(self, control_implementation_id: int) -> regscale_models.Assessment:
         """
@@ -963,6 +990,9 @@ class ScannerIntegration(ABC):

         created, existing_or_new_asset = self.create_new_asset(asset, component=None)

+        # update results expects a dict[str, list] to update result counts
+        self.update_result_counts("assets", {"created": [1] if created else [], "updated": [] if created else [1]})
+
         # If the asset is associated with a component, create a mapping between them.
         if existing_or_new_asset and component:
             _was_created, _asset_mapping = regscale_models.AssetMapping(
@@ -1170,7 +1200,7 @@ class ScannerIntegration(ABC):
         """
         logger.info("Updating RegScale assets...")
         loading_assets = self._setup_progress_bar()
-        logger.info("Pre-populating cache")
+        logger.debug("Pre-populating cache")
         regscale_models.AssetMapping.populate_cache_by_plan(self.plan_id)
         regscale_models.ComponentMapping.populate_cache_by_plan(self.plan_id)

@@ -1200,34 +1230,126 @@ class ScannerIntegration(ABC):

     def _process_assets(self, assets: Iterator[IntegrationAsset], loading_assets: TaskID) -> int:
         """
-        Processes the assets using single or multi-threaded approach based on THREAD_MAX_WORKERS.
+        Process assets using single or multi-threaded approach based on THREAD_MAX_WORKERS.

-        :param Iterator[IntegrationAsset] assets: The assets to process
-        :param TaskID loading_assets: The task ID for the progress bar
-        :return: The number of assets processed
+        :param Iterator[IntegrationAsset] assets: Assets to process
+        :param TaskID loading_assets: Task ID for the progress bar
+        :return: Number of assets processed
         :rtype: int
         """
-        assets_processed = 0
-        # prime cache
+        self._prime_asset_cache()
+        process_func = self._create_process_function(loading_assets)
+        max_workers = get_thread_workers_max()
+
+        if max_workers == 1:
+            return self._process_single_threaded(assets, process_func)
+        return self._process_multi_threaded(assets, process_func, max_workers)
+
+    def _prime_asset_cache(self) -> None:
+        """
+        Prime the asset cache by fetching assets for the given plan.
+
+        :rtype: None
+        """
         regscale_models.Asset.get_all_by_parent(
             parent_id=self.plan_id, parent_module=regscale_models.SecurityPlan.get_module_string()
         )

-        process_func = lambda my_asset: self._process_single_asset(my_asset, loading_assets)  # noqa: E731
+    def _create_process_function(self, loading_assets: TaskID) -> Callable[[IntegrationAsset], bool]:
+        """
+        Create a function to process a single asset.
+
+        :param TaskID loading_assets: Task ID for the progress bar
+        :return: Function that processes an asset and returns success status
+        :rtype: Callable[[IntegrationAsset], bool]
+        """
+        return lambda asset: self._process_single_asset(asset, loading_assets)
+
+    def _process_single_threaded(
+        self, assets: Iterator[IntegrationAsset], process_func: Callable[[IntegrationAsset], bool]
+    ) -> int:
+        """
+        Process assets sequentially in a single thread.
+
+        :param Iterator[IntegrationAsset] assets: Assets to process
+        :param Callable[[IntegrationAsset], bool] process_func: Function to process each asset
+        :return: Number of assets processed
+        :rtype: int
+        """
+        assets_processed = 0
+        for asset in assets:
+            if process_func(asset):
+                assets_processed = self._update_processed_count(assets_processed)
+        return assets_processed
+
+    def _process_multi_threaded(
+        self, assets: Iterator[IntegrationAsset], process_func: Callable[[IntegrationAsset], bool], max_workers: int
+    ) -> int:
+        """
+        Process assets in batches using multiple threads.
+
+        :param Iterator[IntegrationAsset] assets: Assets to process
+        :param Callable[[IntegrationAsset], bool] process_func: Function to process each asset
+        :param int max_workers: Maximum number of worker threads
+        :return: Number of assets processed
+        :rtype: int
+        """
+        batch_size = max_workers * 2
+        assets_processed = 0
+
+        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+            batch = []
+            futures = []

-        if get_thread_workers_max() == 1:
             for asset in assets:
-                if process_func(asset):
-                    assets_processed = self._update_processed_count(assets_processed)
-        else:
-            with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
-                future_to_asset = {executor.submit(process_func, asset): asset for asset in assets}
-                for future in concurrent.futures.as_completed(future_to_asset):
-                    if future.result():
-                        assets_processed = self._update_processed_count(assets_processed)
+                batch.append(asset)
+                if len(batch) >= batch_size:
+                    assets_processed += self._submit_and_process_batch(executor, process_func, batch, futures)
+                    batch = []
+                    futures = []
+
+            if batch:  # Process any remaining items
+                assets_processed += self._submit_and_process_batch(executor, process_func, batch, futures)

         return assets_processed

+    def _submit_and_process_batch(
+        self,
+        executor: ThreadPoolExecutor,
+        process_func: Callable[[IntegrationAsset], bool],
+        batch: List[IntegrationAsset],
+        futures: List,
+    ) -> int:
+        """
+        Submit a batch of assets for processing and count successful completions.
+
+        :param ThreadPoolExecutor executor: Thread pool executor for parallel processing
+        :param Callable[[IntegrationAsset], bool] process_func: Function to process each asset
+        :param List[IntegrationAsset] batch: Batch of assets to process
+        :param List futures: List to store future objects
+        :return: Number of assets processed in this batch
+        :rtype: int
+        """
+        assets_processed = 0
+        for asset in batch:
+            futures.append(executor.submit(process_func, asset))
+
+        for future in concurrent.futures.as_completed(futures):
+            if future.result():
+                assets_processed = self._update_processed_count(assets_processed)
+
+        return assets_processed
+
+    def _update_processed_count(self, current_count: int) -> int:
+        """
+        Increment the processed count.
+
+        :param int current_count: Current number of processed items
+        :return: Updated count
+        :rtype: int
+        """
+        return current_count + 1
+
     def _process_single_asset(self, asset: IntegrationAsset, loading_assets: TaskID) -> bool:
         """
         Processes a single asset and handles any exceptions.
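
The key change in this hunk: instead of submitting every asset up front (which materializes the whole iterator as futures in memory), work is now submitted in batches of max_workers * 2 and each batch is drained before the next is filled. A self-contained sketch of the same pattern, assuming a placeholder process() that returns a success flag:

from concurrent.futures import ThreadPoolExecutor, as_completed

def process(item: int) -> bool:
    return item % 2 == 0  # placeholder for real per-asset work

def process_in_batches(items, max_workers: int = 4) -> int:
    batch_size = max_workers * 2  # same sizing rule as the hunk above
    processed = 0
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        batch = []
        for item in items:
            batch.append(item)
            if len(batch) >= batch_size:
                futures = [executor.submit(process, i) for i in batch]
                processed += sum(1 for f in as_completed(futures) if f.result())
                batch = []
        if batch:  # drain the final partial batch
            futures = [executor.submit(process, i) for i in batch]
            processed += sum(1 for f in as_completed(futures) if f.result())
    return processed

print(process_in_batches(range(100)))  # 50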
@@ -1258,22 +1380,52 @@ class ScannerIntegration(ABC):
         logger.info("Processed %d assets.", assets_processed)
         return assets_processed

+    def update_result_counts(self, key: str, results: dict[str, list]) -> None:
+        """
+        Updates the results dictionary with the given key and results.
+
+        :param str key: The key to update
+        :param dict[str, list] results: The results to update, example: {"updated": [], "created": []}
+        :rtype: None
+        """
+        if key not in self._results:
+            self._results[key] = {"created_count": 0, "updated_count": 0}
+        self._results[key]["created_count"] += len(results.get("created", []))
+        self._results[key]["updated_count"] += len(results.get("updated", []))
+
     def _perform_batch_operations(self, progress: Progress) -> None:
         """
         Performs batch operations for assets, software inventory, and data.

         :rtype: None
         """
-        logger.info("Bulk saving assets...")
-        regscale_models.Asset.bulk_save(progress_context=progress)
-        regscale_models.Issue.bulk_save(progress_context=progress)
-        regscale_models.Property.bulk_save(progress_context=progress)
-
+        logger.debug("Bulk saving assets...")
+        self.update_result_counts("assets", regscale_models.Asset.bulk_save(progress_context=progress))
+        logger.debug("Done bulk saving assets.")
+        logger.debug("Bulk saving issues...")
+        self.update_result_counts("issues", regscale_models.Issue.bulk_save(progress_context=progress))
+        logger.debug("Done bulk saving issues.")
+        logger.debug("Bulk saving properties...")
+        self.update_result_counts("properties", regscale_models.Property.bulk_save(progress_context=progress))
+        logger.debug("Done bulk saving properties.")
+
+        software_inventory = {}
         if self.software_to_create:
-            regscale_models.SoftwareInventory.batch_create(items=self.software_to_create, progress_context=progress)
+            logger.debug("Bulk creating software inventory...")
+            software_inventory["created_count"] = len(
+                regscale_models.SoftwareInventory.batch_create(items=self.software_to_create, progress_context=progress)
+            )
+            logger.debug("Done bulk creating software inventory.")
         if self.software_to_update:
-            regscale_models.SoftwareInventory.batch_update(items=self.software_to_update, progress_context=progress)
-        regscale_models.Data.bulk_save(progress_context=progress)
+            logger.debug("Bulk updating software inventory...")
+            software_inventory["updated_updated"] = len(
+                regscale_models.SoftwareInventory.batch_update(items=self.software_to_update, progress_context=progress)
+            )
+            logger.debug("Done bulk updating software inventory.")
+        self._results["software_inventory"] = software_inventory
+        logger.debug("Bulk saving data records...")
+        self.update_result_counts("data", regscale_models.Data.bulk_save(progress_context=progress))
+        logger.debug("Done bulk saving data records.")

     @staticmethod
     def get_issue_title(finding: IntegrationFinding) -> str:
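
update_result_counts() assumes each bulk_save() returns a dict with "created" and "updated" lists and folds their lengths into a running per-type tally. A module-level sketch of that contract (the real method keeps the tally on the class-level _results ThreadSafeDict):

# Sketch of the counter contract with an ordinary dict standing in for _results.
_results: dict = {}

def update_result_counts(key: str, results: dict) -> None:
    if key not in _results:
        _results[key] = {"created_count": 0, "updated_count": 0}
    _results[key]["created_count"] += len(results.get("created", []))
    _results[key]["updated_count"] += len(results.get("updated", []))

update_result_counts("assets", {"created": ["a1", "a2"], "updated": ["a3"]})
update_result_counts("assets", {"created": [], "updated": ["a4"]})
print(_results["assets"])  # {'created_count': 2, 'updated_count': 2}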
@@ -1389,7 +1541,7 @@ class ScannerIntegration(ABC):
         issue.issueOwnerId = self.assessor_id
         issue.securityPlanId = self.plan_id
         issue.identification = "Vulnerability Assessment"
-        issue.dateFirstDetected = finding.date_created
+        issue.dateFirstDetected = finding.first_seen
         issue.dueDate = finding.due_date
         issue.description = description
         issue.sourceReport = finding.source_report or self.title
@@ -1803,9 +1955,8 @@ class ScannerIntegration(ABC):
         scan_history = self.create_scan_history()
         current_vulnerabilities: Dict[int, Set[int]] = defaultdict(set)
         processed_findings_count = 0
-        findings_to_process = self.num_findings_to_process
         loading_findings = self.finding_progress.add_task(
-            f"[#f8b737]Processing {f'{findings_to_process} ' if findings_to_process else ''}finding(s) from {self.title}",
+            f"[#f8b737]Processing {f'{self.num_findings_to_process} ' if self.num_findings_to_process else ''}finding(s) from {self.title}",
             total=self.num_findings_to_process if self.num_findings_to_process else None,
         )

@@ -1824,13 +1975,11 @@ class ScannerIntegration(ABC):
                 self.process_finding(finding_to_process, scan_history, current_vulnerabilities)
                 with count_lock:
                     processed_findings_count += 1
-                if findings_to_process and self.finding_progress.tasks[loading_findings].total != float(
-                    findings_to_process
-                ):
+                if self.num_findings_to_process:
                     self.finding_progress.update(
                         loading_findings,
-                        total=findings_to_process,
-                        description=f"[#f8b737]Processing {findings_to_process} findings from {self.title}.",
+                        total=self.num_findings_to_process,
+                        description=f"[#f8b737]Processing {self.num_findings_to_process} findings from {self.title}.",
                     )
                 self.finding_progress.advance(loading_findings, 1)
             except Exception as exc:
@@ -1844,12 +1993,25 @@ class ScannerIntegration(ABC):
             for finding in findings:
                 process_finding_with_progress(finding)
         else:
+            # Process findings in batches to control memory usage
+            batch_size = get_thread_workers_max() * 2  # Set batch size based on thread count
             with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
-                list(executor.map(process_finding_with_progress, findings))
+                batch = []
+                for finding in findings:
+                    batch.append(finding)
+                    if len(batch) >= batch_size:
+                        # Process this batch
+                        list(executor.map(process_finding_with_progress, batch))
+                        # Clear the batch
+                        batch = []
+
+                # Process any remaining items
+                if batch:
+                    list(executor.map(process_finding_with_progress, batch))

         # Close outdated issues
-        scan_history.save()
-        regscale_models.Issue.bulk_save(progress_context=self.finding_progress)
+        self._results["scan_history"] = scan_history.save()
+        self.update_result_counts("issues", regscale_models.Issue.bulk_save(progress_context=self.finding_progress))
         self.close_outdated_issues(current_vulnerabilities)

         return processed_findings_count
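
Unlike the asset path above, the findings path drains each batch with executor.map(), which blocks until the whole chunk finishes and yields results in submission order. A generic sketch of that chunked-map pattern; map_in_chunks is a hypothetical name, not part of the package:

from concurrent.futures import ThreadPoolExecutor
from itertools import islice

def map_in_chunks(func, items, max_workers: int = 4):
    it = iter(items)
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # list() forces each chunk to finish before the next is sliced off
        while chunk := list(islice(it, max_workers * 2)):
            yield from executor.map(func, chunk)

print(list(map_in_chunks(lambda n: n * n, range(10))))  # [0, 1, 4, ..., 81]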
@@ -2027,6 +2189,9 @@ class ScannerIntegration(ABC):
                 finding.vpr_score if hasattr(finding, "vprScore") else None
             ),  # If this is the VPR score, otherwise use a different field
             cvsSv3BaseScore=finding.cvss_v3_base_score or finding.cvss_v3_score or finding.cvss_score,
+            cvsSv2BaseScore=finding.cvss_v2_score,
+            cvsSv3BaseVector=finding.cvss_v3_vector,
+            cvsSv2BaseVector=finding.cvss_v2_vector,
             scanId=scan_history.id,
             severity=self.issue_to_vulnerability_map.get(finding.severity, regscale_models.VulnerabilitySeverity.Low),
             description=finding.description,
@@ -2039,29 +2204,36 @@ class ScannerIntegration(ABC):
             firstSeen=finding.first_seen,
             lastSeen=finding.last_seen,
             plugInName=finding.cve or finding.plugin_name,  # Use CVE if available, otherwise use plugin name
-            # plugInId=finding.plugin_id,  # Vulnerability.pluginId is an int, but it is a string on Issue
+            plugInId=finding.plugin_id,
             exploitAvailable=None,  # Set this if you have information about exploit availability
             plugInText=finding.observations,  # or finding.evidence, whichever is more appropriate
             port=finding.port if hasattr(finding, "port") else None,
             protocol=finding.protocol if hasattr(finding, "protocol") else None,
             operatingSystem=asset.operating_system if hasattr(asset, "operating_system") else None,
+            fixedVersions=finding.fixed_versions,
+            buildVersion=finding.build_version,
+            fixStatus=finding.fix_status,
+            installedVersions=finding.installed_versions,
+            affectedOS=finding.affected_os,
+            packagePath=finding.package_path,
+            imageDigest=finding.image_digest,
+            affectedPackages=finding.affected_packages,
         )

         vulnerability = vulnerability.create_or_update()
-        if re.match(r"^\d+\.\d+(\.\d+){0,2}$", self.regscale_version) or self.regscale_version >= "5.64.0":
-            regscale_models.VulnerabilityMapping(
-                vulnerabilityId=vulnerability.id,
-                assetId=asset.id,
-                scanId=scan_history.id,
-                securityPlansId=self.plan_id,
-                createdById=self.assessor_id,
-                tenantsId=self.tenant_id,
-                isPublic=True,
-                dateCreated=get_current_datetime(),
-                firstSeen=finding.first_seen,
-                lastSeen=finding.last_seen,
-                status=finding.status,
-            ).create_unique()
+        regscale_models.VulnerabilityMapping(
+            vulnerabilityId=vulnerability.id,
+            assetId=asset.id,
+            scanId=scan_history.id,
+            securityPlansId=self.plan_id,
+            createdById=self.assessor_id,
+            tenantsId=self.tenant_id,
+            isPublic=True,
+            dateCreated=get_current_datetime(),
+            firstSeen=finding.first_seen,
+            lastSeen=finding.last_seen,
+            status=finding.status,
+        ).create_unique()
         return vulnerability

     def handle_vulnerability(
@@ -2205,7 +2377,9 @@ class ScannerIntegration(ABC):
         )

         # Create a progress bar
-        task_id = self.finding_progress.add_task("[cyan]Closing outdated issues...", total=len(open_issues))
+        task_id = self.finding_progress.add_task(
+            f"[cyan]Analyzing {len(open_issues)} issue(s) and closing any outdated issue(s)...", total=len(open_issues)
+        )

         for issue in open_issues:
             if self.should_close_issue(issue, current_vulnerabilities):
@@ -2230,7 +2404,10 @@ class ScannerIntegration(ABC):
         for control_id in affected_control_ids:
             self.update_control_implementation_status_after_close(control_id)

-        logger.info("Closed %d outdated issues.", closed_count)
+        if closed_count > 0:
+            logger.info("Closed %d outdated issues.", closed_count)
+        else:
+            logger.info("No outdated issues to close.")
         return closed_count

     def update_control_implementation_status_after_close(self, control_id: int) -> None:
@@ -2370,7 +2547,7 @@ class ScannerIntegration(ABC):
         :return: The number of findings processed
         :rtype: int
         """
-        logger.info("Syncing %s findings...", cls.title)
+        logger.info("Syncing %s findings...", kwargs.get("title", cls.title))
         instance = cls(plan_id=plan_id)
         instance.set_keys(**kwargs)
         # If a progress object was passed, use it instead of creating a new one
@@ -2394,7 +2571,25 @@ class ScannerIntegration(ABC):
         else:
             logger.info("All findings have been processed successfully.")

-        logger.info("Processed %d findings.", findings_processed)
+        if scan_history := instance._results.get("scan_history"):
+            logger.info(
+                "Processed %d findings: %d Critical(s), %d High(s), %d Moderate(s), %d Low(s).",
+                findings_processed,
+                scan_history.vCritical,
+                scan_history.vHigh,
+                scan_history.vMedium,
+                scan_history.vLow,
+            )
+        else:
+            logger.info("Processed %d findings.", findings_processed)
+        issue_created_count = instance._results.get("issues", {}).get("created_count", 0)
+        issue_updated_count = instance._results.get("issues", {}).get("updated_count", 0)
+        if issue_created_count or issue_updated_count:
+            logger.info(
+                "Created %d issue(s) and updated %d issue(s) in RegScale.",
+                issue_created_count,
+                issue_updated_count,
+            )
         return findings_processed

     @classmethod
@@ -2406,7 +2601,7 @@ class ScannerIntegration(ABC):
         :return: The number of assets processed
         :rtype: int
         """
-        logger.info("Syncing %s assets...", cls.title)
+        logger.info("Syncing %s assets...", kwargs.get("title", cls.title))
         instance = cls(plan_id=plan_id, **kwargs)
         instance.set_keys(**kwargs)
         instance.asset_progress = kwargs.pop("progress") if "progress" in kwargs else create_progress_object()
@@ -2425,7 +2620,16 @@ class ScannerIntegration(ABC):
             logger.info("All assets have been processed successfully.")

         APIHandler().log_api_summary()
-        logger.info("%d assets processed.", assets_processed)
+        created_count = instance._results.get("assets", {}).get("created_count", 0)
+        updated_count = instance._results.get("assets", {}).get("updated_count", 0)
+        dedupe_count = assets_processed - (created_count + updated_count)
+        logger.info(
+            "%d assets processed and %d asset(s) deduped. %d asset(s) created & %d asset(s) updated in RegScale.",
+            assets_processed,
+            dedupe_count,
+            created_count,
+            updated_count,
+        )
         return assets_processed

     @classmethod
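
The dedupe figure in the new summary is just the processed count minus everything that resulted in a create or an update; for example:

# Worked example of the summary arithmetic with invented counts.
assets_processed = 120
created_count, updated_count = 30, 80
dedupe_count = assets_processed - (created_count + updated_count)
print(dedupe_count)  # 10 assets matched existing records and needed no write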
@@ -226,7 +226,7 @@ class Mapping(BaseModel):
         """
         return self.mapping.get(key)

-    def get_value(self, dat: Optional[dict], key: str, default_val: Optional[Any] = "", warnings: bool = True) -> Any:
+    def get_value(self, dat: Optional[dict], key: str, default_val: Optional[Any] = "") -> Any:
         """
         Get the value from a dictionary by mapped key

@@ -241,8 +241,8 @@ class Mapping(BaseModel):
         if key == "None" or key is None:
             return default_val
         mapped_key = self.mapping.get(key)
-        if not mapped_key and warnings:
-            self._logger.warning(f"Value for key '{key}' not found in mapping.")
+        if not mapped_key:
+            self._logger.debug(f"Value for key '{key}' not found in mapping.")
         if dat and mapped_key:
             val = dat.get(mapped_key)
             if isinstance(val, str):
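
get_value() loses its warnings flag and now logs a missing key at DEBUG instead of WARNING, which is why every warnings=False call site in the importer hunks below simply drops the argument. A rough standalone sketch of the new behavior (simplified from the Mapping model method):

import logging
from typing import Any, Optional

logger = logging.getLogger("mapping-sketch")  # hypothetical logger name

def get_value(mapping: dict, dat: Optional[dict], key: str, default_val: Any = "") -> Any:
    if key == "None" or key is None:
        return default_val
    mapped_key = mapping.get(key)
    if not mapped_key:
        logger.debug("Value for key '%s' not found in mapping.", key)  # was WARNING
    if dat and mapped_key:
        val = dat.get(mapped_key)
        return val.strip() if isinstance(val, str) else val
    return default_val

print(get_value({"title": "Title"}, {"Title": "  My finding  "}, "title"))  # "My finding"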
@@ -129,21 +129,21 @@ class InspectorRecord(BaseModel):
         platform_key = list(details.keys())[0] if details.keys() else None

         return InspectorRecord(
-            aws_account_id=mapping.get_value(finding, "awsAccountId", "", warnings=False),
-            description=mapping.get_value(finding, "description", warnings=False),
-            exploit_available=mapping.get_value(finding, "exploitAvailable", warnings=False),
-            finding_arn=mapping.get_value(finding, "findingArn", warnings=False),
-            first_seen=mapping.get_value(finding, "firstObservedAt", warnings=False),
-            fix_available=mapping.get_value(finding, "fixAvailable", warnings=False),
-            last_seen=mapping.get_value(finding, "lastObservedAt", warnings=False),
+            aws_account_id=mapping.get_value(finding, "awsAccountId", ""),
+            description=mapping.get_value(finding, "description"),
+            exploit_available=mapping.get_value(finding, "exploitAvailable"),
+            finding_arn=mapping.get_value(finding, "findingArn"),
+            first_seen=mapping.get_value(finding, "firstObservedAt"),
+            fix_available=mapping.get_value(finding, "fixAvailable"),
+            last_seen=mapping.get_value(finding, "lastObservedAt"),
             remediation=mapping.get_value(finding, "remediation", {}).get("recommendation", {}).get("text", ""),
-            severity=mapping.get_value(finding, "Severity", warnings=False),
-            status=mapping.get_value(finding, "status", warnings=False),
-            title=mapping.get_value(finding, "title", warnings=False),
+            severity=mapping.get_value(finding, "Severity"),
+            status=mapping.get_value(finding, "status"),
+            title=mapping.get_value(finding, "title"),
             resource_type=resource.get("type"),
             resource_id=resource.get("id"),
             region=resource.get("region"),
-            last_updated=mapping.get_value(finding, "updatedAt", warnings=False),
+            last_updated=mapping.get_value(finding, "updatedAt"),
             platform=resource.get("details", {}).get(platform_key, {}).get("platform", ""),
             resource_tags=" ,".join(resource.get("details", {}).get(platform_key, {}).get("imageTags", "")),
             affected_packages=cls.get_vulnerable_package_info(vulnerabilities, "name"),
@@ -152,18 +152,16 @@ class InspectorRecord(BaseModel):
             package_remediation=cls.get_vulnerable_package_info(vulnerabilities, "remediation"),
             vulnerability_id=vulnerabilities.get("vulnerabilityId") if vulnerabilities else None,
             vendor=vulnerabilities.get("source") if vulnerabilities else None,
-            vendor_severity=mapping.get_value(finding, "severity", warnings=False),
+            vendor_severity=mapping.get_value(finding, "severity"),
             vendor_advisory=vulnerabilities.get("sourceUrl") if vulnerabilities else None,
             vendor_advisory_published=vulnerabilities.get("vendorCreatedAt") if vulnerabilities else None,
             package_manager=cls.get_vulnerable_package_info(
-                mapping.get_value(finding, "packageVulnerabilityDetails", {}, warnings=False), "packageManager"
+                mapping.get_value(finding, "packageVulnerabilityDetails", {}), "packageManager"
             ),
             file_path=cls.get_vulnerable_package_info(
-                mapping.get_value(finding, "packageVulnerabilityDetails", {}, warnings=False), "filePath"
-            ),
-            reference_urls=mapping.get_value(finding, "packageVulnerabilityDetails", {}, warnings=False).get(
-                "sourceUrl"
+                mapping.get_value(finding, "packageVulnerabilityDetails", {}), "filePath"
             ),
+            reference_urls=mapping.get_value(finding, "packageVulnerabilityDetails", {}).get("sourceUrl"),
         )

     @classmethod
@@ -209,11 +209,7 @@ class Aqua(FlatFileImporter):
             self.nvd_cvss_v2_severity,
             self.vendor_cvss_v3_severity,
             # This field may or may not be available in the file (Coalfire has it, BMC does not.)
-            (
-                self.vendor_cvss_v2_severity
-                if self.mapping.get_value(dat, self.vendor_cvss_v2_severity, warnings=False)
-                else None
-            ),
+            (self.vendor_cvss_v2_severity if self.mapping.get_value(dat, self.vendor_cvss_v2_severity) else None),
         ]
         severity = "info"
         for key in precedence_order: