regscale-cli 6.16.1.0__py3-none-any.whl → 6.16.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli might be problematic. Click here for more details.

Files changed (35)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/internal/login.py +1 -1
  3. regscale/core/app/internal/poam_editor.py +1 -1
  4. regscale/integrations/commercial/__init__.py +2 -2
  5. regscale/integrations/commercial/ad.py +1 -1
  6. regscale/integrations/commercial/grype/__init__.py +3 -0
  7. regscale/integrations/commercial/grype/commands.py +72 -0
  8. regscale/integrations/commercial/grype/scanner.py +390 -0
  9. regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
  10. regscale/integrations/commercial/opentext/__init__.py +6 -0
  11. regscale/integrations/commercial/opentext/commands.py +77 -0
  12. regscale/integrations/commercial/opentext/scanner.py +449 -85
  13. regscale/integrations/commercial/trivy/__init__.py +5 -0
  14. regscale/integrations/commercial/trivy/commands.py +74 -0
  15. regscale/integrations/commercial/trivy/scanner.py +276 -0
  16. regscale/integrations/commercial/wizv2/utils.py +1 -1
  17. regscale/integrations/jsonl_scanner_integration.py +869 -0
  18. regscale/integrations/public/fedramp/fedramp_common.py +4 -4
  19. regscale/integrations/public/fedramp/inventory_items.py +3 -3
  20. regscale/integrations/scanner_integration.py +172 -41
  21. regscale/models/integration_models/cisa_kev_data.json +20 -5
  22. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  23. regscale/models/integration_models/tenable_models/integration.py +42 -7
  24. regscale/models/regscale_models/regscale_model.py +1 -1
  25. regscale/models/regscale_models/vulnerability.py +21 -0
  26. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/METADATA +3 -3
  27. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/RECORD +32 -27
  28. regscale/integrations/commercial/grype.py +0 -165
  29. regscale/integrations/commercial/opentext/click.py +0 -99
  30. regscale/integrations/commercial/trivy.py +0 -162
  31. /regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +0 -0
  32. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/LICENSE +0 -0
  33. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/WHEEL +0 -0
  34. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/entry_points.txt +0 -0
  35. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/top_level.txt +0 -0
@@ -1878,11 +1878,11 @@ def check_control_list_length(ssp_obj: SSP, mapping: dict) -> Optional[list]:
1878
1878
  return missing_controls
1879
1879
 
1880
1880
 
1881
- def log_controls_info(ssp_obj: dict, current_imps: List[dict]) -> None:
1881
+ def log_controls_info(ssp_obj: SSP, current_imps: List[dict]) -> None:
1882
1882
  """
1883
1883
  Log controls info
1884
1884
 
1885
- :param dict ssp_obj: SSP object
1885
+ :param SSP ssp_obj: SSP object
1886
1886
  :param List[dict] current_imps: List of current implementations
1887
1887
  :return None:
1888
1888
  :rtype None:
@@ -2305,8 +2305,8 @@ def fetch_existing_stakeholders(api: Api, config: dict, regscale_ssp: dict) -> l
2305
2305
  if response.ok:
2306
2306
  logger.info(
2307
2307
  f"Found {len(response.json())} existing stakeholders",
2308
- record_type="stackholders",
2309
- model_layer="stackholders",
2308
+ record_type="stakeholders",
2309
+ model_layer="stakeholders",
2310
2310
  )
2311
2311
  return response.json()
2312
2312
  return []
@@ -59,7 +59,7 @@ def get_dict_value(outer_key: str, inner_key: str, source_dict: Dict) -> Optiona
59
59
  outer_dict = source_dict.get(outer_key, None)
60
60
  if outer_dict is not None and isinstance(outer_dict, dict):
61
61
  val = outer_dict.get(inner_key, None)
62
- logger.info(f"{val}")
62
+ logger.debug(f"{val=}")
63
63
  return val
64
64
  return None
65
65
 
@@ -205,8 +205,8 @@ def parse_inventory_items(trv: FedrampTraversal, components_dict: Dict):
205
205
 
206
206
  for imp_component in implemented_components:
207
207
  if not isinstance(components_dict, dict):
208
- logger.info(components_dict)
209
- logger.info("components do not exist yet")
208
+ logger.debug(components_dict)
209
+ logger.info("Components do not exist yet.")
210
210
  continue
211
211
  comp = components_dict.get(imp_component.get("component_uuid"))
212
212
  comp_id = comp.get("id") if comp else None
@@ -9,12 +9,12 @@ import enum
9
9
  import hashlib
10
10
  import json
11
11
  import logging
12
- import re
13
12
  import threading
14
13
  import time
15
14
  from abc import ABC, abstractmethod
16
15
  from collections import defaultdict
17
- from typing import Any, Dict, Generic, Iterator, List, Optional, Set, TypeVar, Union
16
+ from concurrent.futures import ThreadPoolExecutor
17
+ from typing import Any, Dict, Generic, Iterator, List, Optional, Set, TypeVar, Union, Callable
18
18
 
19
19
  from rich.progress import Progress, TaskID
20
20
 
@@ -345,6 +345,15 @@ class IntegrationFinding:
345
345
  :param Optional[str] remediation: The remediation of the finding, defaults to None.
346
346
  :param Optional[str] source_rule_id: The source rule ID of the finding, defaults to None.
347
347
  :param Optional[str] poam_id: The POAM ID of the finding, defaults to None.
348
+ :param Optional[str] cvss_v3_vector: The CVSS v3 vector of the finding, defaults to None.
349
+ :param Optional[str] cvss_v2_vector: The CVSS v2 vector of the finding, defaults to None.
350
+ :param Optional[str] affected_os: The affected OS of the finding, defaults to None.
351
+ :param Optional[str] image_digest: The image digest of the finding, defaults to None.
352
+ :param Optional[str] affected_packages: The affected packages of the finding, defaults to None.
353
+ :param Optional[str] installed_versions: The installed versions of the finding, defaults to None.
354
+ :param Optional[str] fixed_versions: The fixed versions of the finding, defaults to None.
355
+ :param Optional[str] fix_status: The fix status of the finding, defaults to None.
356
+ :param Optional[str] build_version: The build version of the finding, defaults to None.
348
357
  """
349
358
 
350
359
  control_labels: List[str]
@@ -367,6 +376,16 @@ class IntegrationFinding:
367
376
  dns: Optional[str] = None
368
377
  severity_int: int = 0
369
378
  security_check: Optional[str] = None
379
+ cvss_v3_vector: Optional[str] = None
380
+ cvss_v2_vector: Optional[str] = None
381
+ affected_os: Optional[str] = None
382
+ package_path: Optional[str] = None
383
+ image_digest: Optional[str] = None
384
+ affected_packages: Optional[str] = None
385
+ installed_versions: Optional[str] = None
386
+ fixed_versions: Optional[str] = None
387
+ fix_status: Optional[str] = None
388
+ build_version: Optional[str] = None
370
389
 
371
390
  # Issues
372
391
  issue_title: str = ""
@@ -1211,34 +1230,126 @@ class ScannerIntegration(ABC):
1211
1230
 
1212
1231
  def _process_assets(self, assets: Iterator[IntegrationAsset], loading_assets: TaskID) -> int:
1213
1232
  """
1214
- Processes the assets using single or multi-threaded approach based on THREAD_MAX_WORKERS.
1233
+ Process assets using single or multi-threaded approach based on THREAD_MAX_WORKERS.
1215
1234
 
1216
- :param Iterator[IntegrationAsset] assets: The assets to process
1217
- :param TaskID loading_assets: The task ID for the progress bar
1218
- :return: The number of assets processed
1235
+ :param Iterator[IntegrationAsset] assets: Assets to process
1236
+ :param TaskID loading_assets: Task ID for the progress bar
1237
+ :return: Number of assets processed
1219
1238
  :rtype: int
1220
1239
  """
1221
- assets_processed = 0
1222
- # prime cache
1240
+ self._prime_asset_cache()
1241
+ process_func = self._create_process_function(loading_assets)
1242
+ max_workers = get_thread_workers_max()
1243
+
1244
+ if max_workers == 1:
1245
+ return self._process_single_threaded(assets, process_func)
1246
+ return self._process_multi_threaded(assets, process_func, max_workers)
1247
+
1248
+ def _prime_asset_cache(self) -> None:
1249
+ """
1250
+ Prime the asset cache by fetching assets for the given plan.
1251
+
1252
+ :rtype: None
1253
+ """
1223
1254
  regscale_models.Asset.get_all_by_parent(
1224
1255
  parent_id=self.plan_id, parent_module=regscale_models.SecurityPlan.get_module_string()
1225
1256
  )
1226
1257
 
1227
- process_func = lambda my_asset: self._process_single_asset(my_asset, loading_assets) # noqa: E731
1258
+ def _create_process_function(self, loading_assets: TaskID) -> Callable[[IntegrationAsset], bool]:
1259
+ """
1260
+ Create a function to process a single asset.
1261
+
1262
+ :param TaskID loading_assets: Task ID for the progress bar
1263
+ :return: Function that processes an asset and returns success status
1264
+ :rtype: Callable[[IntegrationAsset], bool]
1265
+ """
1266
+ return lambda asset: self._process_single_asset(asset, loading_assets)
1267
+
1268
+ def _process_single_threaded(
1269
+ self, assets: Iterator[IntegrationAsset], process_func: Callable[[IntegrationAsset], bool]
1270
+ ) -> int:
1271
+ """
1272
+ Process assets sequentially in a single thread.
1273
+
1274
+ :param Iterator[IntegrationAsset] assets: Assets to process
1275
+ :param Callable[[IntegrationAsset], bool] process_func: Function to process each asset
1276
+ :return: Number of assets processed
1277
+ :rtype: int
1278
+ """
1279
+ assets_processed = 0
1280
+ for asset in assets:
1281
+ if process_func(asset):
1282
+ assets_processed = self._update_processed_count(assets_processed)
1283
+ return assets_processed
1284
+
1285
+ def _process_multi_threaded(
1286
+ self, assets: Iterator[IntegrationAsset], process_func: Callable[[IntegrationAsset], bool], max_workers: int
1287
+ ) -> int:
1288
+ """
1289
+ Process assets in batches using multiple threads.
1290
+
1291
+ :param Iterator[IntegrationAsset] assets: Assets to process
1292
+ :param Callable[[IntegrationAsset], bool] process_func: Function to process each asset
1293
+ :param int max_workers: Maximum number of worker threads
1294
+ :return: Number of assets processed
1295
+ :rtype: int
1296
+ """
1297
+ batch_size = max_workers * 2
1298
+ assets_processed = 0
1299
+
1300
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
1301
+ batch = []
1302
+ futures = []
1228
1303
 
1229
- if get_thread_workers_max() == 1:
1230
1304
  for asset in assets:
1231
- if process_func(asset):
1232
- assets_processed = self._update_processed_count(assets_processed)
1233
- else:
1234
- with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
1235
- future_to_asset = {executor.submit(process_func, asset): asset for asset in assets}
1236
- for future in concurrent.futures.as_completed(future_to_asset):
1237
- if future.result():
1238
- assets_processed = self._update_processed_count(assets_processed)
1305
+ batch.append(asset)
1306
+ if len(batch) >= batch_size:
1307
+ assets_processed += self._submit_and_process_batch(executor, process_func, batch, futures)
1308
+ batch = []
1309
+ futures = []
1310
+
1311
+ if batch: # Process any remaining items
1312
+ assets_processed += self._submit_and_process_batch(executor, process_func, batch, futures)
1313
+
1314
+ return assets_processed
1315
+
1316
+ def _submit_and_process_batch(
1317
+ self,
1318
+ executor: ThreadPoolExecutor,
1319
+ process_func: Callable[[IntegrationAsset], bool],
1320
+ batch: List[IntegrationAsset],
1321
+ futures: List,
1322
+ ) -> int:
1323
+ """
1324
+ Submit a batch of assets for processing and count successful completions.
1325
+
1326
+ :param ThreadPoolExecutor executor: Thread pool executor for parallel processing
1327
+ :param Callable[[IntegrationAsset], bool] process_func: Function to process each asset
1328
+ :param List[IntegrationAsset] batch: Batch of assets to process
1329
+ :param List futures: List to store future objects
1330
+ :return: Number of assets processed in this batch
1331
+ :rtype: int
1332
+ """
1333
+ assets_processed = 0
1334
+ for asset in batch:
1335
+ futures.append(executor.submit(process_func, asset))
1336
+
1337
+ for future in concurrent.futures.as_completed(futures):
1338
+ if future.result():
1339
+ assets_processed = self._update_processed_count(assets_processed)
1239
1340
 
1240
1341
  return assets_processed
1241
1342
 
1343
+ def _update_processed_count(self, current_count: int) -> int:
1344
+ """
1345
+ Increment the processed count.
1346
+
1347
+ :param int current_count: Current number of processed items
1348
+ :return: Updated count
1349
+ :rtype: int
1350
+ """
1351
+ return current_count + 1
1352
+
1242
1353
  def _process_single_asset(self, asset: IntegrationAsset, loading_assets: TaskID) -> bool:
1243
1354
  """
1244
1355
  Processes a single asset and handles any exceptions.
@@ -1430,7 +1541,7 @@ class ScannerIntegration(ABC):
1430
1541
  issue.issueOwnerId = self.assessor_id
1431
1542
  issue.securityPlanId = self.plan_id
1432
1543
  issue.identification = "Vulnerability Assessment"
1433
- issue.dateFirstDetected = finding.date_created
1544
+ issue.dateFirstDetected = finding.first_seen
1434
1545
  issue.dueDate = finding.due_date
1435
1546
  issue.description = description
1436
1547
  issue.sourceReport = finding.source_report or self.title
@@ -1844,9 +1955,8 @@ class ScannerIntegration(ABC):
1844
1955
  scan_history = self.create_scan_history()
1845
1956
  current_vulnerabilities: Dict[int, Set[int]] = defaultdict(set)
1846
1957
  processed_findings_count = 0
1847
- findings_to_process = self.num_findings_to_process
1848
1958
  loading_findings = self.finding_progress.add_task(
1849
- f"[#f8b737]Processing {f'{findings_to_process} ' if findings_to_process else ''}finding(s) from {self.title}",
1959
+ f"[#f8b737]Processing {f'{self.num_findings_to_process} ' if self.num_findings_to_process else ''}finding(s) from {self.title}",
1850
1960
  total=self.num_findings_to_process if self.num_findings_to_process else None,
1851
1961
  )
1852
1962
 
@@ -1865,13 +1975,11 @@ class ScannerIntegration(ABC):
1865
1975
  self.process_finding(finding_to_process, scan_history, current_vulnerabilities)
1866
1976
  with count_lock:
1867
1977
  processed_findings_count += 1
1868
- if findings_to_process and self.finding_progress.tasks[loading_findings].total != float(
1869
- findings_to_process
1870
- ):
1978
+ if self.num_findings_to_process:
1871
1979
  self.finding_progress.update(
1872
1980
  loading_findings,
1873
- total=findings_to_process,
1874
- description=f"[#f8b737]Processing {findings_to_process} findings from {self.title}.",
1981
+ total=self.num_findings_to_process,
1982
+ description=f"[#f8b737]Processing {self.num_findings_to_process} findings from {self.title}.",
1875
1983
  )
1876
1984
  self.finding_progress.advance(loading_findings, 1)
1877
1985
  except Exception as exc:
@@ -1885,8 +1993,21 @@ class ScannerIntegration(ABC):
1885
1993
  for finding in findings:
1886
1994
  process_finding_with_progress(finding)
1887
1995
  else:
1996
+ # Process findings in batches to control memory usage
1997
+ batch_size = get_thread_workers_max() * 2 # Set batch size based on thread count
1888
1998
  with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
1889
- list(executor.map(process_finding_with_progress, findings))
1999
+ batch = []
2000
+ for finding in findings:
2001
+ batch.append(finding)
2002
+ if len(batch) >= batch_size:
2003
+ # Process this batch
2004
+ list(executor.map(process_finding_with_progress, batch))
2005
+ # Clear the batch
2006
+ batch = []
2007
+
2008
+ # Process any remaining items
2009
+ if batch:
2010
+ list(executor.map(process_finding_with_progress, batch))
1890
2011
 
1891
2012
  # Close outdated issues
1892
2013
  self._results["scan_history"] = scan_history.save()
@@ -2068,6 +2189,9 @@ class ScannerIntegration(ABC):
2068
2189
  finding.vpr_score if hasattr(finding, "vprScore") else None
2069
2190
  ), # If this is the VPR score, otherwise use a different field
2070
2191
  cvsSv3BaseScore=finding.cvss_v3_base_score or finding.cvss_v3_score or finding.cvss_score,
2192
+ cvsSv2BaseScore=finding.cvss_v2_score,
2193
+ cvsSv3BaseVector=finding.cvss_v3_vector,
2194
+ cvsSv2BaseVector=finding.cvss_v2_vector,
2071
2195
  scanId=scan_history.id,
2072
2196
  severity=self.issue_to_vulnerability_map.get(finding.severity, regscale_models.VulnerabilitySeverity.Low),
2073
2197
  description=finding.description,
@@ -2086,23 +2210,30 @@ class ScannerIntegration(ABC):
2086
2210
  port=finding.port if hasattr(finding, "port") else None,
2087
2211
  protocol=finding.protocol if hasattr(finding, "protocol") else None,
2088
2212
  operatingSystem=asset.operating_system if hasattr(asset, "operating_system") else None,
2213
+ fixedVersions=finding.fixed_versions,
2214
+ buildVersion=finding.build_version,
2215
+ fixStatus=finding.fix_status,
2216
+ installedVersions=finding.installed_versions,
2217
+ affectedOS=finding.affected_os,
2218
+ packagePath=finding.package_path,
2219
+ imageDigest=finding.image_digest,
2220
+ affectedPackages=finding.affected_packages,
2089
2221
  )
2090
2222
 
2091
2223
  vulnerability = vulnerability.create_or_update()
2092
- if re.match(r"^\d+\.\d+(\.\d+){0,2}$", self.regscale_version) or self.regscale_version >= "5.64.0":
2093
- regscale_models.VulnerabilityMapping(
2094
- vulnerabilityId=vulnerability.id,
2095
- assetId=asset.id,
2096
- scanId=scan_history.id,
2097
- securityPlansId=self.plan_id,
2098
- createdById=self.assessor_id,
2099
- tenantsId=self.tenant_id,
2100
- isPublic=True,
2101
- dateCreated=get_current_datetime(),
2102
- firstSeen=finding.first_seen,
2103
- lastSeen=finding.last_seen,
2104
- status=finding.status,
2105
- ).create_unique()
2224
+ regscale_models.VulnerabilityMapping(
2225
+ vulnerabilityId=vulnerability.id,
2226
+ assetId=asset.id,
2227
+ scanId=scan_history.id,
2228
+ securityPlansId=self.plan_id,
2229
+ createdById=self.assessor_id,
2230
+ tenantsId=self.tenant_id,
2231
+ isPublic=True,
2232
+ dateCreated=get_current_datetime(),
2233
+ firstSeen=finding.first_seen,
2234
+ lastSeen=finding.last_seen,
2235
+ status=finding.status,
2236
+ ).create_unique()
2106
2237
  return vulnerability
2107
2238
 
2108
2239
  def handle_vulnerability(
@@ -1,9 +1,24 @@
1
1
  {
2
2
  "title": "CISA Catalog of Known Exploited Vulnerabilities",
3
- "catalogVersion": "2025.03.19",
4
- "dateReleased": "2025-03-19T18:00:10.5417Z",
5
- "count": 1307,
3
+ "catalogVersion": "2025.03.24",
4
+ "dateReleased": "2025-03-24T18:01:34.066Z",
5
+ "count": 1308,
6
6
  "vulnerabilities": [
7
+ {
8
+ "cveID": "CVE-2025-30154",
9
+ "vendorProject": "reviewdog",
10
+ "product": "action-setup GitHub Action",
11
+ "vulnerabilityName": "reviewdog\/action-setup GitHub Action Embedded Malicious Code Vulnerability",
12
+ "dateAdded": "2025-03-24",
13
+ "shortDescription": "reviewdog action-setup GitHub Action contains an embedded malicious code vulnerability that dumps exposed secrets to Github Actions Workflow Logs.",
14
+ "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
15
+ "dueDate": "2025-04-14",
16
+ "knownRansomwareCampaignUse": "Unknown",
17
+ "notes": "This vulnerability affects a common open-source project, third-party library, or a protocol used by different products. For more information, please see: https:\/\/github.com\/reviewdog\/reviewdog\/security\/advisories\/GHSA-qmg3-hpqr-gqvc ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-30154",
18
+ "cwes": [
19
+ "CWE-506"
20
+ ]
21
+ },
7
22
  {
8
23
  "cveID": "CVE-2017-12637",
9
24
  "vendorProject": "SAP",
@@ -55,11 +70,11 @@
55
70
  "product": "changed-files GitHub Action",
56
71
  "vulnerabilityName": "tj-actions\/changed-files GitHub Action Embedded Malicious Code Vulnerability",
57
72
  "dateAdded": "2025-03-18",
58
- "shortDescription": "The tj-actions\/changed-files GitHub Action contains an embedded malicious code vulnerability that allows a remote attacker to discover secrets by reading actions logs. These secrets may include, but are not limited to, valid AWS access keys, GitHub personal access tokens (PATs), npm tokens, and private RSA keys.",
73
+ "shortDescription": "tj-actions\/changed-files GitHub Action contains an embedded malicious code vulnerability that allows a remote attacker to discover secrets by reading Github Actions Workflow Logs. These secrets may include, but are not limited to, valid AWS access keys, GitHub personal access tokens (PATs), npm tokens, and private RSA keys.",
59
74
  "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
60
75
  "dueDate": "2025-04-08",
61
76
  "knownRansomwareCampaignUse": "Unknown",
62
- "notes": "https:\/\/github.com\/tj-actions\/changed-files\/blob\/45fb12d7a8bedb4da42342e52fe054c6c2c3fd73\/README.md?plain=1#L20-L28 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-30066",
77
+ "notes": "This vulnerability affects a common open-source project, third-party library, or a protocol used by different products. For more information, please see: https:\/\/github.com\/tj-actions\/changed-files\/blob\/45fb12d7a8bedb4da42342e52fe054c6c2c3fd73\/README.md?plain=1#L20-L28 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-30066",
63
78
  "cwes": [
64
79
  "CWE-506"
65
80
  ]