regscale-cli 6.16.0.0__py3-none-any.whl → 6.16.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli might be problematic. See the registry's advisory page for more details.

Files changed (45)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/application.py +1 -0
  3. regscale/core/app/utils/app_utils.py +1 -1
  4. regscale/core/app/utils/parser_utils.py +2 -2
  5. regscale/integrations/commercial/azure/intune.py +1 -0
  6. regscale/integrations/commercial/nessus/scanner.py +3 -0
  7. regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
  8. regscale/integrations/commercial/sap/tenable/click.py +1 -1
  9. regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
  10. regscale/integrations/commercial/tenablev2/click.py +39 -16
  11. regscale/integrations/commercial/wizv2/click.py +9 -21
  12. regscale/integrations/commercial/wizv2/scanner.py +2 -1
  13. regscale/integrations/commercial/wizv2/utils.py +145 -69
  14. regscale/integrations/public/fedramp/import_workbook.py +1 -1
  15. regscale/integrations/public/fedramp/poam/scanner.py +51 -44
  16. regscale/integrations/public/fedramp/ssp_logger.py +6 -6
  17. regscale/integrations/scanner_integration.py +96 -23
  18. regscale/models/app_models/mapping.py +3 -3
  19. regscale/models/integration_models/amazon_models/inspector.py +15 -17
  20. regscale/models/integration_models/aqua.py +1 -5
  21. regscale/models/integration_models/cisa_kev_data.json +85 -10
  22. regscale/models/integration_models/ecr_models/ecr.py +2 -6
  23. regscale/models/integration_models/flat_file_importer.py +7 -4
  24. regscale/models/integration_models/grype_import.py +3 -3
  25. regscale/models/integration_models/prisma.py +3 -3
  26. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  27. regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
  28. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
  29. regscale/models/integration_models/tenable_models/integration.py +4 -3
  30. regscale/models/integration_models/trivy_import.py +1 -1
  31. regscale/models/integration_models/xray.py +1 -1
  32. regscale/models/regscale_models/__init__.py +2 -0
  33. regscale/models/regscale_models/control_implementation.py +18 -44
  34. regscale/models/regscale_models/inherited_control.py +61 -0
  35. regscale/models/regscale_models/issue.py +3 -2
  36. regscale/models/regscale_models/mixins/parent_cache.py +1 -1
  37. regscale/models/regscale_models/regscale_model.py +72 -6
  38. regscale/models/regscale_models/vulnerability.py +40 -8
  39. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/METADATA +1 -1
  40. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/RECORD +45 -44
  41. tests/regscale/core/test_logz.py +8 -0
  42. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/LICENSE +0 -0
  43. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/WHEEL +0 -0
  44. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/entry_points.txt +0 -0
  45. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/top_level.txt +0 -0
@@ -537,6 +537,7 @@ class ScannerIntegration(ABC):
537
537
  _lock_registry: ThreadSafeDict = ThreadSafeDict()
538
538
  _global_lock = threading.Lock() # Class-level lock
539
539
  _kev_data = ThreadSafeDict() # Class-level lock
540
+ _results = ThreadSafeDict()
540
541
 
541
542
  # Error handling
542
543
  errors: List[str] = []
@@ -578,7 +579,7 @@ class ScannerIntegration(ABC):
578
579
  self.app = Application()
579
580
  self.alerted_assets: Set[str] = set()
580
581
  self.regscale_version: str = APIHandler().regscale_version # noqa
581
- logger.info(f"RegScale Version: {self.regscale_version}")
582
+ logger.debug(f"RegScale Version: {self.regscale_version}")
582
583
  self.plan_id: int = plan_id
583
584
  self.tenant_id: int = tenant_id
584
585
  self.components: ThreadSafeList[Any] = ThreadSafeList()
@@ -801,13 +802,20 @@ class ScannerIntegration(ABC):
801
802
  :return: The finding identifier
802
803
  :rtype: str
803
804
  """
805
+ # We could have a string truncation error platform side on IntegrationFindingId nvarchar(450)
804
806
  prefix = f"{self.plan_id}:"
805
- if ScannerVariables.tenableGroupByPlugin and finding.plugin_id:
806
- return f"{prefix}{finding.plugin_id}"
807
+ if (
808
+ ScannerVariables.tenableGroupByPlugin
809
+ and finding.plugin_id
810
+ and "tenable" in (finding.source_report or self.title).lower()
811
+ ):
812
+ res = f"{prefix}{finding.plugin_id}"
813
+ return res[:450]
807
814
  prefix += finding.cve or finding.plugin_id or finding.rule_id or self.hash_string(finding.external_id).__str__()
808
815
  if ScannerVariables.issueCreation.lower() == "perasset":
809
- return f"{prefix}:{finding.asset_identifier}"
810
- return prefix
816
+ res = f"{prefix}:{finding.asset_identifier}"
817
+ return res[:450]
818
+ return prefix[:450]
811
819
 
812
820
  def get_or_create_assessment(self, control_implementation_id: int) -> regscale_models.Assessment:
813
821
  """
@@ -963,6 +971,9 @@ class ScannerIntegration(ABC):
963
971
 
964
972
  created, existing_or_new_asset = self.create_new_asset(asset, component=None)
965
973
 
974
+ # update results expects a dict[str, list] to update result counts
975
+ self.update_result_counts("assets", {"created": [1] if created else [], "updated": [] if created else [1]})
976
+
966
977
  # If the asset is associated with a component, create a mapping between them.
967
978
  if existing_or_new_asset and component:
968
979
  _was_created, _asset_mapping = regscale_models.AssetMapping(
@@ -1170,7 +1181,7 @@ class ScannerIntegration(ABC):
1170
1181
  """
1171
1182
  logger.info("Updating RegScale assets...")
1172
1183
  loading_assets = self._setup_progress_bar()
1173
- logger.info("Pre-populating cache")
1184
+ logger.debug("Pre-populating cache")
1174
1185
  regscale_models.AssetMapping.populate_cache_by_plan(self.plan_id)
1175
1186
  regscale_models.ComponentMapping.populate_cache_by_plan(self.plan_id)
1176
1187
 
@@ -1258,22 +1269,52 @@ class ScannerIntegration(ABC):
1258
1269
  logger.info("Processed %d assets.", assets_processed)
1259
1270
  return assets_processed
1260
1271
 
1272
+ def update_result_counts(self, key: str, results: dict[str, list]) -> None:
1273
+ """
1274
+ Updates the results dictionary with the given key and results.
1275
+
1276
+ :param str key: The key to update
1277
+ :param dict[str, list] results: The results to update, example: ["updated": [], "created": []]
1278
+ :rtype: None
1279
+ """
1280
+ if key not in self._results:
1281
+ self._results[key] = {"created_count": 0, "updated_count": 0}
1282
+ self._results[key]["created_count"] += len(results.get("created", []))
1283
+ self._results[key]["updated_count"] += len(results.get("updated", []))
1284
+
1261
1285
  def _perform_batch_operations(self, progress: Progress) -> None:
1262
1286
  """
1263
1287
  Performs batch operations for assets, software inventory, and data.
1264
1288
 
1265
1289
  :rtype: None
1266
1290
  """
1267
- logger.info("Bulk saving assets...")
1268
- regscale_models.Asset.bulk_save(progress_context=progress)
1269
- regscale_models.Issue.bulk_save(progress_context=progress)
1270
- regscale_models.Property.bulk_save(progress_context=progress)
1271
-
1291
+ logger.debug("Bulk saving assets...")
1292
+ self.update_result_counts("assets", regscale_models.Asset.bulk_save(progress_context=progress))
1293
+ logger.debug("Done bulk saving assets.")
1294
+ logger.debug("Bulk saving issues...")
1295
+ self.update_result_counts("issues", regscale_models.Issue.bulk_save(progress_context=progress))
1296
+ logger.debug("Done bulk saving issues.")
1297
+ logger.debug("Bulk saving properties...")
1298
+ self.update_result_counts("properties", regscale_models.Property.bulk_save(progress_context=progress))
1299
+ logger.debug("Done bulk saving properties.")
1300
+
1301
+ software_inventory = {}
1272
1302
  if self.software_to_create:
1273
- regscale_models.SoftwareInventory.batch_create(items=self.software_to_create, progress_context=progress)
1303
+ logger.debug("Bulk creating software inventory...")
1304
+ software_inventory["created_count"] = len(
1305
+ regscale_models.SoftwareInventory.batch_create(items=self.software_to_create, progress_context=progress)
1306
+ )
1307
+ logger.debug("Done bulk creating software inventory.")
1274
1308
  if self.software_to_update:
1275
- regscale_models.SoftwareInventory.batch_update(items=self.software_to_update, progress_context=progress)
1276
- regscale_models.Data.bulk_save(progress_context=progress)
1309
+ logger.debug("Bulk updating software inventory...")
1310
+ software_inventory["updated_updated"] = len(
1311
+ regscale_models.SoftwareInventory.batch_update(items=self.software_to_update, progress_context=progress)
1312
+ )
1313
+ logger.debug("Done bulk updating software inventory.")
1314
+ self._results["software_inventory"] = software_inventory
1315
+ logger.debug("Bulk saving data records...")
1316
+ self.update_result_counts("data", regscale_models.Data.bulk_save(progress_context=progress))
1317
+ logger.debug("Done bulk saving data records.")
1277
1318
 
1278
1319
  @staticmethod
1279
1320
  def get_issue_title(finding: IntegrationFinding) -> str:
@@ -1848,8 +1889,8 @@ class ScannerIntegration(ABC):
1848
1889
  list(executor.map(process_finding_with_progress, findings))
1849
1890
 
1850
1891
  # Close outdated issues
1851
- scan_history.save()
1852
- regscale_models.Issue.bulk_save(progress_context=self.finding_progress)
1892
+ self._results["scan_history"] = scan_history.save()
1893
+ self.update_result_counts("issues", regscale_models.Issue.bulk_save(progress_context=self.finding_progress))
1853
1894
  self.close_outdated_issues(current_vulnerabilities)
1854
1895
 
1855
1896
  return processed_findings_count
@@ -2039,7 +2080,7 @@ class ScannerIntegration(ABC):
2039
2080
  firstSeen=finding.first_seen,
2040
2081
  lastSeen=finding.last_seen,
2041
2082
  plugInName=finding.cve or finding.plugin_name, # Use CVE if available, otherwise use plugin name
2042
- # plugInId=finding.plugin_id, # Vulnerability.pluginId is an int, but it is a string on Issue
2083
+ plugInId=finding.plugin_id,
2043
2084
  exploitAvailable=None, # Set this if you have information about exploit availability
2044
2085
  plugInText=finding.observations, # or finding.evidence, whichever is more appropriate
2045
2086
  port=finding.port if hasattr(finding, "port") else None,
@@ -2205,7 +2246,9 @@ class ScannerIntegration(ABC):
2205
2246
  )
2206
2247
 
2207
2248
  # Create a progress bar
2208
- task_id = self.finding_progress.add_task("[cyan]Closing outdated issues...", total=len(open_issues))
2249
+ task_id = self.finding_progress.add_task(
2250
+ f"[cyan]Analyzing {len(open_issues)} issue(s) and closing any outdated issue(s)...", total=len(open_issues)
2251
+ )
2209
2252
 
2210
2253
  for issue in open_issues:
2211
2254
  if self.should_close_issue(issue, current_vulnerabilities):
@@ -2230,7 +2273,10 @@ class ScannerIntegration(ABC):
2230
2273
  for control_id in affected_control_ids:
2231
2274
  self.update_control_implementation_status_after_close(control_id)
2232
2275
 
2233
- logger.info("Closed %d outdated issues.", closed_count)
2276
+ if closed_count > 0:
2277
+ logger.info("Closed %d outdated issues.", closed_count)
2278
+ else:
2279
+ logger.info("No outdated issues to close.")
2234
2280
  return closed_count
2235
2281
 
2236
2282
  def update_control_implementation_status_after_close(self, control_id: int) -> None:
@@ -2370,7 +2416,7 @@ class ScannerIntegration(ABC):
2370
2416
  :return: The number of findings processed
2371
2417
  :rtype: int
2372
2418
  """
2373
- logger.info("Syncing %s findings...", cls.title)
2419
+ logger.info("Syncing %s findings...", kwargs.get("title", cls.title))
2374
2420
  instance = cls(plan_id=plan_id)
2375
2421
  instance.set_keys(**kwargs)
2376
2422
  # If a progress object was passed, use it instead of creating a new one
@@ -2394,7 +2440,25 @@ class ScannerIntegration(ABC):
2394
2440
  else:
2395
2441
  logger.info("All findings have been processed successfully.")
2396
2442
 
2397
- logger.info("Processed %d findings.", findings_processed)
2443
+ if scan_history := instance._results.get("scan_history"):
2444
+ logger.info(
2445
+ "Processed %d findings: %d Critical(s), %d High(s), %d Moderate(s), %d Low(s).",
2446
+ findings_processed,
2447
+ scan_history.vCritical,
2448
+ scan_history.vHigh,
2449
+ scan_history.vMedium,
2450
+ scan_history.vLow,
2451
+ )
2452
+ else:
2453
+ logger.info("Processed %d findings.", findings_processed)
2454
+ issue_created_count = instance._results.get("issues", {}).get("created_count", 0)
2455
+ issue_updated_count = instance._results.get("issues", {}).get("updated_count", 0)
2456
+ if issue_created_count or issue_updated_count:
2457
+ logger.info(
2458
+ "Created %d issue(s) and updated %d issue(s) in RegScale.",
2459
+ issue_created_count,
2460
+ issue_updated_count,
2461
+ )
2398
2462
  return findings_processed
2399
2463
 
2400
2464
  @classmethod
@@ -2406,7 +2470,7 @@ class ScannerIntegration(ABC):
2406
2470
  :return: The number of assets processed
2407
2471
  :rtype: int
2408
2472
  """
2409
- logger.info("Syncing %s assets...", cls.title)
2473
+ logger.info("Syncing %s assets...", kwargs.get("title", cls.title))
2410
2474
  instance = cls(plan_id=plan_id, **kwargs)
2411
2475
  instance.set_keys(**kwargs)
2412
2476
  instance.asset_progress = kwargs.pop("progress") if "progress" in kwargs else create_progress_object()
@@ -2425,7 +2489,16 @@ class ScannerIntegration(ABC):
2425
2489
  logger.info("All assets have been processed successfully.")
2426
2490
 
2427
2491
  APIHandler().log_api_summary()
2428
- logger.info("%d assets processed.", assets_processed)
2492
+ created_count = instance._results.get("assets", {}).get("created_count", 0)
2493
+ updated_count = instance._results.get("assets", {}).get("updated_count", 0)
2494
+ dedupe_count = assets_processed - (created_count + updated_count)
2495
+ logger.info(
2496
+ "%d assets processed and %d asset(s) deduped. %d asset(s) created & %d asset(s) updated in RegScale.",
2497
+ assets_processed,
2498
+ dedupe_count,
2499
+ created_count,
2500
+ updated_count,
2501
+ )
2429
2502
  return assets_processed
2430
2503
 
2431
2504
  @classmethod
@@ -226,7 +226,7 @@ class Mapping(BaseModel):
226
226
  """
227
227
  return self.mapping.get(key)
228
228
 
229
- def get_value(self, dat: Optional[dict], key: str, default_val: Optional[Any] = "", warnings: bool = True) -> Any:
229
+ def get_value(self, dat: Optional[dict], key: str, default_val: Optional[Any] = "") -> Any:
230
230
  """
231
231
  Get the value from a dictionary by mapped key
232
232
 
@@ -241,8 +241,8 @@ class Mapping(BaseModel):
241
241
  if key == "None" or key is None:
242
242
  return default_val
243
243
  mapped_key = self.mapping.get(key)
244
- if not mapped_key and warnings:
245
- self._logger.warning(f"Value for key '{key}' not found in mapping.")
244
+ if not mapped_key:
245
+ self._logger.debug(f"Value for key '{key}' not found in mapping.")
246
246
  if dat and mapped_key:
247
247
  val = dat.get(mapped_key)
248
248
  if isinstance(val, str):
@@ -129,21 +129,21 @@ class InspectorRecord(BaseModel):
129
129
  platform_key = list(details.keys())[0] if details.keys() else None
130
130
 
131
131
  return InspectorRecord(
132
- aws_account_id=mapping.get_value(finding, "awsAccountId", "", warnings=False),
133
- description=mapping.get_value(finding, "description", warnings=False),
134
- exploit_available=mapping.get_value(finding, "exploitAvailable", warnings=False),
135
- finding_arn=mapping.get_value(finding, "findingArn", warnings=False),
136
- first_seen=mapping.get_value(finding, "firstObservedAt", warnings=False),
137
- fix_available=mapping.get_value(finding, "fixAvailable", warnings=False),
138
- last_seen=mapping.get_value(finding, "lastObservedAt", warnings=False),
132
+ aws_account_id=mapping.get_value(finding, "awsAccountId", ""),
133
+ description=mapping.get_value(finding, "description"),
134
+ exploit_available=mapping.get_value(finding, "exploitAvailable"),
135
+ finding_arn=mapping.get_value(finding, "findingArn"),
136
+ first_seen=mapping.get_value(finding, "firstObservedAt"),
137
+ fix_available=mapping.get_value(finding, "fixAvailable"),
138
+ last_seen=mapping.get_value(finding, "lastObservedAt"),
139
139
  remediation=mapping.get_value(finding, "remediation", {}).get("recommendation", {}).get("text", ""),
140
- severity=mapping.get_value(finding, "Severity", warnings=False),
141
- status=mapping.get_value(finding, "status", warnings=False),
142
- title=mapping.get_value(finding, "title", warnings=False),
140
+ severity=mapping.get_value(finding, "Severity"),
141
+ status=mapping.get_value(finding, "status"),
142
+ title=mapping.get_value(finding, "title"),
143
143
  resource_type=resource.get("type"),
144
144
  resource_id=resource.get("id"),
145
145
  region=resource.get("region"),
146
- last_updated=mapping.get_value(finding, "updatedAt", warnings=False),
146
+ last_updated=mapping.get_value(finding, "updatedAt"),
147
147
  platform=resource.get("details", {}).get(platform_key, {}).get("platform", ""),
148
148
  resource_tags=" ,".join(resource.get("details", {}).get(platform_key, {}).get("imageTags", "")),
149
149
  affected_packages=cls.get_vulnerable_package_info(vulnerabilities, "name"),
@@ -152,18 +152,16 @@ class InspectorRecord(BaseModel):
152
152
  package_remediation=cls.get_vulnerable_package_info(vulnerabilities, "remediation"),
153
153
  vulnerability_id=vulnerabilities.get("vulnerabilityId") if vulnerabilities else None,
154
154
  vendor=vulnerabilities.get("source") if vulnerabilities else None,
155
- vendor_severity=mapping.get_value(finding, "severity", warnings=False),
155
+ vendor_severity=mapping.get_value(finding, "severity"),
156
156
  vendor_advisory=vulnerabilities.get("sourceUrl") if vulnerabilities else None,
157
157
  vendor_advisory_published=vulnerabilities.get("vendorCreatedAt") if vulnerabilities else None,
158
158
  package_manager=cls.get_vulnerable_package_info(
159
- mapping.get_value(finding, "packageVulnerabilityDetails", {}, warnings=False), "packageManager"
159
+ mapping.get_value(finding, "packageVulnerabilityDetails", {}), "packageManager"
160
160
  ),
161
161
  file_path=cls.get_vulnerable_package_info(
162
- mapping.get_value(finding, "packageVulnerabilityDetails", {}, warnings=False), "filePath"
163
- ),
164
- reference_urls=mapping.get_value(finding, "packageVulnerabilityDetails", {}, warnings=False).get(
165
- "sourceUrl"
162
+ mapping.get_value(finding, "packageVulnerabilityDetails", {}), "filePath"
166
163
  ),
164
+ reference_urls=mapping.get_value(finding, "packageVulnerabilityDetails", {}).get("sourceUrl"),
167
165
  )
168
166
 
169
167
  @classmethod
@@ -209,11 +209,7 @@ class Aqua(FlatFileImporter):
209
209
  self.nvd_cvss_v2_severity,
210
210
  self.vendor_cvss_v3_severity,
211
211
  # This field may or may not be available in the file (Coalfire has it, BMC does not.)
212
- (
213
- self.vendor_cvss_v2_severity
214
- if self.mapping.get_value(dat, self.vendor_cvss_v2_severity, warnings=False)
215
- else None
216
- ),
212
+ (self.vendor_cvss_v2_severity if self.mapping.get_value(dat, self.vendor_cvss_v2_severity) else None),
217
213
  ]
218
214
  severity = "info"
219
215
  for key in precedence_order:
@@ -1,9 +1,84 @@
1
1
  {
2
2
  "title": "CISA Catalog of Known Exploited Vulnerabilities",
3
- "catalogVersion": "2025.03.13",
4
- "dateReleased": "2025-03-13T19:37:52.2229Z",
5
- "count": 1302,
3
+ "catalogVersion": "2025.03.19",
4
+ "dateReleased": "2025-03-19T18:00:10.5417Z",
5
+ "count": 1307,
6
6
  "vulnerabilities": [
7
+ {
8
+ "cveID": "CVE-2017-12637",
9
+ "vendorProject": "SAP",
10
+ "product": "NetWeaver",
11
+ "vulnerabilityName": "SAP NetWeaver Directory Traversal Vulnerability",
12
+ "dateAdded": "2025-03-19",
13
+ "shortDescription": "SAP NetWeaver Application Server (AS) Java contains a directory traversal vulnerability in scheduler\/ui\/js\/ffffffffbca41eb4\/UIUtilJavaScriptJS that allows a remote attacker to read arbitrary files via a .. (dot dot) in the query string.",
14
+ "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
15
+ "dueDate": "2025-04-09",
16
+ "knownRansomwareCampaignUse": "Unknown",
17
+ "notes": "SAP users must have an account to log in and access the patch: https:\/\/me.sap.com\/notes\/3476549 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2017-12637",
18
+ "cwes": [
19
+ "CWE-22"
20
+ ]
21
+ },
22
+ {
23
+ "cveID": "CVE-2024-48248",
24
+ "vendorProject": "NAKIVO",
25
+ "product": "Backup and Replication",
26
+ "vulnerabilityName": "NAKIVO Backup and Replication Absolute Path Traversal Vulnerability",
27
+ "dateAdded": "2025-03-19",
28
+ "shortDescription": "NAKIVO Backup and Replication contains an absolute path traversal vulnerability that enables an attacker to read arbitrary files.",
29
+ "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
30
+ "dueDate": "2025-04-09",
31
+ "knownRansomwareCampaignUse": "Unknown",
32
+ "notes": "https:\/\/helpcenter.nakivo.com\/Release-Notes\/Content\/Release-Notes.htm ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2024-48248",
33
+ "cwes": [
34
+ "CWE-36"
35
+ ]
36
+ },
37
+ {
38
+ "cveID": "CVE-2025-1316",
39
+ "vendorProject": "Edimax",
40
+ "product": "IC-7100 IP Camera",
41
+ "vulnerabilityName": "Edimax IC-7100 IP Camera OS Command Injection Vulnerability",
42
+ "dateAdded": "2025-03-19",
43
+ "shortDescription": "Edimax IC-7100 IP camera contains an OS command injection vulnerability due to improper input sanitization that allows an attacker to achieve remote code execution via specially crafted requests. The impacted product could be end-of-life (EoL) and\/or end-of-service (EoS). Users should discontinue product utilization.",
44
+ "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
45
+ "dueDate": "2025-04-09",
46
+ "knownRansomwareCampaignUse": "Unknown",
47
+ "notes": "https:\/\/www.edimax.com\/edimax\/post\/post\/data\/edimax\/global\/press_releases\/4801\/ ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-1316",
48
+ "cwes": [
49
+ "CWE-78"
50
+ ]
51
+ },
52
+ {
53
+ "cveID": "CVE-2025-30066",
54
+ "vendorProject": "tj-actions",
55
+ "product": "changed-files GitHub Action",
56
+ "vulnerabilityName": "tj-actions\/changed-files GitHub Action Embedded Malicious Code Vulnerability",
57
+ "dateAdded": "2025-03-18",
58
+ "shortDescription": "The tj-actions\/changed-files GitHub Action contains an embedded malicious code vulnerability that allows a remote attacker to discover secrets by reading actions logs. These secrets may include, but are not limited to, valid AWS access keys, GitHub personal access tokens (PATs), npm tokens, and private RSA keys.",
59
+ "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
60
+ "dueDate": "2025-04-08",
61
+ "knownRansomwareCampaignUse": "Unknown",
62
+ "notes": "https:\/\/github.com\/tj-actions\/changed-files\/blob\/45fb12d7a8bedb4da42342e52fe054c6c2c3fd73\/README.md?plain=1#L20-L28 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-30066",
63
+ "cwes": [
64
+ "CWE-506"
65
+ ]
66
+ },
67
+ {
68
+ "cveID": "CVE-2025-24472",
69
+ "vendorProject": "Fortinet",
70
+ "product": "FortiOS and FortiProxy",
71
+ "vulnerabilityName": "Fortinet FortiOS and FortiProxy Authentication Bypass Vulnerability",
72
+ "dateAdded": "2025-03-18",
73
+ "shortDescription": " Fortinet FortiOS and FortiProxy contain an authentication bypass vulnerability that allows a remote attacker to gain super-admin privileges via crafted CSF proxy requests.",
74
+ "requiredAction": "Apply mitigations per vendor instructions, follow applicable BOD 22-01 guidance for cloud services, or discontinue use of the product if mitigations are unavailable.",
75
+ "dueDate": "2025-04-08",
76
+ "knownRansomwareCampaignUse": "Known",
77
+ "notes": "https:\/\/fortiguard.fortinet.com\/psirt\/FG-IR-24-535 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2025-24472",
78
+ "cwes": [
79
+ "CWE-288"
80
+ ]
81
+ },
7
82
  {
8
83
  "cveID": "CVE-2025-21590",
9
84
  "vendorProject": "Juniper",
@@ -733,7 +808,7 @@
733
808
  "requiredAction": "Apply mitigations per vendor instructions or discontinue use of the product if mitigations are unavailable.",
734
809
  "dueDate": "2025-02-25",
735
810
  "knownRansomwareCampaignUse": "Unknown",
736
- "notes": "https:\/\/ofbiz.apache.org\/security.html ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2024-45195",
811
+ "notes": "This vulnerability affects a common open-source component, third-party library, or a protocol used by different products. For more information, please see: https:\/\/ofbiz.apache.org\/security.html ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2024-45195",
737
812
  "cwes": [
738
813
  "CWE-425"
739
814
  ]
@@ -852,7 +927,7 @@
852
927
  "shortDescription": "Fortinet FortiOS and FortiProxy contain an authentication bypass vulnerability that may allow an unauthenticated, remote attacker to gain super-admin privileges via crafted requests to Node.js websocket module.",
853
928
  "requiredAction": "Apply mitigations per vendor instructions or discontinue use of the product if mitigations are unavailable.",
854
929
  "dueDate": "2025-01-21",
855
- "knownRansomwareCampaignUse": "Unknown",
930
+ "knownRansomwareCampaignUse": "Known",
856
931
  "notes": "https:\/\/fortiguard.fortinet.com\/psirt\/FG-IR-24-535 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2024-55591",
857
932
  "cwes": [
858
933
  "CWE-288"
@@ -867,7 +942,7 @@
867
942
  "shortDescription": "Qlik Sense contains an HTTP tunneling vulnerability that allows an attacker to escalate privileges and execute HTTP requests on the backend server hosting the software.",
868
943
  "requiredAction": "Apply mitigations per vendor instructions or discontinue use of the product if mitigations are unavailable.",
869
944
  "dueDate": "2025-02-03",
870
- "knownRansomwareCampaignUse": "Unknown",
945
+ "knownRansomwareCampaignUse": "Known",
871
946
  "notes": "https:\/\/community.qlik.com\/t5\/Official-Support-Articles\/Critical-Security-fixes-for-Qlik-Sense-Enterprise-for-Windows\/tac-p\/2120510 ; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2023-48365",
872
947
  "cwes": [
873
948
  "CWE-444"
@@ -2454,7 +2529,7 @@
2454
2529
  {
2455
2530
  "cveID": "CVE-2024-5217",
2456
2531
  "vendorProject": "ServiceNow",
2457
- "product": "Utah, Vancouver, and Washington DC Now",
2532
+ "product": "Utah, Vancouver, and Washington DC Now Platform",
2458
2533
  "vulnerabilityName": "ServiceNow Incomplete List of Disallowed Inputs Vulnerability",
2459
2534
  "dateAdded": "2024-07-29",
2460
2535
  "shortDescription": "ServiceNow Washington DC, Vancouver, and earlier Now Platform releases contain an incomplete list of disallowed inputs vulnerability in the GlideExpression script. An unauthenticated user could exploit this vulnerability to execute code remotely.",
@@ -2469,10 +2544,10 @@
2469
2544
  {
2470
2545
  "cveID": "CVE-2024-4879",
2471
2546
  "vendorProject": "ServiceNow",
2472
- "product": "Utah, Vancouver, and Washington DC Now",
2547
+ "product": "Utah, Vancouver, and Washington DC Now Platform",
2473
2548
  "vulnerabilityName": "ServiceNow Improper Input Validation Vulnerability",
2474
2549
  "dateAdded": "2024-07-29",
2475
- "shortDescription": "ServiceNow Utah, Vancouver, and Washington DC Now releases contain a jelly template injection vulnerability in UI macros. An unauthenticated user could exploit this vulnerability to execute code remotely. ",
2550
+ "shortDescription": "ServiceNow Utah, Vancouver, and Washington DC Now Platform releases contain a jelly template injection vulnerability in UI macros. An unauthenticated user could exploit this vulnerability to execute code remotely. ",
2476
2551
  "requiredAction": "Apply mitigations per vendor instructions or discontinue use of the product if mitigations are unavailable.",
2477
2552
  "dueDate": "2024-08-19",
2478
2553
  "knownRansomwareCampaignUse": "Unknown",
@@ -6309,7 +6384,7 @@
6309
6384
  "shortDescription": "Microsoft Windows Common Log File System (CLFS) driver contains an unspecified vulnerability that allows for privilege escalation.",
6310
6385
  "requiredAction": "Apply updates per vendor instructions.",
6311
6386
  "dueDate": "2023-03-07",
6312
- "knownRansomwareCampaignUse": "Unknown",
6387
+ "knownRansomwareCampaignUse": "Known",
6313
6388
  "notes": "https:\/\/msrc.microsoft.com\/update-guide\/en-US\/vulnerability\/CVE-2023-23376; https:\/\/nvd.nist.gov\/vuln\/detail\/CVE-2023-23376",
6314
6389
  "cwes": [
6315
6390
  "CWE-122"
@@ -89,9 +89,7 @@ class ECR(FlatFileImporter):
89
89
  :rtype: Asset
90
90
  """
91
91
  name = self.mapping.get_value(dat, "Name") or self.mapping.get_value(dat, "name")
92
- if repository_name := self.mapping.get_value(
93
- dat, "repositoryName", self.raw_dict.get("repositoryName", ""), warnings=False
94
- ):
92
+ if repository_name := self.mapping.get_value(dat, "repositoryName", self.raw_dict.get("repositoryName", "")):
95
93
  if (image_id_data := self.raw_dict.get("imageId", {}).get("imageDigest", "").split(":")) and len(
96
94
  image_id_data
97
95
  ) > 1:
@@ -134,9 +132,7 @@ class ECR(FlatFileImporter):
134
132
  """
135
133
  vulns: List[Vulnerability] = []
136
134
  hostname = dat.get("Name") or dat.get("name")
137
- if repository_name := self.mapping.get_value(
138
- dat, "repositoryName", self.raw_dict.get("repositoryName", ""), warnings=False
139
- ):
135
+ if repository_name := self.mapping.get_value(dat, "repositoryName", self.raw_dict.get("repositoryName", "")):
140
136
  image_id_data = self.raw_dict.get("imageId", {}).get("imageDigest", "").split(":")
141
137
  if len(image_id_data) > 1:
142
138
  image_id = image_id_data[1]
@@ -192,7 +192,7 @@ class FlatFileImporter(ABC):
192
192
  finding_severity_map=self.finding_severity_map,
193
193
  )
194
194
  flat_int.asset_identifier_field = self.asset_identifier_field
195
- logger.info(f"Asset Identifier Field: {flat_int.asset_identifier_field}")
195
+ logger.debug(f"Asset Identifier Field: {flat_int.asset_identifier_field}")
196
196
  flat_int.title = self.attributes.name
197
197
  self.create_assets(kwargs["asset_func"]) # type: ignore # Pass in the function to create an asset
198
198
  self.create_vulns(kwargs["vuln_func"]) # type: ignore # Pass in the function to create a vuln
@@ -430,7 +430,7 @@ class FlatFileImporter(ABC):
430
430
  :return: Tuple of header and data from csv file
431
431
  :rtype: tuple
432
432
  """
433
- logger.info("flatfileimporter: Converting csv to dict")
433
+ logger.debug("flatfileimporter: Converting csv to dict")
434
434
  # if file is empty, error and exit
435
435
  if not file.read(1):
436
436
  error_and_exit("File is empty")
@@ -446,6 +446,7 @@ class FlatFileImporter(ABC):
446
446
  self.handle_extra_headers(header=header)
447
447
 
448
448
  data = list(reader)
449
+ logger.debug("flatfileimporter: Done converting csv to dict.")
449
450
  return data, header
450
451
 
451
452
  def convert_xlsx_to_dict(self, file: TextIO, start_line_number: int = 0) -> tuple:
@@ -457,7 +458,7 @@ class FlatFileImporter(ABC):
457
458
  :return: Tuple of data and header from xlsx file
458
459
  :rtype: tuple
459
460
  """
460
- logger.info("flatfileimporter: Converting xlsx to dict")
461
+ logger.debug("flatfileimporter: Converting xlsx to dict")
461
462
  # Load the workbook
462
463
  workbook = load_workbook(filename=file.name)
463
464
 
@@ -485,6 +486,7 @@ class FlatFileImporter(ABC):
485
486
  except SyntaxError as rex:
486
487
  # Object is probably not a list, so just leave it as a string
487
488
  self.attributes.app.logger.debug("SyntaxError: %s", rex)
489
+ logger.debug("flatfileimporter: Done converting xlsx to dict.")
488
490
  return data_dict, header
489
491
 
490
492
  def count_vuln_by_severity(self, severity: str, asset_id: int) -> int:
@@ -531,6 +533,7 @@ class FlatFileImporter(ABC):
531
533
  :param Callable func: The function to process the data
532
534
  :rtype: None
533
535
  """
536
+ from regscale.integrations.scanner_integration import IntegrationAsset
534
537
 
535
538
  res = func(dat)
536
539
  if not res:
@@ -744,7 +747,7 @@ class FlatFileImporter(ABC):
744
747
  check_file_path(str(processed_dir.absolute()))
745
748
  try:
746
749
  self.attributes.logger.info(
747
- "Renaming %s to %s, ...",
750
+ "Renaming %s to %s...",
748
751
  file_path.name,
749
752
  new_file_path.name,
750
753
  )
@@ -48,12 +48,12 @@ class GrypeImport(FlatFileImporter):
48
48
  if kwargs.get("scan_date"):
49
49
  self.scan_date = kwargs.pop("scan_date")
50
50
  else:
51
- self.scan_date = safe_datetime_str(self.mapping.get_value(self.validater.data, "timestamp", warnings=False))
51
+ self.scan_date = safe_datetime_str(self.mapping.get_value(self.validater.data, "timestamp"))
52
52
  # even if a user doesn't specify a scan_date, we want to remove it from the kwargs and use the scan_date from
53
53
  # the attributes after the scan_date is set in the previous logic
54
54
  if "scan_date" in kwargs:
55
55
  kwargs.pop("scan_date")
56
- source_target_data = self.mapping.get_value(self.validater.data, "source", {}, warnings=False).get("target", {})
56
+ source_target_data = self.mapping.get_value(self.validater.data, "source", {}).get("target", {})
57
57
 
58
58
  if "sha256-" in kwargs["file_name"]:
59
59
  logger.debug("found sha256 in file name %s", kwargs["file_name"])
@@ -66,7 +66,7 @@ class GrypeImport(FlatFileImporter):
66
66
  self.other_tracking_number = source_target_data.get("userInput", "Unknown")
67
67
  self.os = source_target_data.get("os", "Linux")
68
68
  self.notes = f"{kwargs['file_name']}"
69
- vuln_count = len(self.mapping.get_value(self.validater.data, "matches", [], warnings=False))
69
+ vuln_count = len(self.mapping.get_value(self.validater.data, "matches", []))
70
70
  super().__init__(
71
71
  logger=logger,
72
72
  headers=self.headers,
@@ -80,7 +80,7 @@ class Prisma(FlatFileImporter):
80
80
  **{
81
81
  "id": 0,
82
82
  "name": hostname,
83
- "ipAddress": self.mapping.get_value(dat, "IP Address", warnings=False),
83
+ "ipAddress": self.mapping.get_value(dat, "IP Address"),
84
84
  "isPublic": True,
85
85
  "status": "Active (On Network)",
86
86
  "assetCategory": "Hardware",
@@ -132,8 +132,8 @@ class Prisma(FlatFileImporter):
132
132
  severity=severity,
133
133
  plugInName=self.mapping.get_value(dat, self.vuln_title),
134
134
  plugInId=(
135
- self.mapping.get_value(dat, VULNERABILITY_ID, warnings=False)
136
- if self.mapping.get_value(dat, VULNERABILITY_ID, warnings=False)
135
+ self.mapping.get_value(dat, VULNERABILITY_ID)
136
+ if self.mapping.get_value(dat, VULNERABILITY_ID)
137
137
  else None
138
138
  ),
139
139
  cve=cve,