regscale-cli 6.16.0.0__py3-none-any.whl → 6.16.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of regscale-cli has been flagged as potentially problematic; see the registry's advisory page for more details.

Files changed (45):
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/application.py +1 -0
  3. regscale/core/app/utils/app_utils.py +1 -1
  4. regscale/core/app/utils/parser_utils.py +2 -2
  5. regscale/integrations/commercial/azure/intune.py +1 -0
  6. regscale/integrations/commercial/nessus/scanner.py +3 -0
  7. regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
  8. regscale/integrations/commercial/sap/tenable/click.py +1 -1
  9. regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
  10. regscale/integrations/commercial/tenablev2/click.py +39 -16
  11. regscale/integrations/commercial/wizv2/click.py +9 -21
  12. regscale/integrations/commercial/wizv2/scanner.py +2 -1
  13. regscale/integrations/commercial/wizv2/utils.py +145 -69
  14. regscale/integrations/public/fedramp/import_workbook.py +1 -1
  15. regscale/integrations/public/fedramp/poam/scanner.py +51 -44
  16. regscale/integrations/public/fedramp/ssp_logger.py +6 -6
  17. regscale/integrations/scanner_integration.py +96 -23
  18. regscale/models/app_models/mapping.py +3 -3
  19. regscale/models/integration_models/amazon_models/inspector.py +15 -17
  20. regscale/models/integration_models/aqua.py +1 -5
  21. regscale/models/integration_models/cisa_kev_data.json +85 -10
  22. regscale/models/integration_models/ecr_models/ecr.py +2 -6
  23. regscale/models/integration_models/flat_file_importer.py +7 -4
  24. regscale/models/integration_models/grype_import.py +3 -3
  25. regscale/models/integration_models/prisma.py +3 -3
  26. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  27. regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
  28. regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
  29. regscale/models/integration_models/tenable_models/integration.py +4 -3
  30. regscale/models/integration_models/trivy_import.py +1 -1
  31. regscale/models/integration_models/xray.py +1 -1
  32. regscale/models/regscale_models/__init__.py +2 -0
  33. regscale/models/regscale_models/control_implementation.py +18 -44
  34. regscale/models/regscale_models/inherited_control.py +61 -0
  35. regscale/models/regscale_models/issue.py +3 -2
  36. regscale/models/regscale_models/mixins/parent_cache.py +1 -1
  37. regscale/models/regscale_models/regscale_model.py +72 -6
  38. regscale/models/regscale_models/vulnerability.py +40 -8
  39. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/METADATA +1 -1
  40. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/RECORD +45 -44
  41. tests/regscale/core/test_logz.py +8 -0
  42. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/LICENSE +0 -0
  43. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/WHEEL +0 -0
  44. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/entry_points.txt +0 -0
  45. {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.1.0.dist-info}/top_level.txt +0 -0
@@ -1,3 +1,7 @@
1
+ """
2
+ Wiz V2 Utils
3
+ """
4
+
1
5
  import codecs
2
6
  import csv
3
7
  import datetime
@@ -32,10 +36,10 @@ from regscale.integrations.commercial.wizv2.constants import (
32
36
  MAX_RETRIES,
33
37
  CHECK_INTERVAL_FOR_DOWNLOAD_REPORT,
34
38
  )
39
+ from regscale.integrations.commercial.wizv2.models import ComplianceReport, ComplianceCheckStatus
35
40
  from regscale.integrations.commercial.wizv2.variables import WizVariables
36
41
  from regscale.integrations.commercial.wizv2.wiz_auth import wiz_authenticate
37
42
  from regscale.models import File, Sbom, SecurityPlan, Catalog, ControlImplementation, Assessment, regscale_models
38
- from regscale.integrations.commercial.wizv2.models import ComplianceReport, ComplianceCheckStatus
39
43
  from regscale.utils import PaginatedGraphQLClient
40
44
  from regscale.utils.decorators import deprecated
41
45
 
@@ -51,41 +55,29 @@ def get_notes_from_wiz_props(wiz_entity_properties: Dict, external_id: str) -> s
51
55
  :return: Notes
52
56
  :rtype: str
53
57
  """
58
+ # Define property mappings with display names and keys
59
+ property_mappings = [
60
+ ("External ID", lambda: external_id, lambda x: x),
61
+ ("Cloud Platform", "cloudPlatform", str),
62
+ ("Provider Unique ID", "providerUniqueId", str),
63
+ ("cloudProviderURL", "cloudProviderURL", lambda x: f'<a href="{x}" target="_blank">{x}</a>'),
64
+ ("Vertex ID", "_vertexID", str),
65
+ ("Severity Name", "severity_name", str),
66
+ ("Severity Description", "severity_description", str),
67
+ ]
68
+
54
69
  notes = []
55
- notes.append(f"External ID: {external_id}") if external_id else None
56
- (
57
- notes.append(f"Cloud Platform: {wiz_entity_properties.get('cloudPlatform')}")
58
- if wiz_entity_properties.get("cloudPlatform")
59
- else None
60
- )
61
- (
62
- notes.append(f"Provider Unique ID: {wiz_entity_properties.get('providerUniqueId')}")
63
- if wiz_entity_properties.get("providerUniqueId")
64
- else None
65
- )
66
- (
67
- notes.append(
68
- f"""cloudProviderURL:<a href="{wiz_entity_properties.get("cloudProviderURL")}"
69
- target="_blank">{wiz_entity_properties.get("cloudProviderURL")}</a>"""
70
- )
71
- if wiz_entity_properties.get("cloudProviderURL")
72
- else None
73
- )
74
- (
75
- notes.append(f"Vertex ID: {wiz_entity_properties.get('_vertexID')}")
76
- if wiz_entity_properties.get("_vertexID")
77
- else None
78
- )
79
- (
80
- notes.append(f"Severity Name: {wiz_entity_properties.get('severity_name')}")
81
- if wiz_entity_properties.get("severity_name")
82
- else None
83
- )
84
- (
85
- notes.append(f"Severity Description: {wiz_entity_properties.get('severity_description')}")
86
- if wiz_entity_properties.get("severity_description")
87
- else None
88
- )
70
+ for display_name, key_or_func, formatter in property_mappings:
71
+ # Handle external_id special case
72
+ if callable(key_or_func):
73
+ value = key_or_func()
74
+ else:
75
+ value = wiz_entity_properties.get(key_or_func)
76
+
77
+ if value:
78
+ formatted_value = formatter(value)
79
+ notes.append(f"{display_name}: {formatted_value}")
80
+
89
81
  return "<br>".join(notes)
90
82
 
91
83
 
@@ -108,7 +100,6 @@ def create_asset_type(asset_type: str) -> str:
108
100
  :return: Asset type
109
101
  :rtype: str
110
102
  """
111
- #
112
103
  asset_type = asset_type.title().replace("_", " ")
113
104
  meta_data_list = regscale_models.Metadata.get_metadata_by_module_field(module="assets", field="Asset Type")
114
105
  if not any(meta_data.value == asset_type for meta_data in meta_data_list):
@@ -360,8 +351,9 @@ def create_report_if_needed(
360
351
  )
361
352
  logger.info(f"Wiz compliance report created with ID {wiz_report_id}")
362
353
  return [wiz_report_id]
363
-
364
- return [report["id"] for report in reports if any(frame in report["name"] for frame in frames)]
354
+ logger.debug(f"Returning report ids for these reports {(report['name'] for report in reports)}")
355
+ reports = [report["id"] for report in reports if any(frame in report["name"] for frame in frames)]
356
+ return reports
365
357
 
366
358
 
367
359
  def fetch_and_process_report_data(wiz_report_ids: List) -> List:
@@ -384,6 +376,95 @@ def fetch_and_process_report_data(wiz_report_ids: List) -> List:
384
376
  return report_data
385
377
 
386
378
 
379
+ def get_or_create_report_id(
380
+ project_id: str,
381
+ frameworks: List[str],
382
+ wiz_frameworks: List[Dict],
383
+ existing_reports: List[Dict],
384
+ target_framework: str,
385
+ ) -> str:
386
+ """
387
+ Get an existing report ID or create a new one for the target framework.
388
+
389
+ :param project_id: Project identifier
390
+ :param frameworks: List of framework names
391
+ :param wiz_frameworks: List of framework details with IDs
392
+ :param existing_reports: List of existing reports
393
+ :param target_framework: Target framework name with underscores
394
+ :return: Single report ID
395
+ """
396
+ report_name = f"{target_framework}_project_{project_id}"
397
+
398
+ # Check for existing report with exact name
399
+ for report in existing_reports:
400
+ if report.get("name") == report_name:
401
+ logger.info(f"Found existing report '{report_name}' with ID {report['id']}")
402
+ return report["id"]
403
+
404
+ # Create new report if no exact match found
405
+ try:
406
+ framework_index = frameworks.index(target_framework)
407
+ framework_id = wiz_frameworks[framework_index].get("id")
408
+
409
+ report_id = create_compliance_report(
410
+ wiz_project_id=project_id, report_name=report_name, framework_id=framework_id
411
+ )
412
+ logger.info(f"Created new report '{report_name}' with ID {report_id}")
413
+ return report_id
414
+ except ValueError:
415
+ logger.error(f"Framework '{target_framework}' not found in frameworks list")
416
+ raise
417
+
418
+
419
+ def fetch_report_data(report_id: str) -> List[Dict]:
420
+ """
421
+ Fetch and process data for a single report ID.
422
+
423
+ :param report_id: Report identifier
424
+ :return: List of report data rows
425
+ """
426
+ try:
427
+ download_url = get_report_url_and_status(report_id)
428
+ logger.info(f"Fetching report {report_id} from: {download_url}")
429
+
430
+ with closing(requests.get(url=download_url, stream=True, timeout=10)) as response:
431
+ response.raise_for_status()
432
+ logger.info(f"Streaming and parsing report {report_id}")
433
+
434
+ reader = csv.DictReader(codecs.iterdecode(response.iter_lines(), encoding="utf-8"), delimiter=",")
435
+ return list(reader)
436
+ except requests.RequestException as e:
437
+ error_and_exit(f"Failed to fetch report {report_id}: {str(e)}")
438
+ except csv.Error as e:
439
+ error_and_exit(f"Failed to parse CSV for report {report_id}: {str(e)}")
440
+
441
+
442
+ # Usage example
443
+ def process_single_report(
444
+ project_id: str,
445
+ frameworks: List[str],
446
+ wiz_frameworks: List[Dict],
447
+ existing_reports: List[Dict],
448
+ target_framework: str,
449
+ ) -> List[Dict]:
450
+ """Process a single report and return its data.
451
+ :param project_id: Project identifier
452
+ :param frameworks: List of framework names
453
+ :param wiz_frameworks: List of framework details with IDs
454
+ :param existing_reports: List of existing reports
455
+ :param target_framework: Target framework name with underscores
456
+ :return: List of report data rows
457
+ """
458
+ report_id = get_or_create_report_id(
459
+ project_id=project_id,
460
+ frameworks=frameworks,
461
+ wiz_frameworks=wiz_frameworks,
462
+ existing_reports=existing_reports,
463
+ target_framework=target_framework,
464
+ )
465
+ return fetch_report_data(report_id)
466
+
467
+
387
468
  def fetch_framework_report(wiz_project_id: str, snake_framework: str) -> List[Any]:
388
469
  """
389
470
  Fetch Framework Report from Wiz.
@@ -395,10 +476,17 @@ def fetch_framework_report(wiz_project_id: str, snake_framework: str) -> List[An
395
476
  """
396
477
  wiz_frameworks = fetch_frameworks()
397
478
  frames = get_framework_names(wiz_frameworks)
398
- reports = list(query_reports())
479
+ reports = list(query_reports(wiz_project_id))
480
+
481
+ report_data = process_single_report(
482
+ project_id=wiz_project_id,
483
+ frameworks=frames,
484
+ wiz_frameworks=wiz_frameworks,
485
+ existing_reports=reports,
486
+ target_framework=snake_framework,
487
+ )
399
488
 
400
- wiz_report_ids = create_report_if_needed(wiz_project_id, frames, wiz_frameworks, reports, snake_framework)
401
- return fetch_and_process_report_data(wiz_report_ids)
489
+ return report_data
402
490
 
403
491
 
404
492
  def fetch_frameworks() -> list:
@@ -425,23 +513,21 @@ def fetch_frameworks() -> list:
425
513
  """
426
514
  variables = {
427
515
  "policyTypes": "CLOUD",
428
- "first": 500,
429
516
  }
430
517
  resp = send_request(
431
518
  query=query,
432
519
  variables=variables,
433
520
  api_endpoint_url=WizVariables.wizUrl,
434
521
  )
435
-
436
- if resp.ok:
437
- # ["data"]["securityFrameworks"]["nodes"]
522
+ logger.debug(f"Response: {resp}")
523
+ if resp and resp.ok:
438
524
  data = resp.json()
439
525
  return data.get("data", {}).get("securityFrameworks", {}).get("nodes")
440
526
  else:
441
527
  error_and_exit(f"Wiz Error: {resp.status_code if resp else None} - {resp.text if resp else 'No response'}")
442
528
 
443
529
 
444
- def query_reports() -> list:
530
+ def query_reports(wiz_project_id: str) -> list:
445
531
  """
446
532
  Query Report table from Wiz
447
533
 
@@ -450,7 +536,7 @@ def query_reports() -> list:
450
536
  """
451
537
 
452
538
  # The variables sent along with the above query
453
- variables = {"first": 100, "filterBy": {}}
539
+ variables = {"first": 100, "filterBy": {"projectId": f"{wiz_project_id}"}}
454
540
 
455
541
  res = send_request(
456
542
  query=REPORTS_QUERY,
@@ -462,6 +548,7 @@ def query_reports() -> list:
462
548
  if "errors" in res.json().keys():
463
549
  error_and_exit(f'Wiz Error: {res.json()["errors"]}')
464
550
  json_result = res.json()
551
+ logger.debug("JSON Result: %s", json_result)
465
552
  result = json_result.get("data", {}).get("reports", {}).get("nodes")
466
553
  except requests.JSONDecodeError:
467
554
  error_and_exit(f"Unable to fetch reports from Wiz: {res.status_code}, {res.reason}")
@@ -583,7 +670,6 @@ def _sync_compliance(
583
670
  wiz_project_id: str,
584
671
  regscale_id: int,
585
672
  regscale_module: str,
586
- include_not_implemented: bool,
587
673
  client_id: str,
588
674
  client_secret: str,
589
675
  catalog_id: int,
@@ -595,7 +681,6 @@ def _sync_compliance(
595
681
  :param str wiz_project_id: Wiz Project ID
596
682
  :param int regscale_id: RegScale ID
597
683
  :param str regscale_module: RegScale module
598
- :param bool include_not_implemented: Include not implemented controls
599
684
  :param str client_id: Wiz Client ID
600
685
  :param str client_secret: Wiz Client Secret
601
686
  :param int catalog_id: Catalog ID, defaults to None
@@ -631,12 +716,12 @@ def _sync_compliance(
631
716
 
632
717
  catalog = Catalog.get_with_all_details(catalog_id=catalog_id)
633
718
  controls = catalog.get("controls") if catalog else []
634
- passing_controls = dict()
635
- failing_controls = dict()
636
- controls_to_reports = dict()
637
- existing_implementations = ControlImplementation.get_existing_control_implementations(parent_id=regscale_id)
719
+ passing_controls = {}
720
+ failing_controls = {}
721
+ controls_to_reports = {}
722
+
638
723
  compliance_job_progress.update(fetch_regscale_data_job, completed=True, advance=1)
639
- logger.info(f"Analyzing ComplianceReport for framework {sync_framework} from Wiz")
724
+ logger.info("Analyzing ComplianceReport for framework %s from Wiz" % sync_framework)
640
725
  running_compliance_job = compliance_job_progress.add_task(
641
726
  "[#f68d1f]Building compliance posture from wiz report...",
642
727
  total=len(report_data),
@@ -654,20 +739,11 @@ def _sync_compliance(
654
739
  )
655
740
  report_models.append(cr)
656
741
  compliance_job_progress.update(running_compliance_job, advance=1)
657
- except ValidationError as e:
658
- logger.error(f"Error creating ComplianceReport: {e}")
742
+ except ValidationError:
743
+ error_message = traceback.format_exc()
744
+ logger.error(f"Error creating ComplianceReport: {error_message}")
659
745
  try:
660
746
  saving_regscale_data_job = compliance_job_progress.add_task("[#f68d1f]Saving RegScale data...", total=1)
661
- ControlImplementation.create_control_implementations(
662
- controls=controls,
663
- parent_id=regscale_id,
664
- parent_module=regscale_module,
665
- existing_implementation_dict=existing_implementations,
666
- full_controls=passing_controls,
667
- partial_controls={},
668
- failing_controls=failing_controls,
669
- include_not_implemented=include_not_implemented,
670
- )
671
747
  create_assessment_from_compliance_report(
672
748
  controls_to_reports=controls_to_reports,
673
749
  regscale_id=regscale_id,
@@ -676,9 +752,9 @@ def _sync_compliance(
676
752
  )
677
753
  compliance_job_progress.update(saving_regscale_data_job, completed=True, advance=1)
678
754
 
679
- except Exception as e:
680
- logger.error(f"Error creating ControlImplementations from compliance report: {e}")
681
- traceback.print_exc()
755
+ except Exception:
756
+ error_message = traceback.format_exc()
757
+ logger.error(f"Error creating ControlImplementations from compliance report: {error_message}")
682
758
  return report_models
683
759
 
684
760
 
@@ -217,7 +217,7 @@ def map_inventory_to_asset(
217
217
  "ram": 0,
218
218
  "diskStorage": 0,
219
219
  "description": "",
220
- "endOfLifeDate": date_str(mapping.get_value(inventory, "End-of-Life ", "", warnings=False)),
220
+ "endOfLifeDate": date_str(mapping.get_value(inventory, "End-of-Life ", "")),
221
221
  "purchaseDate": None,
222
222
  "status": "Active (On Network)",
223
223
  "wizId": "",
@@ -4,7 +4,10 @@
4
4
 
5
5
  import logging
6
6
  import re
7
- from typing import Iterator, List, Optional
7
+ from typing import Iterator, List, Optional, TYPE_CHECKING
8
+
9
+ if TYPE_CHECKING:
10
+ import numpy
8
11
 
9
12
  from openpyxl import load_workbook # type: ignore
10
13
  from openpyxl.utils import column_index_from_string # type: ignore
@@ -25,6 +28,14 @@ from regscale.validation.address import validate_ip_address, validate_mac_addres
25
28
 
26
29
  logger = logging.getLogger("regscale")
27
30
 
31
+ WEAKNESS_DETECTOR_SOURCE = "Weakness Detector Source"
32
+ ASSET_IDENTIFIER = "Asset Identifier"
33
+ SCHEDULED_COMPLETION_DATE = "Scheduled Completion Date"
34
+ MILESTONE_CHANGES = "Milestone Changes"
35
+ ORIGINAL_RISK_RATING = "Original Risk Rating"
36
+ ADJUSTED_RISK_RATING = "Adjusted Risk Rating"
37
+ FILE_PATH_ERROR = "File path is required."
38
+
28
39
 
29
40
  class FedrampPoamIntegration(ScannerIntegration):
30
41
  """Integration class for FedRAMP POAM scanning."""
@@ -50,40 +61,43 @@ class FedrampPoamIntegration(ScannerIntegration):
50
61
  skipped_records: int = 0
51
62
  processed_assets: set[str] = set() # Track processed assets across all methods
52
63
 
53
- # TODO: Pair this down to only usable data
54
64
  fedramp_poam_columns = [
55
65
  "POAM ID",
56
66
  "Weakness Name",
57
67
  "Weakness Description",
58
- "Weakness Detector Source",
68
+ WEAKNESS_DETECTOR_SOURCE,
59
69
  "Weakness Source Identifier",
60
- "Asset Identifier",
70
+ ASSET_IDENTIFIER,
61
71
  "Point of Contact",
62
72
  "Resources Required",
63
73
  "Overall Remediation Plan",
64
74
  "Original Detection Date",
65
- "Scheduled Completion Date",
75
+ SCHEDULED_COMPLETION_DATE,
66
76
  "Planned Milestones",
67
- "Milestone Changes",
77
+ MILESTONE_CHANGES,
68
78
  "Status Date",
69
- # "Vendor Dependency",
70
- # "Last Vendor Check-in Date",
71
- # "Vendor Dependent Product Name",
72
- "Original Risk Rating",
73
- "Adjusted Risk Rating",
79
+ ORIGINAL_RISK_RATING,
80
+ ADJUSTED_RISK_RATING,
74
81
  "Risk Adjustment",
75
82
  "False Positive",
76
83
  "Operational Requirement",
77
84
  "Deviation Rationale",
78
- # "Supporting Documents",
79
85
  "Comments",
80
- # "Auto-Approve",
81
- # "Binding Operational Directive 22-01 tracking",
82
- # "Binding Operational Directive 22-01 Due Date",
83
- # "CVE",
84
- # "Service Name",
85
86
  ]
86
87
 
88
+ """
89
+ Unused columns:
90
+ # "Vendor Dependency",
91
+ # "Last Vendor Check-in Date",
92
+ # "Vendor Dependent Product Name",
93
+ # "Supporting Documents",
94
+ # "Auto-Approve",
95
+ # "Binding Operational Directive 22-01 tracking",
96
+ # "Binding Operational Directive 22-01 Due Date",
97
+ # "CVE",
98
+ # "Service Name",
99
+ """
100
+
87
101
  def __init__(self, plan_id: int, **kwargs: dict):
88
102
  super().__init__(plan_id=plan_id)
89
103
  try:
@@ -91,7 +105,7 @@ class FedrampPoamIntegration(ScannerIntegration):
91
105
  if "file_path" in kwargs:
92
106
  self.file_path = kwargs["file_path"]
93
107
  if not self.file_path:
94
- raise ValueError("File path is required")
108
+ error_and_exit(FILE_PATH_ERROR)
95
109
  self.workbook = self.workbook or load_workbook(filename=self.file_path, data_only=True, read_only=True)
96
110
  self.poam_sheets = kwargs.get("poam_sheets") or [
97
111
  sheet for sheet in self.workbook.sheetnames if re.search("POA&M Items", sheet)
@@ -130,11 +144,10 @@ class FedrampPoamIntegration(ScannerIntegration):
130
144
  """
131
145
  Fetches findings from FedRAMP POAM files.
132
146
 
133
- :raises ValueError: If file path is not set
134
147
  :yield: Iterator of validated integration findings
135
148
  """
136
149
  if not self.file_path:
137
- raise ValueError("File path is required")
150
+ error_and_exit(FILE_PATH_ERROR)
138
151
 
139
152
  findings = []
140
153
  try:
@@ -256,7 +269,7 @@ class FedrampPoamIntegration(ScannerIntegration):
256
269
  "critical": IssueSeverity.High.name,
257
270
  "low": IssueSeverity.Low.name,
258
271
  }
259
- res = validator.mapping.get_value(data, "Original Risk Rating")
272
+ res = validator.mapping.get_value(data, ORIGINAL_RISK_RATING)
260
273
  if res.lower() not in [mem.lower() for mem in IssueSeverity.__members__]:
261
274
  res = dat_map.get(res.lower(), IssueSeverity.Low.name)
262
275
  return res
@@ -345,7 +358,7 @@ class FedrampPoamIntegration(ScannerIntegration):
345
358
  plugin_id_int = abs(hash(poam_id)) % (10**9)
346
359
 
347
360
  # Get asset identifiers
348
- asset_ids = val_mapping.get_value(data, "Asset Identifier")
361
+ asset_ids = val_mapping.get_value(data, ASSET_IDENTIFIER)
349
362
  if not asset_ids:
350
363
  logger.warning(f"No asset identifier found on row {index}, sheet {sheet}. Skipping.")
351
364
  yield from findings
@@ -374,8 +387,8 @@ class FedrampPoamIntegration(ScannerIntegration):
374
387
  date_str(val_mapping.get_value(data, "Original Detection Date")) or get_current_datetime()
375
388
  )
376
389
  due_date = date_str(
377
- val_mapping.get_value(data, "Scheduled Completion Date")
378
- if val_mapping.get_value(data, "Scheduled Completion Date") != "#REF!"
390
+ val_mapping.get_value(data, SCHEDULED_COMPLETION_DATE)
391
+ if val_mapping.get_value(data, SCHEDULED_COMPLETION_DATE) != "#REF!"
379
392
  else ""
380
393
  )
381
394
  severity: IssueSeverity = getattr(IssueSeverity, category.title(), IssueSeverity.NotAssigned)
@@ -408,18 +421,18 @@ class FedrampPoamIntegration(ScannerIntegration):
408
421
  date_last_updated=status_date,
409
422
  due_date=due_date,
410
423
  cve=cve, # Single CVE per finding
411
- plugin_name=val_mapping.get_value(data, "Weakness Detector Source") or "",
424
+ plugin_name=val_mapping.get_value(data, WEAKNESS_DETECTOR_SOURCE) or "",
412
425
  plugin_id=str(unique_plugin_id),
413
- observations=str(val_mapping.get_value(data, "Milestone Changes")) or "",
426
+ observations=str(val_mapping.get_value(data, MILESTONE_CHANGES)) or "",
414
427
  poam_comments=self.empty(val_mapping.get_value(data, "Comments")),
415
428
  remediation=self.empty(val_mapping.get_value(data, "Overall Remediation Plan")),
416
429
  basis_for_adjustment=str(self.get_basis_for_adjustment(val_mapping=val_mapping, data=data)),
417
430
  vulnerability_type="FedRAMP",
418
- source_report=str(val_mapping.get_value(data, "Weakness Detector Source")),
431
+ source_report=str(val_mapping.get_value(data, WEAKNESS_DETECTOR_SOURCE)),
419
432
  point_of_contact=str(val_mapping.get_value(data, "Point of Contact")),
420
- milestone_changes=str(val_mapping.get_value(data, "Milestone Changes")),
433
+ milestone_changes=str(val_mapping.get_value(data, MILESTONE_CHANGES)),
421
434
  planned_milestone_changes=str(val_mapping.get_value(data, "Planned Milestones")),
422
- adjusted_risk_rating=val_mapping.get_value(data, "Adjusted Risk Rating"),
435
+ adjusted_risk_rating=val_mapping.get_value(data, ADJUSTED_RISK_RATING),
423
436
  risk_adjustment=self.determine_risk_adjustment(val_mapping.get_value(data, "Risk Adjustment")),
424
437
  operational_requirements=str(val_mapping.get_value(data, "Operational Requirement")),
425
438
  deviation_rationale=str(val_mapping.get_value(data, "Deviation Rationale")),
@@ -452,7 +465,7 @@ class FedrampPoamIntegration(ScannerIntegration):
452
465
  res = date_str(status_map.get(resolve_status), "%m-%d-%Y")
453
466
  if res:
454
467
  logger.warning(
455
- f"Status Date missing on row %i, sheet %s, defaulting to %s: %s",
468
+ "Status Date missing on row %i, sheet %s, defaulting to %s: %s",
456
469
  index,
457
470
  sheet,
458
471
  resolve_status.lower().replace("_", " "),
@@ -482,7 +495,7 @@ class FedrampPoamIntegration(ScannerIntegration):
482
495
  else:
483
496
  logger.error("Validator mapping is None")
484
497
  return row_assets
485
- asset_ids = val_mapping.get_value(data, "Asset Identifier")
498
+ asset_ids = val_mapping.get_value(data, ASSET_IDENTIFIER)
486
499
  if not asset_ids:
487
500
  return row_assets
488
501
  asset_id_list = self.gen_asset_list(asset_ids)
@@ -643,8 +656,8 @@ class FedrampPoamIntegration(ScannerIntegration):
643
656
  :rtype: Optional[str]
644
657
  """
645
658
  basis_for_adjustment = self.empty(val_mapping.get_value(data, "Comments")) # e.g. row 23
646
- risk_rating = val_mapping.get_value(data, "Original Risk Rating")
647
- adjusted_risk_rating = val_mapping.get_value(data, "Adjusted Risk Rating")
659
+ risk_rating = val_mapping.get_value(data, ORIGINAL_RISK_RATING)
660
+ adjusted_risk_rating = val_mapping.get_value(data, ADJUSTED_RISK_RATING)
648
661
 
649
662
  if (adjusted_risk_rating != risk_rating) and not basis_for_adjustment:
650
663
  return "POAM Import"
@@ -745,19 +758,12 @@ class FedrampPoamIntegration(ScannerIntegration):
745
758
  """
746
759
  Fetch assets from FedRAMP POAM files.
747
760
 
748
- Args:
749
- *args: Variable length argument list
750
- **kwargs: Arbitrary keyword arguments
751
-
752
- Returns:
753
- Iterator[IntegrationAsset]: Iterator of parsed integration assets
754
-
755
- Raises:
756
- ValueError: If file_path is not set
757
- POAMProcessingError: If there's an error processing the POAM file
761
+ :raises POAMProcessingError: If there's an error processing the POAM file
762
+ :return: Iterator of parsed integration assets
763
+ :rtype: Iterator[IntegrationAsset]
758
764
  """
759
765
  if not self.file_path:
760
- raise ValueError("File path is required")
766
+ error_and_exit(FILE_PATH_ERROR)
761
767
 
762
768
  assets = []
763
769
  total_processed = 0
@@ -808,6 +814,7 @@ class FedrampPoamIntegration(ScannerIntegration):
808
814
  finally:
809
815
  logger.info(f"Completed processing with {total_processed} assets and {self.error_records} errors")
810
816
 
817
+ self.num_assets_to_process = len(assets)
811
818
  return iter(assets)
812
819
 
813
820
  def find_max_row(self, start_row: int, ws: Worksheet) -> int:
@@ -8,23 +8,22 @@ import logging
8
8
  from regscale.core.app.logz import create_logger
9
9
 
10
10
 
11
- class CaptureEventsHandler:
11
+ class CaptureEventsHandler(logging.Handler):
12
12
  def __init__(self, events, errors, infos):
13
- self.handler = logging.Handler()
14
13
  self.events = events
15
14
  self.errors = errors
16
15
  self.infos = infos
16
+ super().__init__()
17
17
 
18
18
  def emit(self, record):
19
- self.handler.emit(record)
20
19
  try:
21
- log_entry = self.handler.format(record)
20
+ log_entry = self.format(record)
22
21
  if record.levelname == "INFO":
23
22
  self.events.append(log_entry)
24
23
  elif record.levelname == "ERROR":
25
24
  self.errors.append(log_entry)
26
25
  except Exception:
27
- self.handler.handleError(record)
26
+ self.handleError(record)
28
27
 
29
28
 
30
29
  class SSPLogger:
@@ -33,7 +32,8 @@ class SSPLogger:
33
32
  self.errors = []
34
33
  self.infos = []
35
34
  self.capture_handler = CaptureEventsHandler(self.events, self.errors, self.infos)
36
- self.logger = create_logger(custom_handler=self.capture_handler)
35
+ logger = create_logger(custom_handler=self.capture_handler)
36
+ self.logger = logger
37
37
 
38
38
  def create_logger(self):
39
39
  return self.logger