regscale-cli 6.21.1.0__py3-none-any.whl → 6.21.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli might be problematic.
- regscale/_version.py +1 -1
- regscale/core/app/application.py +8 -0
- regscale/integrations/commercial/__init__.py +8 -8
- regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
- regscale/integrations/commercial/microsoft_defender/__init__.py +0 -0
- regscale/integrations/commercial/{defender.py → microsoft_defender/defender.py} +38 -612
- regscale/integrations/commercial/microsoft_defender/defender_api.py +286 -0
- regscale/integrations/commercial/microsoft_defender/defender_constants.py +80 -0
- regscale/integrations/commercial/microsoft_defender/defender_scanner.py +168 -0
- regscale/integrations/commercial/qualys/__init__.py +24 -86
- regscale/integrations/commercial/qualys/containers.py +2 -0
- regscale/integrations/commercial/qualys/scanner.py +7 -2
- regscale/integrations/commercial/sonarcloud.py +110 -71
- regscale/integrations/commercial/wizv2/click.py +4 -1
- regscale/integrations/commercial/wizv2/data_fetcher.py +401 -0
- regscale/integrations/commercial/wizv2/finding_processor.py +295 -0
- regscale/integrations/commercial/wizv2/policy_compliance.py +1471 -204
- regscale/integrations/commercial/wizv2/policy_compliance_helpers.py +564 -0
- regscale/integrations/commercial/wizv2/scanner.py +4 -4
- regscale/integrations/compliance_integration.py +213 -61
- regscale/integrations/public/fedramp/fedramp_five.py +92 -7
- regscale/integrations/scanner_integration.py +27 -4
- regscale/models/__init__.py +1 -1
- regscale/models/integration_models/cisa_kev_data.json +79 -3
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/issue.py +29 -9
- regscale/models/regscale_models/milestone.py +15 -14
- {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/METADATA +1 -1
- {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/RECORD +33 -28
- tests/regscale/test_authorization.py +0 -65
- tests/regscale/test_init.py +0 -96
- {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.21.1.0.dist-info → regscale_cli-6.21.2.1.dist-info}/top_level.txt +0 -0
@@ -114,6 +114,7 @@ class ComplianceIntegration(ScannerIntegration, ABC):
         create_issues: bool = True,
         update_control_status: bool = True,
         create_poams: bool = False,
+        parent_module: str = "securityplans",
         **kwargs,
     ):
         """
@@ -154,7 +155,8 @@ class ComplianceIntegration(ScannerIntegration, ABC):
         self._impl_id_by_control: Dict[str, int] = {}
         # Key: ControlImplementation.id -> Assessment created/updated today
         self._assessment_by_impl_today: Dict[int, regscale_models.Assessment] = {}
-
+        # suppress asset not found errors in non-debug modes
+        self.suppress_asset_not_found_errors = logger.level != logging.DEBUG
         # Set scan date
         self.scan_date = get_current_datetime()

@@ -199,7 +201,7 @@ class ComplianceIntegration(ScannerIntegration, ABC):
         self._load_existing_assessments()

         self._cache_loaded = True
-        logger.info("
+        logger.info("Loaded existing records cache to prevent duplicates:")
         logger.info(f"  - Assets: {len(self._existing_assets_cache)}")
         logger.info(f"  - Issues: {len(self._existing_issues_cache)}")
         logger.info(f"  - Assessments: {len(self._existing_assessments_cache)}")
@@ -238,22 +240,59 @@ class ComplianceIntegration(ScannerIntegration, ABC):
         """
         Load existing issues into cache.

+        Uses both plan-level and control-level queries to ensure all relevant issues are found.
+
         :return: None
         :rtype: None
         """
         try:
-
-
+            all_issues = set()
+
+            # Method 1: Get issues directly associated with the plan
+            plan_issues = regscale_models.Issue.get_all_by_parent(
                 parent_id=self.plan_id, parent_module=self.parent_module
             )
+            all_issues.update(plan_issues)
+            logger.debug(f"Found {len(plan_issues)} issues directly under plan {self.plan_id}")

-
+            # Method 2: Get issues associated with control implementations (matches scanner integration logic)
+            try:
+                issues_by_impl = regscale_models.Issue.get_open_issues_ids_by_implementation_id(
+                    plan_id=self.plan_id, is_component=getattr(self, "is_component", False)
+                )
+                impl_issues_count = 0
+                for impl_id, issue_list in issues_by_impl.items():
+                    for issue_dict in issue_list:
+                        # issue_dict contains issue data, need to get the actual issue object
+                        issue_id = issue_dict.get("id")
+                        if issue_id:
+                            try:
+                                issue = regscale_models.Issue.get_object(object_id=issue_id)
+                                if issue:
+                                    all_issues.add(issue)
+                                    impl_issues_count += 1
+                            except Exception as e:
+                                logger.debug(f"Could not load issue {issue_id}: {e}")
+
+                logger.debug(f"Found {impl_issues_count} additional issues via control implementations")
+            except Exception as e:
+                logger.debug(f"Could not load issues by control implementation: {e}")
+
+            logger.debug(f"Total unique issues found: {len(all_issues)} for plan {self.plan_id}")
+
+            wiz_issues = 0
+            for issue in all_issues:
                 # Cache by external_id and other_identifier for flexible lookup
                 if hasattr(issue, "externalId") and issue.externalId:
                     self._existing_issues_cache[issue.externalId] = issue
+                    if "wiz-policy" in issue.externalId.lower():
+                        wiz_issues += 1
+                        logger.debug(f"Cached Wiz issue: {issue.id} -> external_id: {issue.externalId}")
                 if hasattr(issue, "otherIdentifier") and issue.otherIdentifier:
                     self._existing_issues_cache[issue.otherIdentifier] = issue

+            logger.debug(f"Cached {wiz_issues} Wiz policy issues out of {len(all_issues)} total issues")
+
         except Exception as e:
             logger.debug(f"Error loading existing issues: {e}")

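The reworked _load_existing_issues above merges two lookups (plan-level and per-control-implementation) into one de-duplicated cache keyed by every usable identifier. A minimal, self-contained sketch of that pattern follows; the Issue dataclass and the two fetch functions are hypothetical stand-ins, not the RegScale models or API.

from dataclasses import dataclass
from typing import Dict, List, Optional


@dataclass(frozen=True)
class Issue:
    id: int
    external_id: Optional[str] = None
    other_identifier: Optional[str] = None


def fetch_plan_issues(plan_id: int) -> List[Issue]:
    # Hypothetical stand-in for the plan-level query.
    return [Issue(id=1, external_id="wiz-policy-abc"), Issue(id=2, other_identifier="POAM-7")]


def fetch_issues_by_implementation(plan_id: int) -> List[Issue]:
    # Hypothetical stand-in for the control-implementation query; may overlap with the plan query.
    return [Issue(id=1, external_id="wiz-policy-abc"), Issue(id=3, external_id="scanner-44")]


def build_issue_cache(plan_id: int) -> Dict[str, Issue]:
    # Union the two sources so duplicates collapse, then index by every usable identifier.
    all_issues = set(fetch_plan_issues(plan_id)) | set(fetch_issues_by_implementation(plan_id))
    cache: Dict[str, Issue] = {}
    for issue in all_issues:
        if issue.external_id:
            cache[issue.external_id] = issue
        if issue.other_identifier:
            cache[issue.other_identifier] = issue
    return cache


if __name__ == "__main__":
    print(sorted(build_issue_cache(plan_id=123).keys()))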
@@ -435,7 +474,7 @@ class ComplianceIntegration(ScannerIntegration, ABC):
                 logger.error(f"Error processing compliance item: {e}")
                 continue

-        logger.
+        logger.debug(
             f"Processed {len(self.all_compliance_items)} compliance items: "
             f"{len(self.all_compliance_items) - len(self.failed_compliance_items)} passing, "
             f"{len(self.failed_compliance_items)} failing"
@@ -592,61 +631,13 @@ class ComplianceIntegration(ScannerIntegration, ABC):
         logger.info(f"Starting {self.title} compliance sync...")

         try:
-            # Create scan history
             scan_history = self.create_scan_history()
-
-            # Process compliance data
             self.process_compliance_data()

-
-
-
-
-            # Use batch results recorded during bulk_save
-            results = getattr(self, "_results", {}).get("assets", {})
-            created = results.get("created_count", 0)
-            updated = results.get("updated_count", 0)
-            deleted = results.get("deleted_count", 0) if isinstance(results, dict) else 0
-            if deleted:
-                logger.info(
-                    f"Assets processed: {assets_processed} (created: {created}, updated: {updated}, deleted: {deleted})"
-                )
-            else:
-                logger.info(f"Assets processed: {assets_processed} (created: {created}, updated: {updated})")
-
-            # Create/update control assessments first so issues can link controlId/assessmentId
-            if self.update_control_status:
-                self._process_control_assessments()
-
-            # Create issues only (no vulnerabilities) for failed compliance items
-            if self.create_issues:
-                findings = list(self.fetch_findings())
-                if findings:
-                    for finding in findings:
-                        try:
-                            asset = self.get_asset_by_identifier(finding.asset_identifier)
-                            if not asset:
-                                # Attempt to create the asset on-demand from cached compliance data
-                                asset = self._ensure_asset_for_finding(finding)
-                            if not asset:
-                                logger.error(
-                                    f"Asset not found for identifier {finding.asset_identifier} — "
-                                    "skipping issue creation for this finding"
-                                )
-                                continue
-
-                            # Directly create/update issue without vulnerability processing
-                            issue_title = self.get_issue_title(finding)
-                            self.create_or_update_issue_from_finding(title=issue_title, finding=finding)
-                        except Exception as e:
-                            logger.error(f"Error processing finding: {e}")
-
-            # Do not write scan history for Wiz policy compliance when disabled
-            try:
-                if getattr(self, "enable_scan_history", True):
-                    self._update_scan_history(scan_history)
-            except Exception:
-                self._update_scan_history(scan_history)
+            self._sync_assets()
+            self._sync_control_assessments()
+            self._sync_issues()
+            self._finalize_scan_history(scan_history)

             logger.info(f"Completed {self.title} compliance sync")

@@ -654,6 +645,166 @@ class ComplianceIntegration(ScannerIntegration, ABC):
             logger.error(f"Error during compliance sync: {e}")
             raise

+    def _sync_assets(self) -> None:
+        """
+        Process and sync assets from compliance items.
+
+        :return: None
+        :rtype: None
+        """
+        assets = list(self.fetch_assets())
+        if not assets:
+            logger.debug("No assets generated from compliance items")
+            return
+
+        assets_processed = self.update_regscale_assets(iter(assets))
+        self._log_asset_results(assets_processed)
+
+    def _log_asset_results(self, assets_processed: int) -> None:
+        """
+        Log asset processing results.
+
+        :param int assets_processed: Number of assets processed
+        :return: None
+        :rtype: None
+        """
+        results = getattr(self, "_results", {}).get("assets", {})
+        created = results.get("created_count", 0)
+        updated = results.get("updated_count", 0)
+        deleted = results.get("deleted_count", 0) if isinstance(results, dict) else 0
+
+        if deleted > 0:
+            logger.info(
+                f"Assets processed: {assets_processed} (created: {created}, updated: {updated}, deleted: {deleted})"
+            )
+        elif created > 0 or updated > 0:
+            logger.info(f"Assets processed: {assets_processed} (created: {created}, updated: {updated})")
+        else:
+            logger.debug(f"Assets processed: {assets_processed} (no changes made)")
+
+    def _sync_control_assessments(self) -> None:
+        """
+        Process control assessments if enabled.
+
+        :return: None
+        :rtype: None
+        """
+        if self.update_control_status:
+            self._process_control_assessments()
+
+    def _sync_issues(self) -> None:
+        """
+        Process and sync issues from failed compliance items.
+
+        :return: None
+        :rtype: None
+        """
+        if not self.create_issues:
+            return
+
+        findings = list(self.fetch_findings())
+        if not findings:
+            logger.debug("No findings to process into issues")
+            return
+
+        issues_created, issues_skipped = self._process_findings_to_issues(findings)
+        self._log_issue_results(issues_created, issues_skipped)
+
+    def _process_findings_to_issues(self, findings: List[IntegrationFinding]) -> tuple[int, int]:
+        """
+        Process findings into issues and return counts.
+
+        :param findings: List of findings to process
+        :return: Tuple of (issues_created, issues_skipped)
+        """
+        issues_created = 0
+        issues_skipped = 0
+
+        for finding in findings:
+            try:
+                if self._process_single_finding(finding):
+                    issues_created += 1
+                else:
+                    issues_skipped += 1
+            except Exception as e:
+                logger.error(f"Error processing finding: {e}")
+                issues_skipped += 1
+
+        return issues_created, issues_skipped
+
+    def _process_single_finding(self, finding: IntegrationFinding) -> bool:
+        """
+        Process a single finding into an issue.
+
+        :param finding: Finding to process
+        :return: True if issue was created/updated, False if skipped
+        """
+        asset = self._get_or_create_asset_for_finding(finding)
+        if not asset:
+            self._log_asset_not_found_error(finding)
+            return False
+
+        issue_title = self.get_issue_title(finding)
+        issue = self.create_or_update_issue_from_finding(title=issue_title, finding=finding)
+        return issue is not None
+
+    def _get_or_create_asset_for_finding(self, finding: IntegrationFinding) -> Optional[regscale_models.Asset]:
+        """
+        Get existing asset or create one on-demand for the finding.
+
+        :param IntegrationFinding finding: Finding needing an asset
+        :return: Asset if found/created, None otherwise
+        :rtype: Optional[regscale_models.Asset]
+        """
+        asset = self.get_asset_by_identifier(finding.asset_identifier)
+        if not asset:
+            asset = self._ensure_asset_for_finding(finding)
+        return asset
+
+    def _log_asset_not_found_error(self, finding: IntegrationFinding) -> None:
+        """
+        Log error when asset is not found for a finding.
+
+        :param IntegrationFinding finding: Finding with missing asset
+        :return: None
+        :rtype: None
+        """
+        if not getattr(self, "suppress_asset_not_found_errors", False):
+            logger.error(
+                f"Asset not found for identifier {finding.asset_identifier} — "
+                "skipping issue creation for this finding"
+            )
+
+    def _log_issue_results(self, issues_created: int, issues_skipped: int) -> None:
+        """
+        Log issue processing results.
+
+        :param int issues_created: Number of issues created/updated
+        :param int issues_skipped: Number of issues skipped
+        :return: None
+        :rtype: None
+        """
+        if issues_created > 0:
+            logger.info(f"Issues processed: {issues_created} created/updated, {issues_skipped} skipped")
+        elif issues_skipped > 0:
+            logger.warning(f"Issues processed: 0 created, {issues_skipped} skipped (assets not found)")
+        else:
+            logger.debug("No issues processed")
+
+    def _finalize_scan_history(self, scan_history: regscale_models.ScanHistory) -> None:
+        """
+        Finalize scan history with error handling.
+
+        :param regscale_models.ScanHistory scan_history: Scan history to update
+        :return: None
+        :rtype: None
+        """
+        try:
+            if getattr(self, "enable_scan_history", True):
+                self._update_scan_history(scan_history)
+        except Exception:
+            self._update_scan_history(scan_history)
+
     def _ensure_asset_for_finding(self, finding: IntegrationFinding) -> Optional[regscale_models.Asset]:
         """
         Ensure an asset exists for the given finding.
@@ -733,14 +884,15 @@ class ComplianceIntegration(ScannerIntegration, ABC):
             )
             assessments_created += created

-
+        if assessments_created > 0:
+            logger.info(f"Successfully created {assessments_created} control assessments")
         passing_assessments = len([cid for cid in all_control_ids if cid not in self.failing_controls])
         failing_assessments = len([cid for cid in all_control_ids if cid in self.failing_controls])
         logger.info(f"Assessment breakdown: {passing_assessments} passing, {failing_assessments} failing")
-        logger.
+        logger.debug(f"Control implementation mappings created: {len(self._impl_id_by_control)}")
         if self._impl_id_by_control:
             logger.debug(f"Sample mappings: {dict(list(self._impl_id_by_control.items())[:5])}")
-        logger.
+        logger.debug(f"Today's assessments by implementation: {len(self._assessment_by_impl_today)}")
         if self._assessment_by_impl_today:
             logger.debug(f"Sample assessment mappings: {dict(list(self._assessment_by_impl_today.items())[:5])}")

@@ -1063,7 +1063,7 @@ def _update_existing_control(
         existing_control,
         control_dict.get("status"),
         new_statement if new_statement else control_dict.get("statement"),
-        control_dict.get("
+        control_dict.get("origination"),
        primary_role if primary_role and isinstance(primary_role, dict) and primary_role.get("id") else None,
        parent_id,
    )
@@ -1087,14 +1087,93 @@ def _update_existing_control(

 def add_roles_to_control_implementation(implementation: ControlImplementation, roles: List[Dict]):
     """
-
+    Updates roles for a control implementation by checking existing roles and adding/removing as appropriate.
+    This prevents duplicate roles on successive imports.
+    :param ControlImplementation implementation: The control implementation.
+    :param List[Dict] roles: The list of roles to set.
+    """
+    if not implementation or not implementation.id:
+        logger.warning("Control implementation is missing or has no ID, cannot update roles")
+        return
+
+    try:
+        # Get existing roles for this control implementation
+        from regscale.models.regscale_models.implementation_role import ImplementationRole
+
+        # Get existing roles
+        existing_roles = ImplementationRole.get_all_by_parent(
+            parent_id=implementation.id, parent_module=implementation._module_string
+        )
+        existing_role_ids = {role.roleId for role in existing_roles if role and role.roleId}
+
+        # Get target role IDs from the new roles list
+        target_role_ids = {role.get("id") for role in roles if isinstance(role, dict) and role.get("id")}
+
+        # Find roles to add (in target but not in existing)
+        roles_to_add = target_role_ids - existing_role_ids
+
+        # Find roles to remove (in existing but not in target)
+        roles_to_remove = existing_role_ids - target_role_ids
+
+        # Add new roles
+        for role_id in roles_to_add:
+            try:
+                implementation.add_role(role_id)
+                logger.debug(f"Added role {role_id} to control implementation {implementation.id}")
+            except Exception as e:
+                logger.warning(f"Failed to add role {role_id} to control implementation {implementation.id}: {e}")
+
+        # Remove roles that are no longer needed
+        _remove_roles_from_control_implementation(implementation, roles_to_remove, existing_roles)
+
+        if roles_to_add or roles_to_remove:
+            logger.info(
+                f"Updated roles for control implementation {implementation.id}: added {len(roles_to_add)}, removed {len(roles_to_remove)}"
+            )
+        else:
+            logger.debug(f"No role changes needed for control implementation {implementation.id}")
+
+    except Exception as e:
+        logger.error(f"Error updating roles for control implementation {implementation.id}: {e}")
+        # Fallback to old behavior if there's an error
+        _fallback_add_roles_to_control_implementation(implementation, roles)
+
+
+def _remove_roles_from_control_implementation(
+    implementation: ControlImplementation, roles_to_remove: set, existing_roles: List
+):
+    """
+    Removes roles that are no longer needed from a control implementation.
+    :param ControlImplementation implementation: The control implementation to remove roles from.
+    :param set roles_to_remove: Set of role IDs that should be removed.
+    :param List existing_roles: List of existing ImplementationRole objects.
+    """
+    for role_id in roles_to_remove:
+        try:
+            # Find the ImplementationRole record to delete
+            for existing_role in existing_roles:
+                if existing_role.roleId == role_id:
+                    existing_role.delete()
+                    logger.debug(f"Removed role {role_id} from control implementation {implementation.id}")
+                    break
+        except Exception as e:
+            logger.warning(f"Failed to remove role {role_id} from control implementation {implementation.id}: {e}")
+
+
+def _fallback_add_roles_to_control_implementation(implementation: ControlImplementation, roles: List[Dict]):
+    """
+    Fallback method for adding roles to a control implementation when the main method fails.
+    This uses the old behavior of simply adding roles without checking for duplicates.
     :param ControlImplementation implementation: The control implementation.
-    :param List[Dict] roles: The list of roles.
+    :param List[Dict] roles: The list of roles to add.
     """
-    if roles and len(roles) > 0
+    if roles and len(roles) > 0:
         for role in roles:
             if isinstance(role, dict) and role.get("id"):
-
+                try:
+                    implementation.add_role(role.get("id"))
+                except Exception as add_error:
+                    logger.warning(f"Failed to add role {role.get('id')}: {add_error}")


 def get_primary_and_supporting_roles(roles: List, parent_id: int) -> Tuple[List, Dict]:
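The new role handling above reconciles the desired role set against the existing one with set differences, so repeated imports no longer pile up duplicate roles. A small sketch of that reconciliation idea, with hypothetical add/remove callables standing in for the RegScale model methods:

from typing import Callable, Dict, List, Set


def reconcile_roles(
    existing_role_ids: Set[int],
    desired_roles: List[Dict],
    add_role: Callable[[int], None],
    remove_role: Callable[[int], None],
) -> None:
    # Desired IDs come from dicts shaped like {"id": 5, ...}; malformed entries are ignored.
    target_ids = {r.get("id") for r in desired_roles if isinstance(r, dict) and r.get("id")}
    for role_id in sorted(target_ids - existing_role_ids):
        add_role(role_id)  # only roles not already present
    for role_id in sorted(existing_role_ids - target_ids):
        remove_role(role_id)  # only roles that should no longer be attached


if __name__ == "__main__":
    reconcile_roles(
        existing_role_ids={1, 2},
        desired_roles=[{"id": 2}, {"id": 3}],
        add_role=lambda rid: print(f"add {rid}"),      # prints: add 3
        remove_role=lambda rid: print(f"remove {rid}"),  # prints: remove 1
    )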
@@ -1446,7 +1525,9 @@ def handle_matching_objectives(
         part_statement = f"{part.get('value', '')}" if not new_statement else new_statement
         statements_used.append(part_statement)

-        has_existing_obj = check_for_existing_objective(
+        has_existing_obj = check_for_existing_objective(
+            control_implementation, objective, status, part_statement, origination
+        )
         if has_existing_obj:
             continue
         duplicate = True if part_statement in statements_used else False
@@ -1467,6 +1548,7 @@ def check_for_existing_objective(
     objective: ControlObjective,
     status: Optional[str],
     part_statement: str,
+    origination: Optional[str] = None,
 ) -> bool:
     """
     Check for existing implementation objectives.
@@ -1474,6 +1556,7 @@ def check_for_existing_objective(
     :param ControlObjective objective: The control objective object.
     :param Optional[str] status: The status of the implementation.
     :param str part_statement: The part statement.
+    :param Optional[str] origination: The origination of the implementation.
     :return: True if an existing implementation objective is found, False otherwise.
     :rtype: bool
     """
@@ -1495,6 +1578,8 @@ def check_for_existing_objective(
             status = status.value
         existing_obj.status = status_map.get(status, status)
         existing_obj.statement = part_statement
+        if origination is not None:
+            existing_obj.responsibility = origination
         existing_obj.parentObjectiveId = objective.id
         existing_obj.save()
         return True
@@ -1755,7 +1840,7 @@ def update_existing_control(
     if not control.stepsToImplement and steps_to_implement:
         control.stepsToImplement = steps_to_implement
     control.implementation = state_text
-    control.responsibility = responsibility
+    control.responsibility = map_responsibility(responsibility)
     control.systemRoleId = primary_role.get("id") if primary_role and isinstance(primary_role, dict) else None
     # Clean statement
     # So, exclusion
@@ -286,6 +286,8 @@ class IntegrationAsset:

     source_data: Optional[Dict[str, Any]] = None
     url: Optional[str] = None
+    software_function: Optional[str] = None
+    baseline_configuration: Optional[str] = None
     ports_and_protocols: List[Dict[str, Any]] = dataclasses.field(default_factory=list)
     software_inventory: List[Dict[str, Any]] = dataclasses.field(default_factory=list)

@@ -572,6 +574,11 @@ class ScannerIntegration(ABC):

    :param int plan_id: The ID of the security plan
    :param int tenant_id: The ID of the tenant, defaults to 1
+
+    Configuration options available via kwargs:
+    - suppress_asset_not_found_errors (bool): When True, suppresses "Asset not found" error messages
+      that are commonly logged when assets referenced in findings don't exist in RegScale.
+      This can help reduce log noise in environments with many missing assets.
    """

    stig_mapper = None
@@ -627,13 +634,18 @@ class ScannerIntegration(ABC):
     close_outdated_findings = True
     closed_count = 0

+    # Error suppression options
+    suppress_asset_not_found_errors = False
+
     def __init__(self, plan_id: int, tenant_id: int = 1, is_component: bool = False, **kwargs):
         """
         Initialize the ScannerIntegration.

         :param int plan_id: The ID of the security plan
         :param int tenant_id: The ID of the tenant, defaults to 1
+        :param bool is_component: Whether this is a component integration
         :param kwargs: Additional keyword arguments
+            - suppress_asset_not_found_errors (bool): If True, suppress "Asset not found" error messages
         """
         self.app = Application()
         self.alerted_assets: Set[str] = set()
@@ -642,6 +654,9 @@ class ScannerIntegration(ABC):
         self.plan_id: int = plan_id
         self.tenant_id: int = tenant_id
         self.is_component: bool = is_component
+
+        # Set configuration options from kwargs
+        self.suppress_asset_not_found_errors = kwargs.get("suppress_asset_not_found_errors", False)
         if self.is_component:
             self.component = regscale_models.Component.get_object(self.plan_id)
             self.parent_module: str = regscale_models.Component.get_module_string()
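Based on the constructor change above, callers appear to opt into quieter logging by passing suppress_asset_not_found_errors=True through **kwargs. A toy stand-in class (not the real ScannerIntegration) showing the same kwargs plumbing:

# Minimal stand-in mirroring how the new kwarg is read in __init__ (not the real class).
class DemoScannerIntegration:
    suppress_asset_not_found_errors = False  # class-level default, as in the diff

    def __init__(self, plan_id: int, **kwargs):
        self.plan_id = plan_id
        # Same pattern as the diff: pull the flag out of **kwargs with a False default.
        self.suppress_asset_not_found_errors = kwargs.get("suppress_asset_not_found_errors", False)

    def report_missing_asset(self, identifier: str) -> None:
        if not self.suppress_asset_not_found_errors:
            print(f"Asset not found for identifier {identifier}")


if __name__ == "__main__":
    noisy = DemoScannerIntegration(plan_id=1)
    quiet = DemoScannerIntegration(plan_id=1, suppress_asset_not_found_errors=True)
    noisy.report_missing_asset("host-01")  # message is emitted
    quiet.report_missing_asset("host-01")  # message is suppressed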
@@ -1138,6 +1153,8 @@ class ScannerIntegration(ABC):
             bAuthenticatedScan=asset.is_authenticated_scan,
             systemAdministratorId=asset.system_administrator_id,
             scanningTool=asset.scanning_tool,
+            softwareFunction=asset.software_function,
+            baselineConfiguration=asset.baseline_configuration,
         )
         if self.asset_identifier_field:
             setattr(new_asset, self.asset_identifier_field, asset.identifier)
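The two scanner hunks above add software_function and baseline_configuration to IntegrationAsset and copy them onto the RegScale asset as softwareFunction and baselineConfiguration. A pared-down sketch of that snake_case-to-camelCase mapping; the mini dataclass and payload builder below are illustrative only:

import dataclasses
from typing import Any, Dict, Optional


@dataclasses.dataclass
class MiniIntegrationAsset:
    # Illustrative stand-in; the real IntegrationAsset carries many more fields.
    identifier: str
    software_function: Optional[str] = None
    baseline_configuration: Optional[str] = None


def to_regscale_fields(asset: MiniIntegrationAsset) -> Dict[str, Any]:
    # Mirrors only the new field mapping shown in the diff; all other asset fields are omitted.
    return {
        "softwareFunction": asset.software_function,
        "baselineConfiguration": asset.baseline_configuration,
    }


if __name__ == "__main__":
    asset = MiniIntegrationAsset(
        identifier="vm-1234",
        software_function="Web server",
        baseline_configuration="CIS Ubuntu 22.04 L1",
    )
    print(to_regscale_fields(asset))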
@@ -2081,7 +2098,8 @@ class ScannerIntegration(ABC):
         asset = self.asset_map_by_identifier.get(identifier)
         if not asset and identifier not in self.alerted_assets:
             self.alerted_assets.add(identifier)
-
+            if not getattr(self, "suppress_asset_not_found_errors", False):
+                self.log_error("1. Asset not found for identifier %s", identifier)
         return asset

     def get_issue_by_integration_finding_id(self, integration_finding_id: str) -> Optional[regscale_models.Issue]:
@@ -2106,7 +2124,8 @@ class ScannerIntegration(ABC):
         """
         logger.debug("Processing checklist %s", finding.external_id)
         if not (asset := self.get_asset_by_identifier(finding.asset_identifier)):
-
+            if not getattr(self, "suppress_asset_not_found_errors", False):
+                logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
             return 0

         tool = regscale_models.ChecklistTool.STIGs
@@ -2333,7 +2352,8 @@ class ScannerIntegration(ABC):
         # Process checklist if applicable
         if self.type == ScannerIntegrationType.CHECKLIST:
             if not (asset := self.get_asset_by_identifier(finding.asset_identifier)):
-
+                if not getattr(self, "suppress_asset_not_found_errors", False):
+                    logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
                 return

             tool = regscale_models.ChecklistTool.STIGs
@@ -2471,7 +2491,10 @@ class ScannerIntegration(ABC):
             return None

         if not asset:
-
+            if not getattr(self, "suppress_asset_not_found_errors", False):
+                logger.warning(
+                    "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
+                )
             return None

         vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
regscale/models/__init__.py CHANGED
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 """standard python imports"""
-from regscale.models.app_models.click import regscale_id, regscale_module, regscale_ssp_id
+from regscale.models.app_models.click import regscale_id, regscale_module, regscale_ssp_id, ssp_or_component_id
 from .app_models import *
 from .integration_models import *
 from .regscale_models import *