regscale-cli 6.21.2.0__py3-none-any.whl → 6.28.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- regscale/_version.py +1 -1
- regscale/airflow/hierarchy.py +2 -2
- regscale/core/app/api.py +5 -2
- regscale/core/app/application.py +36 -6
- regscale/core/app/internal/control_editor.py +73 -21
- regscale/core/app/internal/evidence.py +727 -204
- regscale/core/app/internal/login.py +4 -2
- regscale/core/app/internal/model_editor.py +219 -64
- regscale/core/app/utils/app_utils.py +86 -12
- regscale/core/app/utils/catalog_utils/common.py +1 -1
- regscale/core/login.py +21 -4
- regscale/core/utils/async_graphql_client.py +363 -0
- regscale/core/utils/date.py +77 -1
- regscale/dev/cli.py +26 -0
- regscale/dev/code_gen.py +109 -24
- regscale/dev/version.py +72 -0
- regscale/integrations/commercial/__init__.py +30 -2
- regscale/integrations/commercial/aws/audit_manager_compliance.py +3908 -0
- regscale/integrations/commercial/aws/cli.py +3107 -54
- regscale/integrations/commercial/aws/cloudtrail_control_mappings.py +333 -0
- regscale/integrations/commercial/aws/cloudtrail_evidence.py +501 -0
- regscale/integrations/commercial/aws/cloudwatch_control_mappings.py +357 -0
- regscale/integrations/commercial/aws/cloudwatch_evidence.py +490 -0
- regscale/integrations/commercial/{amazon → aws}/common.py +71 -19
- regscale/integrations/commercial/aws/config_compliance.py +914 -0
- regscale/integrations/commercial/aws/conformance_pack_mappings.py +198 -0
- regscale/integrations/commercial/aws/control_compliance_analyzer.py +439 -0
- regscale/integrations/commercial/aws/evidence_generator.py +283 -0
- regscale/integrations/commercial/aws/guardduty_control_mappings.py +340 -0
- regscale/integrations/commercial/aws/guardduty_evidence.py +1053 -0
- regscale/integrations/commercial/aws/iam_control_mappings.py +368 -0
- regscale/integrations/commercial/aws/iam_evidence.py +574 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +338 -22
- regscale/integrations/commercial/aws/inventory/base.py +107 -5
- regscale/integrations/commercial/aws/inventory/resources/analytics.py +390 -0
- regscale/integrations/commercial/aws/inventory/resources/applications.py +234 -0
- regscale/integrations/commercial/aws/inventory/resources/audit_manager.py +513 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail.py +315 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail_logs_metadata.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudwatch.py +191 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +328 -9
- regscale/integrations/commercial/aws/inventory/resources/config.py +464 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +74 -9
- regscale/integrations/commercial/aws/inventory/resources/database.py +481 -31
- regscale/integrations/commercial/aws/inventory/resources/developer_tools.py +253 -0
- regscale/integrations/commercial/aws/inventory/resources/guardduty.py +286 -0
- regscale/integrations/commercial/aws/inventory/resources/iam.py +470 -0
- regscale/integrations/commercial/aws/inventory/resources/inspector.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +175 -61
- regscale/integrations/commercial/aws/inventory/resources/kms.py +447 -0
- regscale/integrations/commercial/aws/inventory/resources/machine_learning.py +358 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +390 -67
- regscale/integrations/commercial/aws/inventory/resources/s3.py +394 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +268 -72
- regscale/integrations/commercial/aws/inventory/resources/securityhub.py +473 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +288 -29
- regscale/integrations/commercial/aws/inventory/resources/systems_manager.py +657 -0
- regscale/integrations/commercial/aws/inventory/resources/vpc.py +655 -0
- regscale/integrations/commercial/aws/kms_control_mappings.py +288 -0
- regscale/integrations/commercial/aws/kms_evidence.py +879 -0
- regscale/integrations/commercial/aws/ocsf/__init__.py +7 -0
- regscale/integrations/commercial/aws/ocsf/constants.py +115 -0
- regscale/integrations/commercial/aws/ocsf/mapper.py +435 -0
- regscale/integrations/commercial/aws/org_control_mappings.py +286 -0
- regscale/integrations/commercial/aws/org_evidence.py +666 -0
- regscale/integrations/commercial/aws/s3_control_mappings.py +356 -0
- regscale/integrations/commercial/aws/s3_evidence.py +632 -0
- regscale/integrations/commercial/aws/scanner.py +1072 -205
- regscale/integrations/commercial/aws/security_hub.py +319 -0
- regscale/integrations/commercial/aws/session_manager.py +282 -0
- regscale/integrations/commercial/aws/ssm_control_mappings.py +291 -0
- regscale/integrations/commercial/aws/ssm_evidence.py +492 -0
- regscale/integrations/commercial/jira.py +489 -153
- regscale/integrations/commercial/microsoft_defender/defender.py +326 -5
- regscale/integrations/commercial/microsoft_defender/defender_api.py +348 -14
- regscale/integrations/commercial/microsoft_defender/defender_constants.py +157 -0
- regscale/integrations/commercial/qualys/__init__.py +167 -68
- regscale/integrations/commercial/qualys/scanner.py +305 -39
- regscale/integrations/commercial/sarif/sairf_importer.py +432 -0
- regscale/integrations/commercial/sarif/sarif_converter.py +67 -0
- regscale/integrations/commercial/sicura/api.py +79 -42
- regscale/integrations/commercial/sicura/commands.py +8 -2
- regscale/integrations/commercial/sicura/scanner.py +83 -44
- regscale/integrations/commercial/stigv2/ckl_parser.py +5 -5
- regscale/integrations/commercial/synqly/assets.py +133 -16
- regscale/integrations/commercial/synqly/edr.py +2 -8
- regscale/integrations/commercial/synqly/query_builder.py +536 -0
- regscale/integrations/commercial/synqly/ticketing.py +27 -0
- regscale/integrations/commercial/synqly/vulnerabilities.py +165 -28
- regscale/integrations/commercial/tenablev2/cis_parsers.py +453 -0
- regscale/integrations/commercial/tenablev2/cis_scanner.py +447 -0
- regscale/integrations/commercial/tenablev2/commands.py +146 -5
- regscale/integrations/commercial/tenablev2/scanner.py +1 -3
- regscale/integrations/commercial/tenablev2/stig_parsers.py +113 -57
- regscale/integrations/commercial/wizv2/WizDataMixin.py +1 -1
- regscale/integrations/commercial/wizv2/click.py +191 -76
- regscale/integrations/commercial/wizv2/compliance/__init__.py +15 -0
- regscale/integrations/commercial/wizv2/{policy_compliance_helpers.py → compliance/helpers.py} +78 -60
- regscale/integrations/commercial/wizv2/compliance_report.py +1592 -0
- regscale/integrations/commercial/wizv2/core/__init__.py +133 -0
- regscale/integrations/commercial/wizv2/{async_client.py → core/client.py} +7 -3
- regscale/integrations/commercial/wizv2/{constants.py → core/constants.py} +92 -89
- regscale/integrations/commercial/wizv2/core/file_operations.py +237 -0
- regscale/integrations/commercial/wizv2/fetchers/__init__.py +11 -0
- regscale/integrations/commercial/wizv2/{data_fetcher.py → fetchers/policy_assessment.py} +66 -9
- regscale/integrations/commercial/wizv2/file_cleanup.py +104 -0
- regscale/integrations/commercial/wizv2/issue.py +776 -28
- regscale/integrations/commercial/wizv2/models/__init__.py +0 -0
- regscale/integrations/commercial/wizv2/parsers/__init__.py +34 -0
- regscale/integrations/commercial/wizv2/{parsers.py → parsers/main.py} +1 -1
- regscale/integrations/commercial/wizv2/processors/__init__.py +11 -0
- regscale/integrations/commercial/wizv2/{finding_processor.py → processors/finding.py} +1 -1
- regscale/integrations/commercial/wizv2/reports.py +243 -0
- regscale/integrations/commercial/wizv2/sbom.py +1 -1
- regscale/integrations/commercial/wizv2/scanner.py +1031 -441
- regscale/integrations/commercial/wizv2/utils/__init__.py +48 -0
- regscale/integrations/commercial/wizv2/{utils.py → utils/main.py} +116 -61
- regscale/integrations/commercial/wizv2/variables.py +89 -3
- regscale/integrations/compliance_integration.py +1036 -151
- regscale/integrations/control_matcher.py +432 -0
- regscale/integrations/due_date_handler.py +333 -0
- regscale/integrations/milestone_manager.py +291 -0
- regscale/integrations/public/__init__.py +14 -0
- regscale/integrations/public/cci_importer.py +834 -0
- regscale/integrations/public/csam/__init__.py +0 -0
- regscale/integrations/public/csam/csam.py +938 -0
- regscale/integrations/public/csam/csam_agency_defined.py +179 -0
- regscale/integrations/public/csam/csam_common.py +154 -0
- regscale/integrations/public/csam/csam_controls.py +432 -0
- regscale/integrations/public/csam/csam_poam.py +124 -0
- regscale/integrations/public/fedramp/click.py +77 -6
- regscale/integrations/public/fedramp/docx_parser.py +10 -1
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +675 -289
- regscale/integrations/public/fedramp/fedramp_five.py +1 -1
- regscale/integrations/public/fedramp/poam/scanner.py +75 -7
- regscale/integrations/public/fedramp/poam_export_v5.py +888 -0
- regscale/integrations/scanner_integration.py +1961 -430
- regscale/models/integration_models/CCI_List.xml +1 -0
- regscale/models/integration_models/aqua.py +2 -2
- regscale/models/integration_models/cisa_kev_data.json +805 -11
- regscale/models/integration_models/flat_file_importer/__init__.py +5 -8
- regscale/models/integration_models/nexpose.py +36 -10
- regscale/models/integration_models/qualys.py +3 -4
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +87 -18
- regscale/models/integration_models/synqly_models/filter_parser.py +332 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +124 -25
- regscale/models/integration_models/synqly_models/synqly_model.py +89 -16
- regscale/models/locking.py +12 -8
- regscale/models/platform.py +4 -2
- regscale/models/regscale_models/__init__.py +7 -0
- regscale/models/regscale_models/assessment.py +2 -1
- regscale/models/regscale_models/catalog.py +1 -1
- regscale/models/regscale_models/compliance_settings.py +251 -1
- regscale/models/regscale_models/component.py +1 -0
- regscale/models/regscale_models/control_implementation.py +236 -41
- regscale/models/regscale_models/control_objective.py +74 -5
- regscale/models/regscale_models/file.py +2 -0
- regscale/models/regscale_models/form_field_value.py +5 -3
- regscale/models/regscale_models/inheritance.py +44 -0
- regscale/models/regscale_models/issue.py +301 -102
- regscale/models/regscale_models/milestone.py +33 -14
- regscale/models/regscale_models/organization.py +3 -0
- regscale/models/regscale_models/regscale_model.py +310 -73
- regscale/models/regscale_models/security_plan.py +4 -2
- regscale/models/regscale_models/vulnerability.py +3 -3
- regscale/regscale.py +25 -4
- regscale/templates/__init__.py +0 -0
- regscale/utils/threading/threadhandler.py +20 -15
- regscale/validation/record.py +23 -1
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/METADATA +17 -33
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/RECORD +310 -111
- tests/core/__init__.py +0 -0
- tests/core/utils/__init__.py +0 -0
- tests/core/utils/test_async_graphql_client.py +472 -0
- tests/fixtures/test_fixture.py +13 -8
- tests/regscale/core/test_login.py +171 -4
- tests/regscale/integrations/commercial/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_compliance.py +1304 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_evidence_aggregation.py +341 -0
- tests/regscale/integrations/commercial/aws/test_aws_analytics_collector.py +260 -0
- tests/regscale/integrations/commercial/aws/test_aws_applications_collector.py +242 -0
- tests/regscale/integrations/commercial/aws/test_aws_audit_manager_collector.py +1155 -0
- tests/regscale/integrations/commercial/aws/test_aws_cloudtrail_collector.py +534 -0
- tests/regscale/integrations/commercial/aws/test_aws_config_collector.py +400 -0
- tests/regscale/integrations/commercial/aws/test_aws_developer_tools_collector.py +203 -0
- tests/regscale/integrations/commercial/aws/test_aws_guardduty_collector.py +315 -0
- tests/regscale/integrations/commercial/aws/test_aws_iam_collector.py +458 -0
- tests/regscale/integrations/commercial/aws/test_aws_inspector_collector.py +353 -0
- tests/regscale/integrations/commercial/aws/test_aws_inventory_integration.py +530 -0
- tests/regscale/integrations/commercial/aws/test_aws_kms_collector.py +919 -0
- tests/regscale/integrations/commercial/aws/test_aws_machine_learning_collector.py +237 -0
- tests/regscale/integrations/commercial/aws/test_aws_s3_collector.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_scanner_integration.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_securityhub_collector.py +792 -0
- tests/regscale/integrations/commercial/aws/test_aws_systems_manager_collector.py +918 -0
- tests/regscale/integrations/commercial/aws/test_aws_vpc_collector.py +996 -0
- tests/regscale/integrations/commercial/aws/test_cli_evidence.py +431 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_control_mappings.py +452 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_evidence.py +788 -0
- tests/regscale/integrations/commercial/aws/test_config_compliance.py +298 -0
- tests/regscale/integrations/commercial/aws/test_conformance_pack_mappings.py +200 -0
- tests/regscale/integrations/commercial/aws/test_control_compliance_analyzer.py +375 -0
- tests/regscale/integrations/commercial/aws/test_datetime_parsing.py +223 -0
- tests/regscale/integrations/commercial/aws/test_evidence_generator.py +386 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_control_mappings.py +564 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_evidence.py +1041 -0
- tests/regscale/integrations/commercial/aws/test_iam_control_mappings.py +718 -0
- tests/regscale/integrations/commercial/aws/test_iam_evidence.py +1375 -0
- tests/regscale/integrations/commercial/aws/test_kms_control_mappings.py +656 -0
- tests/regscale/integrations/commercial/aws/test_kms_evidence.py +1163 -0
- tests/regscale/integrations/commercial/aws/test_ocsf_mapper.py +370 -0
- tests/regscale/integrations/commercial/aws/test_org_control_mappings.py +546 -0
- tests/regscale/integrations/commercial/aws/test_org_evidence.py +1240 -0
- tests/regscale/integrations/commercial/aws/test_s3_control_mappings.py +672 -0
- tests/regscale/integrations/commercial/aws/test_s3_evidence.py +987 -0
- tests/regscale/integrations/commercial/aws/test_scanner_evidence.py +373 -0
- tests/regscale/integrations/commercial/aws/test_security_hub_config_filtering.py +539 -0
- tests/regscale/integrations/commercial/aws/test_session_manager.py +516 -0
- tests/regscale/integrations/commercial/aws/test_ssm_control_mappings.py +588 -0
- tests/regscale/integrations/commercial/aws/test_ssm_evidence.py +735 -0
- tests/regscale/integrations/commercial/conftest.py +28 -0
- tests/regscale/integrations/commercial/microsoft_defender/__init__.py +1 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender.py +1517 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_api.py +1748 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_constants.py +327 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_scanner.py +487 -0
- tests/regscale/integrations/commercial/test_aws.py +3742 -0
- tests/regscale/integrations/commercial/test_burp.py +48 -0
- tests/regscale/integrations/commercial/test_crowdstrike.py +49 -0
- tests/regscale/integrations/commercial/test_dependabot.py +341 -0
- tests/regscale/integrations/commercial/test_gcp.py +1543 -0
- tests/regscale/integrations/commercial/test_gitlab.py +549 -0
- tests/regscale/integrations/commercial/test_ip_mac_address_length.py +84 -0
- tests/regscale/integrations/commercial/test_jira.py +2204 -0
- tests/regscale/integrations/commercial/test_npm_audit.py +42 -0
- tests/regscale/integrations/commercial/test_okta.py +1228 -0
- tests/regscale/integrations/commercial/test_sarif_converter.py +251 -0
- tests/regscale/integrations/commercial/test_sicura.py +349 -0
- tests/regscale/integrations/commercial/test_snow.py +423 -0
- tests/regscale/integrations/commercial/test_sonarcloud.py +394 -0
- tests/regscale/integrations/commercial/test_sqlserver.py +186 -0
- tests/regscale/integrations/commercial/test_stig.py +33 -0
- tests/regscale/integrations/commercial/test_stig_mapper.py +153 -0
- tests/regscale/integrations/commercial/test_stigv2.py +406 -0
- tests/regscale/integrations/commercial/test_wiz.py +1365 -0
- tests/regscale/integrations/commercial/test_wiz_inventory.py +256 -0
- tests/regscale/integrations/commercial/wizv2/__init__.py +339 -0
- tests/regscale/integrations/commercial/wizv2/compliance/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/compliance/test_helpers.py +903 -0
- tests/regscale/integrations/commercial/wizv2/core/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/core/test_auth.py +701 -0
- tests/regscale/integrations/commercial/wizv2/core/test_client.py +1037 -0
- tests/regscale/integrations/commercial/wizv2/core/test_file_operations.py +989 -0
- tests/regscale/integrations/commercial/wizv2/fetchers/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/fetchers/test_policy_assessment.py +805 -0
- tests/regscale/integrations/commercial/wizv2/parsers/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/parsers/test_main.py +1153 -0
- tests/regscale/integrations/commercial/wizv2/processors/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/processors/test_finding.py +671 -0
- tests/regscale/integrations/commercial/wizv2/test_WizDataMixin.py +537 -0
- tests/regscale/integrations/commercial/wizv2/test_click_comprehensive.py +851 -0
- tests/regscale/integrations/commercial/wizv2/test_compliance_report_comprehensive.py +910 -0
- tests/regscale/integrations/commercial/wizv2/test_compliance_report_normalization.py +138 -0
- tests/regscale/integrations/commercial/wizv2/test_file_cleanup.py +283 -0
- tests/regscale/integrations/commercial/wizv2/test_file_operations.py +260 -0
- tests/regscale/integrations/commercial/wizv2/test_issue.py +343 -0
- tests/regscale/integrations/commercial/wizv2/test_issue_comprehensive.py +1203 -0
- tests/regscale/integrations/commercial/wizv2/test_reports.py +497 -0
- tests/regscale/integrations/commercial/wizv2/test_sbom.py +643 -0
- tests/regscale/integrations/commercial/wizv2/test_scanner_comprehensive.py +805 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_click_client_id.py +165 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_report.py +1394 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_unit.py +341 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_control_normalization.py +138 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_findings_comprehensive.py +364 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_inventory_comprehensive.py +644 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_status_mapping.py +149 -0
- tests/regscale/integrations/commercial/wizv2/test_wizv2.py +1218 -0
- tests/regscale/integrations/commercial/wizv2/test_wizv2_utils.py +519 -0
- tests/regscale/integrations/commercial/wizv2/utils/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/utils/test_main.py +1523 -0
- tests/regscale/integrations/public/__init__.py +0 -0
- tests/regscale/integrations/public/fedramp/__init__.py +1 -0
- tests/regscale/integrations/public/fedramp/test_gen_asset_list.py +150 -0
- tests/regscale/integrations/public/fedramp/test_poam_export_v5.py +1293 -0
- tests/regscale/integrations/public/test_alienvault.py +220 -0
- tests/regscale/integrations/public/test_cci.py +1053 -0
- tests/regscale/integrations/public/test_cisa.py +1021 -0
- tests/regscale/integrations/public/test_emass.py +518 -0
- tests/regscale/integrations/public/test_fedramp.py +1152 -0
- tests/regscale/integrations/public/test_fedramp_cis_crm.py +3661 -0
- tests/regscale/integrations/public/test_file_uploads.py +506 -0
- tests/regscale/integrations/public/test_oscal.py +453 -0
- tests/regscale/integrations/test_compliance_status_mapping.py +406 -0
- tests/regscale/integrations/test_control_matcher.py +1421 -0
- tests/regscale/integrations/test_control_matching.py +155 -0
- tests/regscale/integrations/test_milestone_manager.py +408 -0
- tests/regscale/models/test_control_implementation.py +118 -3
- tests/regscale/models/test_form_field_value_integration.py +304 -0
- tests/regscale/models/test_issue.py +378 -1
- tests/regscale/models/test_module_integration.py +582 -0
- tests/regscale/models/test_tenable_integrations.py +811 -105
- regscale/integrations/commercial/wizv2/policy_compliance.py +0 -3057
- regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +0 -7388
- regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +0 -9605
- regscale/integrations/public/fedramp/parts_mapper.py +0 -107
- /regscale/integrations/commercial/{amazon → sarif}/__init__.py +0 -0
- /regscale/integrations/commercial/wizv2/{wiz_auth.py → core/auth.py} +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/top_level.txt +0 -0
@@ -22,10 +22,12 @@ from regscale.core.app.application import Application
 from regscale.core.app.utils.api_handler import APIHandler
 from regscale.core.app.utils.app_utils import create_progress_object, get_current_datetime
 from regscale.core.app.utils.catalog_utils.common import objective_to_control_dot
-from regscale.core.utils.date import date_obj, date_str, datetime_str
+from regscale.core.utils.date import date_obj, date_str, datetime_str
 from regscale.integrations.commercial.durosuite.process_devices import scan_durosuite_devices
 from regscale.integrations.commercial.durosuite.variables import DuroSuiteVariables
 from regscale.integrations.commercial.stig_mapper_integration.mapping_engine import StigMappingEngine as STIGMapper
+from regscale.integrations.due_date_handler import DueDateHandler
+from regscale.integrations.milestone_manager import MilestoneManager
 from regscale.integrations.public.cisa import pull_cisa_kev
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import DateTimeEncoder, OpenIssueDict, Property, regscale_models
@@ -47,6 +49,31 @@ def get_thread_workers_max() -> int:
     return ScannerVariables.threadMaxWorkers
 
 
+def _create_config_override(
+    config: Optional[Dict[str, Dict]],
+    integration_name: str,
+    critical: Optional[int],
+    high: Optional[int],
+    moderate: Optional[int],
+    low: Optional[int],
+) -> Dict[str, Dict]:
+    """Create a config override for legacy parameter support."""
+    override_config = config.copy() if config else {}
+    if "issues" not in override_config:
+        override_config["issues"] = {}
+    if integration_name not in override_config["issues"]:
+        override_config["issues"][integration_name] = {}
+
+    integration_config = override_config["issues"][integration_name]
+    severity_params = {"critical": critical, "high": high, "moderate": moderate, "low": low}
+
+    for param_name, param_value in severity_params.items():
+        if param_value is not None:
+            integration_config[param_name] = param_value
+
+    return override_config
+
+
 def issue_due_date(
     severity: regscale_models.IssueSeverity,
     created_date: str,
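The new module-level helper folds legacy per-severity CLI arguments into the nested issues config. A minimal traced sketch of the resulting shape (integration name and day counts are illustrative):

override = _create_config_override(
    config={"issues": {"wiz": {"low": 90}}},
    integration_name="wiz",
    critical=7,
    high=30,
    moderate=None,
    low=None,
)
# Only non-None parameters overwrite the per-integration section:
assert override == {"issues": {"wiz": {"low": 90, "critical": 7, "high": 30}}}

Note that config.copy() is shallow, so the nested "issues" dict is shared with the caller's config.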
@@ -60,6 +87,9 @@ def issue_due_date(
     """
     Calculate the due date for an issue based on its severity and creation date.
 
+    DEPRECATED: This function is kept for backward compatibility. New code should use DueDateHandler directly.
+    This function now uses DueDateHandler internally to ensure consistent behavior and proper validation.
+
     :param regscale_models.IssueSeverity severity: The severity of the issue.
     :param str created_date: The creation date of the issue.
     :param Optional[int] critical: Days until due for high severity issues.
@@ -71,40 +101,19 @@ def issue_due_date(
     :return: The due date for the issue.
     :rtype: str
     """
-
-
-    if
-
-
-
-
-
-
-
-
-
-
-        regscale_models.IssueSeverity.Critical: critical,
-        regscale_models.IssueSeverity.High: high,
-        regscale_models.IssueSeverity.Moderate: moderate,
-        regscale_models.IssueSeverity.Low: low,
-    }
-
-    if title and config:
-        # if title in a config key, use that key
-        issues_dict = config.get("issues", {})
-        matching_key = next((key.lower() for key in issues_dict if title.lower() in key.lower()), None)
-        if matching_key:
-            title_config = issues_dict.get(matching_key, {})
-            due_date_map = {
-                regscale_models.IssueSeverity.Critical: title_config.get("critical", critical),
-                regscale_models.IssueSeverity.High: title_config.get("high", high),
-                regscale_models.IssueSeverity.Moderate: title_config.get("moderate", moderate),
-                regscale_models.IssueSeverity.Low: title_config.get("low", low),
-            }
-
-    days = due_date_map.get(severity, low)
-    return date_str(get_day_increment(start=created_date, days=days))
+    integration_name = title or "default"
+
+    # Check if individual parameters need config override
+    if any(param is not None for param in [critical, high, moderate, low]):
+        config = _create_config_override(config, integration_name, critical, high, moderate, low)
+
+    due_date_handler = DueDateHandler(integration_name, config=config)
+    return due_date_handler.calculate_due_date(
+        severity=severity,
+        created_date=created_date,
+        cve=None,  # Legacy function doesn't have CVE parameter
+        title=title,
+    )
 
 
 class ManagedDefaultDict(Generic[K, V]):
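After this rewrite, the legacy issue_due_date is a thin wrapper: explicit severity arguments become a config override and the calculation is delegated to DueDateHandler. A hedged usage sketch (argument values are illustrative; the created_date format is assumed to be an ISO-style timestamp):

due = issue_due_date(
    severity=regscale_models.IssueSeverity.High,
    created_date="2025-01-01T00:00:00",  # illustrative creation timestamp
    high=30,        # High findings due 30 days after creation
    title="wiz",    # integration name used as the config section key
    config=app.config,
)
# Internally equivalent to DueDateHandler("wiz", config=...).calculate_due_date(...)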
@@ -322,6 +331,8 @@ class IntegrationFinding:
     :param str impact: The impact of the finding, defaults to an empty string.
     :param str recommendation_for_mitigation: Recommendations for mitigating the finding, defaults to an empty string.
     :param str asset_identifier: The identifier of the asset associated with the finding, defaults to an empty string.
+    :param str issue_asset_identifier_value: This is the value of all the assets affected by the issue, defaults to an
+        empty string.
     :param Optional[str] cci_ref: The Common Configuration Enumeration reference for the finding, defaults to None.
     :param str rule_id: The rule ID of the finding, defaults to an empty string.
     :param str rule_version: The version of the rule associated with the finding, defaults to an empty string.
@@ -418,6 +429,7 @@ class IntegrationFinding:
     impact: str = ""
     recommendation_for_mitigation: str = ""
     asset_identifier: str = ""
+    issue_asset_identifier_value: Optional[str] = None
     comments: Optional[str] = None
     source_report: Optional[str] = None
     point_of_contact: Optional[str] = None
@@ -637,6 +649,9 @@ class ScannerIntegration(ABC):
     # Error suppression options
     suppress_asset_not_found_errors = False
 
+    # CCI mapping flag - set to False for integrations that don't use CCI references
+    enable_cci_mapping = True
+
     def __init__(self, plan_id: int, tenant_id: int = 1, is_component: bool = False, **kwargs):
         """
         Initialize the ScannerIntegration.
@@ -646,6 +661,7 @@ class ScannerIntegration(ABC):
         :param bool is_component: Whether this is a component integration
         :param kwargs: Additional keyword arguments
             - suppress_asset_not_found_errors (bool): If True, suppress "Asset not found" error messages
+            - import_all_findings (bool): If True, import findings even if they are not associated to an asset
         """
         self.app = Application()
         self.alerted_assets: Set[str] = set()
@@ -657,6 +673,14 @@ class ScannerIntegration(ABC):
 
         # Set configuration options from kwargs
         self.suppress_asset_not_found_errors = kwargs.get("suppress_asset_not_found_errors", False)
+        self.import_all_findings = kwargs.get("import_all_findings", False)
+
+        # Initialize due date handler for this integration
+        self.due_date_handler = DueDateHandler(self.title, config=self.app.config)
+
+        # Initialize milestone manager for this integration
+        self.milestone_manager = None  # Lazy initialization after scan_date is set
+
         if self.is_component:
             self.component = regscale_models.Component.get_object(self.plan_id)
             self.parent_module: str = regscale_models.Component.get_module_string()
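A sketch of how a concrete integration could opt into the new constructor options (MyScanner is a hypothetical ScannerIntegration subclass):

scanner = MyScanner(
    plan_id=123,                            # illustrative security plan ID
    suppress_asset_not_found_errors=True,   # existing option
    import_all_findings=True,               # new: keep findings with no matching asset
)
# The constructor eagerly wires scanner.due_date_handler, while
# scanner.milestone_manager stays None until get_milestone_manager() is called.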
@@ -692,8 +716,12 @@ class ScannerIntegration(ABC):
 
         self.cci_to_control_map: ThreadSafeDict[str, set[int]] = ThreadSafeDict()
         self._no_ccis: bool = False
+        self._cci_map_loaded: bool = False
         self.cci_to_control_map_lock: threading.Lock = threading.Lock()
 
+        # Lock for thread-safe scan history count updates
+        self.scan_history_lock: threading.RLock = threading.RLock()
+
         self.assessment_map: ThreadSafeDict[int, regscale_models.Assessment] = ThreadSafeDict()
         self.assessor_id: str = self.get_assessor_id()
         self.asset_progress: Progress = create_progress_object()
@@ -704,6 +732,12 @@ class ScannerIntegration(ABC):
             thread_safe_kev_data.update(kev_data)
         self._kev_data = thread_safe_kev_data
 
+        # Issue lookup cache for performance optimization
+        # Eliminates N+1 API calls by caching issues and indexing by integrationFindingId
+        # Populated lazily on first use during findings processing
+        self._integration_finding_id_cache: Optional[ThreadSafeDict[str, List[regscale_models.Issue]]] = None
+        self._issue_cache_lock: threading.RLock = threading.RLock()
+
     @classmethod
     def _get_lock(cls, key: str) -> threading.RLock:
         """
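The comments above describe the cache's purpose; a minimal sketch of the index shape they imply (hypothetical loop; the actual population happens in _populate_issue_lookup_cache, shown at the end of this diff):

cache: dict[str, list] = {}
for issue in all_plan_issues:  # hypothetical: plan issues fetched once
    cache.setdefault(issue.integrationFindingId, []).append(issue)
# Later lookups by finding ID are dictionary hits instead of per-finding API calls.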
@@ -722,6 +756,21 @@ class ScannerIntegration(ABC):
             cls._lock_registry[key] = lock
         return lock
 
+    def get_milestone_manager(self) -> MilestoneManager:
+        """
+        Get or initialize the milestone manager.
+
+        :return: MilestoneManager instance
+        :rtype: MilestoneManager
+        """
+        if self.milestone_manager is None:
+            self.milestone_manager = MilestoneManager(
+                integration_title=self.title,
+                assessor_id=self.assessor_id,
+                scan_date=self.scan_date or get_current_datetime(),
+            )
+        return self.milestone_manager
+
     @staticmethod
     def load_stig_mapper() -> Optional[STIGMapper]:
         """
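get_milestone_manager is a memoized lazy accessor: construction waits until scan_date is known, and repeated calls return the same instance.

mm = scanner.get_milestone_manager()          # built on first call
assert mm is scanner.get_milestone_manager()  # reused afterwards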
@@ -752,6 +801,74 @@ class ScannerIntegration(ABC):
 
         return regscale_models.Issue.get_user_id()
 
+    def get_user_organization_id(self, user_id: Optional[str]) -> Optional[int]:
+        """
+        Get the organization ID for a user.
+
+        :param Optional[str] user_id: The user ID to look up
+        :return: The organization ID or None if not found
+        :rtype: Optional[int]
+        """
+        if not user_id:
+            return None
+
+        try:
+            from regscale.models import User
+
+            user = User.get_object(user_id)
+            return user.orgId if user else None
+        except Exception as e:
+            logger.debug(f"Unable to get user organization for user {user_id}: {e}")
+            return None
+
+    def get_ssp_organization_id(self) -> Optional[int]:
+        """
+        Get the organization ID from the security plan.
+
+        :return: The organization ID or None if not found
+        :rtype: Optional[int]
+        """
+        try:
+            from regscale.models import SecurityPlan
+
+            if ssp := SecurityPlan.get_object(self.plan_id):
+                # First try to get organization from SSP owner
+                if getattr(ssp, "systemOwnerId"):
+                    if owner_org_id := self.get_user_organization_id(ssp.systemOwnerId):
+                        return owner_org_id
+                # Fallback to SSP's direct organization
+                return ssp.orgId
+        except Exception as e:
+            logger.debug(f"Unable to get SSP organization for plan {self.plan_id}: {e}")
+
+        return None
+
+    def determine_issue_organization_id(self, issue_owner_id: Optional[str]) -> Optional[int]:
+        """
+        Determine the organization ID for an issue based on the expected behavior:
+
+        1. If Issue Owner is set and has an Org, use Issue Owner's Org
+        2. Else if SSP Owner has an Org, use SSP Owner's Org
+        3. Else use SSP's Org if set
+
+        :param Optional[str] issue_owner_id: The issue owner ID
+        :return: The organization ID or None
+        :rtype: Optional[int]
+        """
+        # First check if issue owner has an organization
+        if issue_owner_id:
+            if owner_org_id := self.get_user_organization_id(issue_owner_id):
+                logger.debug(f"Setting issue organization {owner_org_id} from issue owner {issue_owner_id}")
+                return owner_org_id
+
+        # Fallback to SSP organization (which includes SSP owner check)
+        if ssp_org_id := self.get_ssp_organization_id():
+            logger.debug(f"Setting issue organization {ssp_org_id} from SSP {self.plan_id}")
+            return ssp_org_id
+
+        logger.debug(f"No organization found for issue owner {issue_owner_id} or SSP {self.plan_id}")
+        return None
+
     def get_cci_to_control_map(self) -> ThreadSafeDict[str, set[int]] | dict:
         """
         Gets the CCI to control map
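Together these methods implement a three-step fallback for an issue's organization: the issue owner's org, then the SSP owner's org, then the SSP's own org. The same chain restated as a standalone sketch (lookup results are hypothetical):

def resolve_org(issue_owner_org, ssp_owner_org, ssp_org):
    """Return the first available organization ID in priority order."""
    for candidate in (issue_owner_org, ssp_owner_org, ssp_org):
        if candidate is not None:
            return candidate
    return None

assert resolve_org(None, 42, 7) == 42  # SSP owner's org wins when the issue owner has none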
@@ -759,15 +876,33 @@ class ScannerIntegration(ABC):
         :return: The CCI to control map
         :rtype: ThreadSafeDict[str, set[int]] | dict
         """
+        # If we know there are no CCIs, return immediately
         if self._no_ccis:
             return self.cci_to_control_map
+
+        # If we've already loaded (or attempted to load) the map, return it
+        if self._cci_map_loaded:
+            return self.cci_to_control_map
+
         with self.cci_to_control_map_lock:
-
+            # Double-check inside the lock
+            if self._cci_map_loaded:
                 return self.cci_to_control_map
-
-
-
+
+            logger.debug("Loading CCI to control map...")
+            try:
+                loaded_map = regscale_models.map_ccis_to_control_ids(parent_id=self.plan_id)  # type: ignore
+                if loaded_map:
+                    self.cci_to_control_map.update(loaded_map)
+                else:
+                    self._no_ccis = True
+            except Exception as e:
+                logger.debug(f"Could not load CCI to control map: {e}")
                 self._no_ccis = True
+            finally:
+                # Mark as loaded regardless of success/failure to prevent repeated attempts
+                self._cci_map_loaded = True
+
         return self.cci_to_control_map
 
     def get_control_to_cci_map(self) -> dict[int, set[str]]:
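The rewritten loader is double-checked locking memoization: a lock-free fast path, a re-check under the lock, and a finally block that marks the attempt complete so a failed load is not retried on every call. The pattern in isolation (generic sketch, not the RegScale API):

import threading

class LazyMap:
    def __init__(self, loader):
        self._loader = loader  # callable returning a dict; may raise
        self._map: dict = {}
        self._loaded = False
        self._lock = threading.Lock()

    def get(self) -> dict:
        if self._loaded:  # fast path, no lock taken
            return self._map
        with self._lock:
            if self._loaded:  # double-check: another thread may have loaded it
                return self._map
            try:
                self._map.update(self._loader() or {})
            except Exception:
                pass  # a failed load still counts as attempted
            finally:
                self._loaded = True  # never retry on subsequent calls
        return self._map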
@@ -914,15 +1049,18 @@ class ScannerIntegration(ABC):
             return res[:450]
         return prefix[:450]
 
-    def get_or_create_assessment(
+    def get_or_create_assessment(
+        self, control_implementation_id: int, status: Optional[regscale_models.AssessmentResultsStatus] = None
+    ) -> regscale_models.Assessment:
         """
-        Gets or creates a RegScale assessment
+        Gets or creates a RegScale assessment.
 
         :param int control_implementation_id: The ID of the control implementation
+        :param Optional[regscale_models.AssessmentResultsStatus] status: Optional status override (used by cci_assessment)
         :return: The assessment
         :rtype: regscale_models.Assessment
         """
-        logger.
+        logger.debug("Getting or create assessment for control implementation %d", control_implementation_id)
         assessment: Optional[regscale_models.Assessment] = self.assessment_map.get(control_implementation_id)
         if assessment:
             logger.debug(
@@ -934,7 +1072,7 @@ class ScannerIntegration(ABC):
             plannedStart=get_current_datetime(),
             plannedFinish=get_current_datetime(),
             status=regscale_models.AssessmentStatus.COMPLETE.value,
-            assessmentResult=regscale_models.AssessmentResultsStatus.FAIL.value,
+            assessmentResult=status.value if status else regscale_models.AssessmentResultsStatus.FAIL.value,
             actualFinish=get_current_datetime(),
             leadAssessorId=self.assessor_id,
             parentId=control_implementation_id,
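Callers can now override the recorded result instead of always getting FAIL (sketch; some_status stands in for any AssessmentResultsStatus member):

# Default behavior, unchanged: assessmentResult falls back to FAIL.
assessment = scanner.get_or_create_assessment(control_implementation_id=42)

# Override, as used by cci_assessment per the docstring above.
assessment = scanner.get_or_create_assessment(42, status=some_status)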
@@ -1046,39 +1184,120 @@ class ScannerIntegration(ABC):
         :param Optional[str] component_name: The name of the component to associate the asset with. If None, the asset
             is added directly to the security plan without a component association.
         """
-        # Continue with normal asset creation/update
         if not asset.identifier:
             logger.warning("Asset has no identifier, skipping")
             return
 
-
-
-        logger.debug("Searching for component: %s...", component_name)
-        component = component or self.components_by_title.get(component_name)
-        if not component:
-            logger.debug("No existing component found with name %s, proceeding to create it...", component_name)
-            component = regscale_models.Component(
-                title=component_name,
-                componentType=asset.component_type,
-                securityPlansId=self.plan_id,
-                description=component_name,
-                componentOwnerId=self.get_assessor_id(),
-            ).get_or_create()
-            self.components.append(component)
-            if component.securityPlansId and not self.is_component:
-                component_mapping = regscale_models.ComponentMapping(
-                    componentId=component.id,
-                    securityPlanId=self.plan_id,
-                )
-                component_mapping.get_or_create()
-            self.components_by_title[component_name] = component
+        # Get or create component if needed
+        component = self._get_or_create_component_for_asset(asset, component_name)
 
+        # Create or update the asset
         created, existing_or_new_asset = self.create_new_asset(asset, component=None)
 
-        #
-
+        # Note: Result counts are updated during bulk_save() operation, not here
+        # to avoid double-counting and ensure accurate counts from actual database operations
+
+        # Handle component mapping and DuroSuite processing
+        self._handle_component_mapping_and_durosuite(existing_or_new_asset, component, asset, created)
+
+    def _get_or_create_component_for_asset(
+        self, asset: IntegrationAsset, component_name: Optional[str]
+    ) -> Optional[regscale_models.Component]:
+        """
+        Get or create a component for the asset if component_name is provided.
+
+        :param IntegrationAsset asset: The asset being processed
+        :param Optional[str] component_name: Name of the component to associate with
+        :return: The component object or None
+        :rtype: Optional[regscale_models.Component]
+        """
+        if not component_name:
+            return getattr(self, "component") if self.is_component else None
+
+        component = getattr(self, "component") if self.is_component else None
+        component = component or self.components_by_title.get(component_name)
+
+        if not component:
+            component = self._create_new_component(asset, component_name)
+
+        self._handle_component_mapping(component)
+        self.components_by_title[component_name] = component
+        return component
+
+    def _get_compliance_settings_id(self) -> Optional[int]:
+        """
+        Get the compliance settings ID from the security plan.
+
+        :return: The compliance settings ID if available
+        :rtype: Optional[int]
+        """
+        try:
+            security_plan = regscale_models.SecurityPlan.get_object(object_id=self.plan_id)
+            if security_plan and hasattr(security_plan, "complianceSettingsId"):
+                return security_plan.complianceSettingsId
+        except Exception as e:
+            logger.debug(f"Failed to get compliance settings ID from security plan {self.plan_id}: {e}")
+        return None
+
+    def _create_new_component(self, asset: IntegrationAsset, component_name: str) -> regscale_models.Component:
+        """
+        Create a new component for the asset.
+
+        :param IntegrationAsset asset: The asset being processed
+        :param str component_name: Name of the component to create
+        :return: The newly created component
+        :rtype: regscale_models.Component
+        """
+        logger.debug("No existing component found with name %s, proceeding to create it...", component_name)
+        component = regscale_models.Component(
+            title=component_name,
+            componentType=asset.component_type,
+            securityPlansId=self.plan_id,
+            description=component_name,
+            componentOwnerId=self.get_assessor_id(),
+            complianceSettingsId=self._get_compliance_settings_id(),
+        ).get_or_create()
+        self.components.append(component)
+        return component
+
+    def _handle_component_mapping(self, component: regscale_models.Component) -> None:
+        """
+        Handle component mapping creation if needed.
+
+        :param regscale_models.Component component: The component to create mapping for
+        """
+        if not (component.securityPlansId and not self.is_component):
+            return
+
+        component_mapping = regscale_models.ComponentMapping(
+            componentId=component.id,
+            securityPlanId=self.plan_id,
+        )
+        mapping_result = component_mapping.get_or_create()
+
+        if mapping_result is None:
+            logger.debug(
+                f"Failed to create or find ComponentMapping for componentId={component.id}, securityPlanId={self.plan_id}"
+            )
+        else:
+            mapping_id = getattr(mapping_result, "id", "unknown")
+            logger.debug(f"Successfully handled ComponentMapping for componentId={component.id}, ID={mapping_id}")
+
+    def _handle_component_mapping_and_durosuite(
+        self,
+        existing_or_new_asset: Optional[regscale_models.Asset],
+        component: Optional[regscale_models.Component],
+        asset: IntegrationAsset,
+        created: bool,
+    ) -> None:
+        """
+        Handle component mapping and DuroSuite scanning after asset creation.
 
-
+        :param Optional[regscale_models.Asset] existing_or_new_asset: The asset that was created/updated
+        :param Optional[regscale_models.Component] component: The associated component, if any
+        :param IntegrationAsset asset: The original integration asset
+        :param bool created: Whether the asset was newly created
+        """
         if existing_or_new_asset and component:
             _was_created, _asset_mapping = regscale_models.AssetMapping(
                 assetId=existing_or_new_asset.id,
@@ -1086,9 +1305,33 @@ class ScannerIntegration(ABC):
             ).get_or_create_with_status()
 
         if created and DuroSuiteVariables.duroSuiteEnabled:
-            # Check if this is a DuroSuite compatible asset
             scan_durosuite_devices(asset=asset, plan_id=self.plan_id, progress=self.asset_progress)
 
+    def _truncate_field(self, value: Optional[str], max_length: int, field_name: str) -> Optional[str]:
+        """
+        Truncate a field to the maximum allowed length to prevent database errors.
+
+        :param Optional[str] value: The value to truncate
+        :param int max_length: Maximum allowed length
+        :param str field_name: Name of the field being truncated (for logging)
+        :return: Truncated value or None
+        :rtype: Optional[str]
+        """
+        if not value:
+            return value
+
+        if len(value) > max_length:
+            truncated = value[:max_length]
+            logger.warning(
+                "Truncated %s field from %d to %d characters for value: %s...",
+                field_name,
+                len(value),
+                max_length,
+                truncated[:100],
+            )
+            return truncated
+        return value
+
     def create_new_asset(
         self, asset: IntegrationAsset, component: Optional[regscale_models.Component]
     ) -> tuple[bool, Optional[regscale_models.Asset]]:
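Behavior of the truncation helper in isolation (traced from the code above; scanner is any ScannerIntegration instance, and the 450-character cap comes from _prepare_truncated_asset_fields below):

long_name = "a" * 500
assert scanner._truncate_field(long_name, 450, "name") == "a" * 450  # logs a warning with a 100-char preview
assert scanner._truncate_field(None, 450, "name") is None            # falsy values pass through untouched
assert scanner._truncate_field("ok", 450, "name") == "ok"            # short values are returned as-is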
@@ -1101,22 +1344,130 @@ class ScannerIntegration(ABC):
         :return: Tuple of (was_created, newly created asset instance).
         :rtype: tuple[bool, Optional[regscale_models.Asset]]
         """
-
+        if not self._validate_asset_requirements(asset):
+            return False, None
+
+        asset_type = self._validate_and_map_asset_type(asset.asset_type)
+        other_tracking_number = self._prepare_tracking_number(asset)
+        field_data = self._prepare_truncated_asset_fields(asset, other_tracking_number)
+
+        new_asset = self._create_regscale_asset_model(asset, component, asset_type, field_data)
+
+        created, new_asset = new_asset.create_or_update_with_status(bulk_update=True)
+        self.asset_map_by_identifier[asset.identifier] = new_asset
+        logger.debug("Created new asset with identifier %s", asset.identifier)
+
+        self._handle_software_and_stig_processing(new_asset, asset, created)
+        return created, new_asset
+
+    def _validate_asset_requirements(self, asset: IntegrationAsset) -> bool:
+        """Validate that the asset has required fields for creation."""
         if not asset.name:
             logger.warning(
                 "Asset name is required for asset creation. Skipping asset creation of asset_type: %s", asset.asset_type
             )
-            return False
+            return False
+        return True
+
+    def _validate_and_map_asset_type(self, asset_type: str) -> str:
+        """Validate and map asset type to valid RegScale values."""
+        valid_asset_types = [
+            "Physical Server",
+            "Virtual Machine (VM)",
+            "Appliance",
+            "Network Router",
+            "Network Switch",
+            "Firewall",
+            "Desktop",
+            "Laptop",
+            "Tablet",
+            "Phone",
+            "Other",
+        ]
+
+        if asset_type not in valid_asset_types:
+            logger.debug(f"Asset type '{asset_type}' not in valid types, mapping to 'Other'")
+            return "Other"
+        return asset_type
+
+    def _prepare_tracking_number(self, asset: IntegrationAsset) -> str:
+        """Prepare and validate the tracking number for asset deduplication."""
+        other_tracking_number = asset.other_tracking_number or asset.identifier
+        if not other_tracking_number:
+            logger.warning("No tracking number available for asset %s, using name as fallback", asset.name)
+            other_tracking_number = asset.name
+        return other_tracking_number
+
+    def _prepare_truncated_asset_fields(self, asset: IntegrationAsset, other_tracking_number: str) -> dict:
+        """Prepare and truncate asset fields to prevent database errors."""
+        max_field_length = 450
+        name = self._process_asset_name(asset, max_field_length)
+
+        return {
+            "name": name,
+            "azure_identifier": self._truncate_field(asset.azure_identifier, max_field_length, "azureIdentifier"),
+            "aws_identifier": self._truncate_field(asset.aws_identifier, max_field_length, "awsIdentifier"),
+            "google_identifier": self._truncate_field(asset.google_identifier, max_field_length, "googleIdentifier"),
+            "other_cloud_identifier": self._truncate_field(
+                asset.other_cloud_identifier, max_field_length, "otherCloudIdentifier"
+            ),
+            "software_name": self._truncate_field(asset.software_name, max_field_length, "softwareName"),
+            "other_tracking_number": self._truncate_field(
+                other_tracking_number, max_field_length, "otherTrackingNumber"
+            ),
+        }
+
+    def _process_asset_name(self, asset: IntegrationAsset, max_field_length: int) -> str:
+        """Process and truncate asset name, handling special cases like Azure resource paths."""
+        name = self._truncate_field(asset.name, max_field_length, "name")
+
+        # For very long Azure resource paths, extract meaningful parts
+        if asset.name and len(asset.name) > max_field_length and "/" in asset.name:
+            name = self._shorten_azure_resource_path(asset.name, max_field_length)
+
+        return name
+
+    def _shorten_azure_resource_path(self, full_name: str, max_field_length: int) -> str:
+        """Shorten long Azure resource paths to meaningful parts."""
+        parts = full_name.split("/")
+        if len(parts) >= 4:
+            # Extract key components from Azure resource path
+            resource_group = next(
+                (p for i, p in enumerate(parts) if i > 0 and parts[i - 1].lower() == "resourcegroups"), ""
+            )
+            resource_type = parts[-2] if len(parts) > 1 else ""
+            resource_name = parts[-1]
+
+            # Build a shortened but meaningful name
+            if resource_group:
+                name = f"../{resource_group}/.../{resource_type}/{resource_name}"
+            else:
+                name = f".../{resource_type}/{resource_name}"
+
+            # Ensure it fits within limits
+            if len(name) > max_field_length:
+                name = name[-(max_field_length):]
+
+            logger.info(
+                "Shortened long Azure resource path from %d to %d characters: %s", len(full_name), len(name), name
+            )
+            return name
 
+        return self._truncate_field(full_name, max_field_length, "name")
+
+    def _create_regscale_asset_model(
+        self, asset: IntegrationAsset, component: Optional[regscale_models.Component], asset_type: str, field_data: dict
+    ) -> regscale_models.Asset:
+        """Create the RegScale Asset model with all required fields."""
         new_asset = regscale_models.Asset(
-            name=
+            name=field_data["name"],
             description=asset.description,
             bVirtual=asset.is_virtual,
-            otherTrackingNumber=
-            assetOwnerId=asset.asset_owner_id or "Unknown",
+            otherTrackingNumber=field_data["other_tracking_number"],
+            assetOwnerId=asset.asset_owner_id or regscale_models.Asset.get_user_id() or "Unknown",
             parentId=component.id if component else self.plan_id,
             parentModule=self.parent_module,
-            assetType=
+            assetType=asset_type,
             dateLastUpdated=asset.date_last_updated or get_current_datetime(),
             status=asset.status,
             assetCategory=asset.asset_category,
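What the Azure path shortener produces for a typical resource ID (hypothetical path; in the real flow it is only invoked when the full name exceeds the 450-character cap):

path = "/subscriptions/0000/resourceGroups/prod-rg/providers/Microsoft.Compute/virtualMachines/web-01"
scanner._shorten_azure_resource_path(path, 450)
# -> "../prod-rg/.../virtualMachines/web-01"
# (resource group found after the "resourceGroups" segment; type and name taken from the last two segments)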
@@ -1127,7 +1478,7 @@ class ScannerIntegration(ABC):
             serialNumber=asset.serial_number,
             assetTagNumber=asset.asset_tag_number,
             bPublicFacing=asset.is_public_facing,
-            azureIdentifier=
+            azureIdentifier=field_data["azure_identifier"],
             location=asset.location,
             ipAddress=asset.ip_address,
             iPv6Address=asset.ipv6_address,
@@ -1141,13 +1492,13 @@ class ScannerIntegration(ABC):
             endOfLifeDate=asset.end_of_life_date,
             vlanId=asset.vlan_id,
             uri=asset.uri,
-            awsIdentifier=
-            googleIdentifier=
-            otherCloudIdentifier=
+            awsIdentifier=field_data["aws_identifier"],
+            googleIdentifier=field_data["google_identifier"],
+            otherCloudIdentifier=field_data["other_cloud_identifier"],
             patchLevel=asset.patch_level,
             cpe=asset.cpe,
             softwareVersion=asset.software_version,
-            softwareName=
+            softwareName=field_data["software_name"],
             softwareVendor=asset.software_vendor,
             bLatestScan=asset.is_latest_scan,
             bAuthenticatedScan=asset.is_authenticated_scan,
@@ -1156,20 +1507,21 @@ class ScannerIntegration(ABC):
             softwareFunction=asset.software_function,
             baselineConfiguration=asset.baseline_configuration,
         )
+
         if self.asset_identifier_field:
             setattr(new_asset, self.asset_identifier_field, asset.identifier)
 
-
-        # add to asset_map_by_identifier
-        self.asset_map_by_identifier[asset.identifier] = new_asset
-        logger.debug("Created new asset with identifier %s", asset.identifier)
+        return new_asset
 
+    def _handle_software_and_stig_processing(
+        self, new_asset: regscale_models.Asset, asset: IntegrationAsset, created: bool
+    ) -> None:
+        """Handle post-asset creation tasks like software inventory and STIG mapping."""
         self.handle_software_inventory(new_asset, asset.software_inventory, created)
         self.create_asset_data_and_link(new_asset, asset)
         self.create_or_update_ports_protocol(new_asset, asset)
         if self.stig_mapper:
             self.stig_mapper.map_associated_stigs_to_asset(asset=new_asset, ssp_id=self.plan_id)
-        return created, new_asset
 
     def handle_software_inventory(
         self, new_asset: regscale_models.Asset, software_inventory: List[Dict[str, Any]], created: bool
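
`_create_regscale_asset_model` fills the integration-specific identifier column dynamically with `setattr`, since each scanner configures a different `asset_identifier_field`. A self-contained sketch of that pattern; the stub class and field names are hypothetical stand-ins for `regscale_models.Asset`:

```python
class AssetStub:
    """Stand-in for an asset record with two possible identifier columns."""

    def __init__(self) -> None:
        self.qualysId = ""
        self.wizId = ""


asset_identifier_field = "qualysId"  # configured per integration, not hard-coded
stub = AssetStub()
setattr(stub, asset_identifier_field, "ABC-123")  # dynamic column assignment
assert stub.qualysId == "ABC-123"
```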
@@ -1211,7 +1563,6 @@ class ScannerIntegration(ABC):
                     name=software_name,
                     parentHardwareAssetId=new_asset.id,
                     version=software_version,
-                    # references=software.get("references", []),
                 )
             )
         else:
@@ -1551,35 +1902,154 @@ class ScannerIntegration(ABC):
         finding_id = self.get_finding_identifier(finding)
         finding_id_lock = self._get_lock(finding_id)
 
+        self._log_finding_processing_info(finding, finding_id, issue_status, title)
+
         with finding_id_lock:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            existing_issue = self._find_existing_issue_for_finding(finding_id, finding, issue_status)
+            return self._create_or_update_issue(finding, issue_status, title, existing_issue)
+
+    def _log_finding_processing_info(
+        self, finding: IntegrationFinding, finding_id: str, issue_status: regscale_models.IssueStatus, title: str
+    ) -> None:
+        """Log finding processing information for debugging."""
+        logger.debug(
+            f"PROCESSING FINDING: external_id={finding.external_id}, finding_id={finding_id}, status={issue_status}, title='{title[:50]}...'"
+        )
+
+        if issue_status == regscale_models.IssueStatus.Closed:
+            logger.debug(f"CLOSED FINDING: This will create/update a CLOSED issue (status={issue_status})")
+
+    def _find_existing_issue_for_finding(
+        self, finding_id: str, finding: IntegrationFinding, issue_status: regscale_models.IssueStatus
+    ) -> Optional[regscale_models.Issue]:
+        """Find existing issue for the finding based on status and creation type."""
+        if ScannerVariables.issueCreation.lower() == "perasset":
+            return None
+
+        existing_issues = self._get_existing_issues_for_finding(finding_id, finding)
+
+        if issue_status == regscale_models.IssueStatus.Open:
+            return self._find_issue_for_open_status(existing_issues, finding_id)
+        elif issue_status == regscale_models.IssueStatus.Closed:
+            return self._find_issue_for_closed_status(existing_issues, finding, finding_id)
+
+        return None
+
+    def _populate_issue_lookup_cache(self) -> None:
+        """
+        Populate the issue lookup cache by fetching all issues for the plan and indexing by integrationFindingId.
+
+        This eliminates N+1 API calls during findings processing by creating an in-memory index.
+        Thread-safe for concurrent access.
+        """
+        with self._issue_cache_lock:
+            # Double-check locking pattern - check if cache already populated
+            if self._integration_finding_id_cache is not None:
+                return
+
+            module_str = "component" if self.is_component else "security plan"
+            logger.info(f"Building issue lookup index for {module_str} {self.plan_id}...")
+            start_time = time.time()
+
+            # Fetch all issues for the security plan
+            all_issues = regscale_models.Issue.fetch_issues_by_ssp(app=self.app, ssp_id=self.plan_id)
+
+            # Build index: integrationFindingId -> List[Issue]
+            cache = ThreadSafeDict()
+            indexed_count = 0
 
-
+            for issue in all_issues:
+                if issue.integrationFindingId:
+                    finding_id = issue.integrationFindingId
+                    if finding_id not in cache:
+                        cache[finding_id] = []
+                    cache[finding_id].append(issue)
+                    indexed_count += 1
+
+            self._integration_finding_id_cache = cache
+
+            elapsed = time.time() - start_time
+            logger.info(
+                f"Issue lookup index built: {indexed_count} issues indexed from {len(all_issues)} total issues "
+                f"({len(cache)} unique finding IDs) in {elapsed:.2f}s"
+            )
+
+    def _get_existing_issues_for_finding(
+        self, finding_id: str, finding: IntegrationFinding
+    ) -> List[regscale_models.Issue]:
+        """
+        Get existing issues for the finding using cached lookup (fast) or API fallback (slow).
+
+        NEW BEHAVIOR:
+        - First lookup uses cache (O(1) dictionary lookup, no API call)
+        - Cache is populated lazily on first call
+        - Falls back to API only if finding not in cache and has external_id
+        """
+        # Populate cache on first use (lazy initialization)
+        if self._integration_finding_id_cache is None:
+            self._populate_issue_lookup_cache()
+
+        # FAST PATH: Check cache first (O(1) lookup, no API call)
+        existing_issues = self._integration_finding_id_cache.get(finding_id, [])
+
+        # FALLBACK PATH: Only if no issues found in cache AND external_id exists
+        # This handles edge cases where integrationFindingId might be missing but other identifiers exist
+        if not existing_issues and finding.external_id:
+            logger.debug(f"Issue not found in cache for finding_id={finding_id}, trying identifier fallback")
+            existing_issues = self._find_issues_by_identifier_fallback(finding.external_id)
+
+            # Cache the fallback result to avoid future API lookups
+            if existing_issues:
+                with self._issue_cache_lock:
+                    self._integration_finding_id_cache[finding_id] = existing_issues
+
+        return existing_issues
+
+    def _find_issue_for_open_status(
+        self, existing_issues: List[regscale_models.Issue], finding_id: str
+    ) -> Optional[regscale_models.Issue]:
+        """Find appropriate issue when the finding status is Open."""
+        # Find an open issue to update first
+        open_issue = next(
+            (issue for issue in existing_issues if issue.status != regscale_models.IssueStatus.Closed), None
+        )
+        if open_issue:
+            return open_issue
+
+        # If no open issue found, look for a closed issue to reopen
+        closed_issue = next(
+            (issue for issue in existing_issues if issue.status == regscale_models.IssueStatus.Closed), None
+        )
+        if closed_issue:
+            logger.debug(f"Reopening closed issue {closed_issue.id} for finding {finding_id}")
+            return closed_issue
+
+        return None
+
+    def _find_issue_for_closed_status(
+        self, existing_issues: List[regscale_models.Issue], finding: IntegrationFinding, finding_id: str
+    ) -> Optional[regscale_models.Issue]:
+        """Find appropriate issue when the finding status is Closed."""
+        # Find a closed issue with matching due date to consolidate with
+        matching_closed_issue = next(
+            (
+                issue
+                for issue in existing_issues
+                if issue.status == regscale_models.IssueStatus.Closed
+                and date_str(issue.dueDate) == date_str(finding.due_date)
+            ),
+            None,
+        )
+        if matching_closed_issue:
+            return matching_closed_issue
+
+        # If no matching closed issue, look for any existing issue to update
+        any_existing_issue = next(iter(existing_issues), None) if existing_issues else None
+        if any_existing_issue:
+            logger.debug(f"Closing existing issue {any_existing_issue.id} for finding {finding_id}")
+            return any_existing_issue
+
+        return None
 
     def _create_or_update_issue(
         self,
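
`_populate_issue_lookup_cache` pairs lazy initialization with the double-checked locking pattern: the unlocked `None` check keeps the hot path cheap, and the second check under the lock stops two threads from building the index twice. A generic sketch of the pattern, assuming a plain `dict` and a caller-supplied fetch function in place of RegScale's `ThreadSafeDict` and API client:

```python
import threading
from collections import defaultdict
from typing import Callable, Dict, List, Optional

_index: Optional[Dict[str, List[dict]]] = None
_index_lock = threading.Lock()


def get_issue_index(fetch_all_issues: Callable[[], List[dict]]) -> Dict[str, List[dict]]:
    """Build a finding_id -> issues index exactly once, safely across threads."""
    global _index
    if _index is None:  # fast path: no lock once populated
        with _index_lock:
            if _index is None:  # double-check under the lock
                index: Dict[str, List[dict]] = defaultdict(list)
                for issue in fetch_all_issues():  # one bulk fetch instead of N+1 lookups
                    fid = issue.get("integrationFindingId")
                    if fid:
                        index[fid].append(issue)
                _index = dict(index)
    return _index
```

The payoff is the one the docstring claims: every later lookup is an O(1) dictionary hit instead of an API round trip.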
@@ -1615,7 +2085,124 @@ class ScannerIntegration(ABC):
         # Get consolidated asset identifier
         asset_identifier = self.get_consolidated_asset_identifier(finding, existing_issue)
 
-        #
+        # Set basic issue fields
+        self._set_basic_issue_fields(issue, finding, issue_status, issue_title, asset_identifier)
+
+        # Set due date
+        self._set_issue_due_date(issue, finding)
+
+        # Set additional issue fields
+        self._set_additional_issue_fields(issue, finding, description, remediation_description)
+
+        # Set control-related fields
+        self._set_control_fields(issue, finding)
+
+        # Set risk and operational fields
+        self._set_risk_and_operational_fields(issue, finding, is_poam)
+
+        # Update KEV data if CVE exists
+        if finding.cve:
+            issue = self.lookup_kev_and_update_issue(cve=finding.cve, issue=issue, cisa_kevs=self._kev_data)
+
+        # Save or create the issue
+        self._save_or_create_issue(issue, finding, existing_issue, is_poam)
+
+        self._handle_property_and_milestone_creation(issue, finding, existing_issue)
+        return issue
+
+    def _find_issues_by_identifier_fallback(self, external_id: str) -> List[regscale_models.Issue]:
+        """
+        Find issues by identifier fields (otherIdentifier or integration-specific field) as fallback.
+        This helps with deduplication when integrationFindingId lookup fails.
+
+        :param str external_id: The external ID to search for
+        :return: List of matching issues
+        :rtype: List[regscale_models.Issue]
+        """
+        fallback_issues = []
+
+        try:
+            # Get all issues for this plan/component
+            all_issues = regscale_models.Issue.get_all_by_parent(
+                parent_id=self.plan_id,
+                parent_module=self.parent_module,
+            )
+
+            # Filter by source report to only check our integration's issues
+            source_issues = [issue for issue in all_issues if issue.sourceReport == self.title]
+
+            # Look for matches by otherIdentifier
+            for issue in source_issues:
+                if getattr(issue, "otherIdentifier", None) == external_id:
+                    fallback_issues.append(issue)
+                    logger.debug(f"Found issue {issue.id} by otherIdentifier fallback: {external_id}")
+
+                # Also check integration-specific identifier field if configured
+                elif (
+                    self.issue_identifier_field
+                    and hasattr(issue, self.issue_identifier_field)
+                    and getattr(issue, self.issue_identifier_field) == external_id
+                ):
+                    fallback_issues.append(issue)
+                    logger.debug(f"Found issue {issue.id} by {self.issue_identifier_field} fallback: {external_id}")
+
+            if fallback_issues:
+                logger.debug(
+                    f"Fallback deduplication found {len(fallback_issues)} existing issue(s) for external_id: {external_id}"
+                )
+
+        except Exception as e:
+            logger.warning(f"Error in fallback issue lookup for {external_id}: {e}")
+
+        return fallback_issues
+
+    def _set_issue_identifier_fields_internal(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+        """Set issue identifier fields (e.g., wizId) on the issue object without saving."""
+        if not finding.external_id:
+            logger.debug(f"finding.external_id is empty: {finding.external_id}")
+            return
+
+        logger.debug(f"Setting issue identifier fields: external_id={finding.external_id}")
+
+        # Set otherIdentifier field (the external ID field in Issue model)
+        if not getattr(issue, "otherIdentifier", None):  # Only set if not already set
+            issue.otherIdentifier = finding.external_id
+            logger.debug(f"Set otherIdentifier = {finding.external_id}")
+
+        # Set the specific identifier field if configured (e.g., wizId for Wiz)
+        if self.issue_identifier_field and hasattr(issue, self.issue_identifier_field):
+            current_value = getattr(issue, self.issue_identifier_field)
+            if not current_value:  # Only set if not already set
+                setattr(issue, self.issue_identifier_field, finding.external_id)
+                logger.debug(f"Set {self.issue_identifier_field} = {finding.external_id}")
+            else:
+                logger.debug(f"{self.issue_identifier_field} already set to: {current_value}")
+        else:
+            if self.issue_identifier_field:  # Only log warning if field is configured
+                logger.warning(
+                    f"Cannot set issue_identifier_field: field='{self.issue_identifier_field}', hasattr={hasattr(issue, self.issue_identifier_field)}"
+                )
+
+    def _set_issue_identifier_fields(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+        """Set issue identifier fields (e.g., wizId) and save them to the database."""
+        self._set_issue_identifier_fields_internal(issue, finding)
+
+        # Explicitly save the issue to persist the identifier fields
+        try:
+            issue.save(bulk=True)
+            logger.info(f"Saved issue {issue.id} with identifier fields")
+        except Exception as e:
+            logger.error(f"Failed to save issue identifier fields: {e}")
+
+    def _set_basic_issue_fields(
+        self,
+        issue: regscale_models.Issue,
+        finding: IntegrationFinding,
+        issue_status: regscale_models.IssueStatus,
+        issue_title: str,
+        asset_identifier: str,
+    ) -> None:
+        """Set basic fields for the issue."""
         issue.parentId = self.plan_id
         issue.parentModule = self.parent_module
         issue.vulnerabilityId = finding.vulnerability_id
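
`_find_issues_by_identifier_fallback` only runs when the finding-id index misses; it is a linear scan that matches on `otherIdentifier` first and then on an optional integration-specific attribute. The matching logic in isolation, with duck-typed objects and no API calls:

```python
from typing import Any, List, Optional


def find_by_external_id(issues: List[Any], external_id: str, extra_field: Optional[str] = None) -> List[Any]:
    """Fallback linear scan for when the primary finding-id index misses."""
    matches = []
    for issue in issues:
        if getattr(issue, "otherIdentifier", None) == external_id:
            matches.append(issue)
        elif extra_field and getattr(issue, extra_field, None) == external_id:
            # Integration-specific column, e.g. "wizId" for Wiz
            matches.append(issue)
    return matches
```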
@@ -1632,40 +2219,65 @@ class ScannerIntegration(ABC):
         issue.securityPlanId = self.plan_id if not self.is_component else None
         issue.identification = finding.identification
         issue.dateFirstDetected = finding.first_seen
-
+        issue.assetIdentifier = finding.issue_asset_identifier_value or asset_identifier
+
+        # Set organization ID based on Issue Owner or SSP Owner hierarchy
+        issue.orgId = self.determine_issue_organization_id(issue.issueOwnerId)
+
+    def _set_issue_due_date(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+        """Set the due date for the issue using DueDateHandler."""
+        # Always calculate or validate due date to ensure it's not in the past
         if not finding.due_date:
+            # No due date set, calculate new one
             try:
                 base_created = finding.date_created or issue.dateCreated
-                finding.due_date =
+                finding.due_date = self.due_date_handler.calculate_due_date(
                     severity=finding.severity,
                     created_date=base_created,
-
+                    cve=finding.cve,
+                    title=finding.title or self.title,
                 )
-            except Exception:
+            except Exception as e:
+                logger.warning(f"Error calculating due date with DueDateHandler: {e}")
                 # Final fallback to a Low severity default if anything goes wrong
                 base_created = finding.date_created or issue.dateCreated
-                finding.due_date =
+                finding.due_date = self.due_date_handler.calculate_due_date(
                     severity=regscale_models.IssueSeverity.Low,
                     created_date=base_created,
-
+                    cve=finding.cve,
+                    title=finding.title or self.title,
                 )
+        else:
+            # Due date already exists, but validate it's not in the past (if noPastDueDates is enabled)
+            finding.due_date = self.due_date_handler._ensure_future_due_date(
+                finding.due_date, self.due_date_handler.integration_timelines.get(finding.severity, 60)
+            )
+
         issue.dueDate = finding.due_date
+
+    def _set_additional_issue_fields(
+        self, issue: regscale_models.Issue, finding: IntegrationFinding, description: str, remediation_description: str
+    ) -> None:
+        """Set additional fields for the issue."""
         issue.description = description
         issue.sourceReport = finding.source_report or self.title
         issue.recommendedActions = finding.recommendation_for_mitigation
-        issue.assetIdentifier = asset_identifier
         issue.securityChecks = finding.security_check or finding.external_id
         issue.remediationDescription = remediation_description
         issue.integrationFindingId = self.get_finding_identifier(finding)
         issue.poamComments = finding.poam_comments
         issue.cve = finding.cve
         issue.assessmentId = finding.assessment_id
+
+        # Set issue identifier fields (e.g., wizId, otherIdentifier) before save/create
+        self._set_issue_identifier_fields_internal(issue, finding)
+
+    def _set_control_fields(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+        """Set control-related fields for the issue."""
         control_id = self.get_control_implementation_id_for_cci(finding.cci_ref) if finding.cci_ref else None
-
-        # Add the control implementation ids and the cci ref if it exists
-        # Get control implementation ID for CCI if it exists
-        # Only add CCI control ID if it exists
+        # Note: controlId is deprecated, using controlImplementationIds instead
         cci_control_ids = [control_id] if control_id is not None else []
+
         # Ensure failed control labels (e.g., AC-4(21)) are present in affectedControls
         if finding.affected_controls:
             issue.affectedControls = finding.affected_controls
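
`_ensure_future_due_date` itself is not shown in this diff; from its call site it takes a due date plus a per-severity default window and returns a date that is not in the past. A plausible sketch under those assumptions only:

```python
from datetime import datetime, timedelta


def ensure_future_due_date(due_date: datetime, default_days: int) -> datetime:
    """Return due_date unchanged if still in the future, else push it forward.

    Sketch only: the real DueDateHandler also honors the noPastDueDates
    setting and works with RegScale's string-formatted dates.
    """
    now = datetime.now()
    return due_date if due_date > now else now + timedelta(days=default_days)
```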
@@ -1673,15 +2285,17 @@ class ScannerIntegration(ABC):
             issue.affectedControls = ", ".join(sorted({cl for cl in finding.control_labels if cl}))
 
         issue.controlImplementationIds = list(set(finding._control_implementation_ids + cci_control_ids))  # noqa
-
+
+    def _set_risk_and_operational_fields(
+        self, issue: regscale_models.Issue, finding: IntegrationFinding, is_poam: bool
+    ) -> None:
+        """Set risk and operational fields for the issue."""
         issue.isPoam = is_poam
         issue.basisForAdjustment = (
             finding.basis_for_adjustment if finding.basis_for_adjustment else f"{self.title} import"
         )
         issue.pluginId = finding.plugin_id
         issue.originalRiskRating = regscale_models.Issue.assign_risk_rating(finding.severity)
-        # Current: changes
-        # Planned: planned changes
         issue.changes = "<p>Current: {}</p><p>Planned: {}</p>".format(
             finding.milestone_changes, finding.planned_milestone_changes
         )
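
The `controlImplementationIds` assignment in the hunk above deduplicates by round-tripping through a `set`, which also discards ordering. In miniature:

```python
existing_ids = [7, 42, 13]
cci_control_ids = [42, 99]
merged = list(set(existing_ids + cci_control_ids))  # duplicates collapse; order is arbitrary
assert sorted(merged) == [7, 13, 42, 99]
```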
@@ -1690,21 +2304,32 @@ class ScannerIntegration(ABC):
         issue.operationalRequirement = finding.operational_requirements
         issue.deviationRationale = finding.deviation_rationale
         issue.dateLastUpdated = get_current_datetime()
-        ## set affected controls if they exist
         issue.affectedControls = finding.affected_controls
 
-
-
-
+    def _save_or_create_issue(
+        self,
+        issue: regscale_models.Issue,
+        finding: IntegrationFinding,
+        existing_issue: Optional[regscale_models.Issue],
+        is_poam: bool,
+    ) -> None:
+        """Save or create the issue."""
         if existing_issue:
+            logger.debug(f"UPDATING EXISTING ISSUE: {existing_issue.id} with external_id={finding.external_id}")
             logger.debug("Saving Old Issue: %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
             issue.save(bulk=True)
             logger.debug("Saved existing issue %s with assetIdentifier: %s", issue.id, issue.assetIdentifier)
-
         else:
+            logger.debug(
+                f"➕ CREATING NEW ISSUE: external_id={finding.external_id}, title='{finding.title[:50]}...', status={finding.status}"
+            )
             issue = issue.create_or_update(
                 bulk_update=True, defaults={"otherIdentifier": self._get_other_identifier(finding, is_poam)}
             )
+            if issue.id:
+                logger.debug(f"NEW ISSUE CREATED: RegScale ID={issue.id}, external_id={finding.external_id}")
+            else:
+                logger.warning(f"ISSUE CREATION FAILED: No ID assigned for external_id={finding.external_id}")
         self.extra_data_to_properties(finding, issue.id)
 
         self._handle_property_and_milestone_creation(issue, finding, existing_issue)
@@ -1721,65 +2346,91 @@ class ScannerIntegration(ABC):
 
         :param regscale_models.Issue issue: The issue to handle properties for
         :param IntegrationFinding finding: The finding data
-        :param
+        :param Optional[regscale_models.Issue] existing_issue: Existing issue for milestone comparison
         :rtype: None
         """
+        # Handle property creation
+        self._create_issue_properties(issue, finding)
+
+        # Handle milestone creation
+        self._create_issue_milestones(issue, finding, existing_issue)
+
+    def _create_issue_properties(self, issue: regscale_models.Issue, finding: IntegrationFinding) -> None:
+        """
+        Create properties for an issue based on finding data.
+
+        :param regscale_models.Issue issue: The issue to create properties for
+        :param IntegrationFinding finding: The finding data
+        """
         if poc := finding.point_of_contact:
-
-                key="POC",
-                value=poc,
-                parentId=issue.id,
-                parentModule="issues",
-            ).create_or_update()
-            logger.debug("Added POC property %s to issue %s", poc, issue.id)
+            self._create_property_safe(issue, "POC", poc, "POC property")
 
         if finding.is_cwe:
+            self._create_property_safe(issue, "CWE", finding.plugin_id, "CWE property")
+
+    def _create_property_safe(self, issue: regscale_models.Issue, key: str, value: str, property_type: str) -> None:
+        """
+        Safely create a property with error handling.
+        Validates that the issue has a valid ID before attempting to create the property.
+
+        :param regscale_models.Issue issue: The issue to create property for
+        :param str key: The property key
+        :param str value: The property value
+        :param str property_type: Description for logging purposes
+        """
+        # Validate that the issue has a valid ID, if not, create the issue
+        if not issue or not issue.id or issue.id == 0:
+            issue = issue.create_or_update()
+
+        # Validate that the issue has a valid ID, if not, skip the property creation
+        if not issue or not issue.id or issue.id == 0:
+            logger.debug(
+                "Skipping %s creation: issue ID is invalid (issue=%s, id=%s)",
+                property_type,
+                "None" if not issue else "present",
+                issue.id if issue else "N/A",
+            )
+            return
+
+        try:
             regscale_models.Property(
-                key=
-                value=
+                key=key,
+                value=value,
                 parentId=issue.id,
                 parentModule="issues",
             ).create_or_update()
-            logger.debug("Added
+            logger.debug("Added %s %s to issue %s", property_type, value, issue.id)
+        except Exception as e:
+            logger.warning("Failed to create %s for issue %s: %s", property_type, issue.id, str(e))
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            regscale_models.Milestone(
-                title=f"Issue created from {self.title} scan",
-                milestoneDate=self.scan_date,
-                responsiblePersonId=self.assessor_id,
-                parentID=issue.id,
-                parentModule="issues",
-            ).create_or_update()
-            logger.debug("Created milestone for issue %s from finding %s", issue.id, finding.external_id)
-        else:
-            logger.debug("No milestone created for issue %s from finding %s", issue.id, finding.external_id)
+    def _create_issue_milestones(
+        self,
+        issue: regscale_models.Issue,
+        finding: IntegrationFinding,
+        existing_issue: Optional[regscale_models.Issue],
+    ) -> None:
+        """
+        Create milestones for an issue based on status transitions.
+
+        Delegates to MilestoneManager for cleaner separation of concerns.
+        Also ensures existing issues have creation milestones (backfills if missing).
+
+        :param regscale_models.Issue issue: The issue to create milestones for
+        :param IntegrationFinding finding: The finding data
+        :param Optional[regscale_models.Issue] existing_issue: Existing issue for comparison
+        """
+        milestone_manager = self.get_milestone_manager()
+
+        # For existing issues, ensure they have a creation milestone (backfill if missing)
+        if existing_issue:
+            milestone_manager.ensure_creation_milestone_exists(issue=issue, finding=finding)
+
+        # Handle status transition milestones
+        milestone_manager.create_milestones_for_issue(
+            issue=issue,
+            finding=finding,
+            existing_issue=existing_issue,
+        )
 
     @staticmethod
     def extra_data_to_properties(finding: IntegrationFinding, issue_id: int) -> None:
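
`_create_property_safe` guards the parent-child relationship: it persists the issue first when it has no ID, skips quietly if that still fails, and downgrades creation errors to warnings so one bad property cannot abort the findings run. The shape of that guard, sketched with a hypothetical `persist`/`make_child` pair rather than RegScale's `create_or_update`:

```python
import logging

logger = logging.getLogger(__name__)


def create_child_safe(parent, make_child) -> None:
    """Attach a child record only once the parent has a real ID."""
    if not getattr(parent, "id", None):
        parent = parent.persist()  # hypothetical save call; returns the saved record
    if not getattr(parent, "id", None):
        logger.debug("Skipping child creation: parent still has no ID")
        return
    try:
        make_child(parent.id)
    except Exception as exc:  # never let one child failure abort the whole run
        logger.warning("Failed to create child for parent %s: %s", parent.id, exc)
```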
@@ -1825,13 +2476,17 @@ class ScannerIntegration(ABC):
         :rtype: str
         """
         delimiter = "\n"
+
+        # Use issue_asset_identifier_value if available (e.g., providerUniqueId from Wiz)
+        # This provides more meaningful asset identification for eMASS exports
+        current_asset_identifier = finding.issue_asset_identifier_value or finding.asset_identifier
         if not existing_issue or ScannerVariables.issueCreation.lower() == "perasset":
-            return
+            return current_asset_identifier
 
         # Get existing asset identifiers
         existing_asset_identifiers = set((existing_issue.assetIdentifier or "").split(delimiter))
-        if
-        existing_asset_identifiers.add(
+        if current_asset_identifier not in existing_asset_identifiers:
+            existing_asset_identifiers.add(current_asset_identifier)
 
         return delimiter.join(existing_asset_identifiers)
 
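
`get_consolidated_asset_identifier` treats the issue's `assetIdentifier` column as a newline-delimited set, so repeated imports add each asset at most once. The same merge in isolation; it sorts for a deterministic result, whereas the source joins the raw set and so has arbitrary ordering:

```python
def consolidate_identifiers(existing: str, new_id: str, delimiter: str = "\n") -> str:
    """Merge one identifier into a delimiter-joined, duplicate-free list."""
    ids = set(filter(None, (existing or "").split(delimiter)))
    ids.add(new_id)
    return delimiter.join(sorted(ids))


assert consolidate_identifiers("host-a\nhost-b", "host-b") == "host-a\nhost-b"
assert consolidate_identifiers("", "host-c") == "host-c"
```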
@@ -1862,16 +2517,14 @@ class ScannerIntegration(ABC):
         """
         Determine if the cve is part of the published CISA KEV list
 
+        Note: Due date handling is now managed by DueDateHandler. This method only sets kevList field.
+
         :param str cve: The CVE to lookup in CISAs KEV list
-        :param regscale_models.Issue issue: The issue to update kevList field
+        :param regscale_models.Issue issue: The issue to update kevList field
         :param Optional[ThreadSafeDict[str, Any]] cisa_kevs: The CISA KEV data to search the findings
         :return: The updated issue
         :rtype: regscale_models.Issue
         """
-        from datetime import datetime
-
-        from regscale.core.app.utils.app_utils import convert_datetime_to_regscale_string
-
         issue.kevList = "No"
 
         if cisa_kevs:
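
With the due-date math moved into DueDateHandler, the KEV check reduces to a membership test over the CISA feed. A sketch of that test, assuming entries shaped like the public KEV JSON (objects carrying a `cveID` key):

```python
from typing import Iterable, Mapping, Optional


def kev_flag(cve: str, kev_entries: Optional[Iterable[Mapping[str, str]]]) -> str:
    """Return "Yes" if the CVE appears in the KEV entries, else "No"."""
    if not cve or not kev_entries:
        return "No"
    return "Yes" if any(entry.get("cveID") == cve for entry in kev_entries) else "No"


print(kev_flag("CVE-2021-44228", [{"cveID": "CVE-2021-44228"}]))  # Yes
```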
@@ -1884,14 +2537,6 @@ class ScannerIntegration(ABC):
                 None,
             )
             if kev_data:
-                # If kev due date is before the issue date created, add the difference to the date created
-                calculated_due_date = ScannerIntegration._calculate_kev_due_date(kev_data, issue.dateCreated)
-                if calculated_due_date:
-                    issue.dueDate = calculated_due_date
-                else:
-                    issue.dueDate = convert_datetime_to_regscale_string(
-                        datetime.strptime(kev_data["dueDate"], "%Y-%m-%d")
-                    )
                 issue.kevList = "Yes"
 
         return issue
@@ -1952,6 +2597,8 @@ class ScannerIntegration(ABC):
         if found_issue.controlImplementationIds:
             for control_id in found_issue.controlImplementationIds:
                 self.update_control_implementation_status_after_close(control_id)
+                # Update assessment status to reflect the control implementation status
+                self.update_assessment_status_from_control_implementation(control_id)
 
     def handle_failing_checklist(
         self,
@@ -1976,11 +2623,13 @@ class ScannerIntegration(ABC):
             if failing_objective.name.lower().startswith("cci-"):
                 implementation_id = self.get_control_implementation_id_for_cci(failing_objective.name)
             else:
-
-
-
-
-
+                implementation_id = self._fallback_implementation_id(failing_objective)
+
+            if not implementation_id or implementation_id is None:
+                logger.warning(
+                    "Could not map objective to a Control Implementation for objective #%i.", failing_objective.id
+                )
+                continue
 
             failing_option = regscale_models.ImplementationOption(
                 name="Failed STIG",
@@ -2002,13 +2651,36 @@ class ScannerIntegration(ABC):
             ).create_or_update()
 
             # Create assessment and control test result
-            assessment = self.get_or_create_assessment(
+            assessment = self.get_or_create_assessment(
+                implementation_id, status=regscale_models.AssessmentResultsStatus.FAIL
+            )
             if implementation_id:
                 control_test = self.create_or_get_control_test(finding, implementation_id)
                 self.create_control_test_result(
                     finding, control_test, assessment, regscale_models.ControlTestResultStatus.FAIL
                 )
 
+    def _fallback_implementation_id(self, objective: regscale_models.ControlObjective) -> Optional[int]:
+        """
+        Fallback method to get control implementation ID from objective name if CCI mapping fails.
+
+        :param regscale_models.ControlObjective objective: The control objective
+        :return: The control implementation ID if found, None otherwise
+        :rtype: Optional[int]
+        """
+        control_label = objective_to_control_dot(objective.name)
+        if implementation_id := self.control_implementation_id_map.get(control_label):
+            return implementation_id
+
+        if control_id := self.control_id_to_implementation_map.get(objective.securityControlId):
+            if control_label := self.control_map.get(control_id):
+                implementation_id = self.control_implementation_id_map.get(control_label)
+                if not implementation_id:
+                    print("No dice.")
+                return implementation_id
+        logger.debug("Could not find fallback implementation ID for objective #%i", objective.id)
+        return None
+
     def handle_passing_checklist(
         self,
         finding: IntegrationFinding,
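
`_fallback_implementation_id` chains two lookups with the walrus operator: a direct control-label hit, then an indirection through the objective's `securityControlId`. The same resolution abstracted away from the RegScale maps; all three dictionaries below are illustrative:

```python
from typing import Dict, Optional


def resolve_impl_id(
    label_to_impl: Dict[str, int],     # control label -> implementation id
    control_to_label: Dict[int, str],  # security control id -> control label
    objective_label: str,
    security_control_id: int,
) -> Optional[int]:
    """Two-step fallback: direct label lookup, then control-id indirection."""
    if impl_id := label_to_impl.get(objective_label):
        return impl_id
    if label := control_to_label.get(security_control_id):
        return label_to_impl.get(label)
    return None


assert resolve_impl_id({"ac-2": 11}, {7: "ac-2"}, "ac-3", 7) == 11
```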
@@ -2032,15 +2704,12 @@ class ScannerIntegration(ABC):
             if passing_objective.name.lower().startswith("cci-"):
                 implementation_id = self.get_control_implementation_id_for_cci(passing_objective.name)
             else:
-
-
-
-
-
-
-                # Skip if we couldn't determine the implementation ID
-                if implementation_id is None:
-                    logger.warning("Could not determine implementation ID for objective %s", passing_objective.name)
+                implementation_id = self._fallback_implementation_id(passing_objective)
+
+            if not implementation_id or implementation_id is None:
+                logger.warning(
+                    "Could not map objective to a Control Implementation for objective #%i.", passing_objective.id
+                )
                 continue
 
             passing_option = regscale_models.ImplementationOption(
@@ -2063,7 +2732,9 @@ class ScannerIntegration(ABC):
             ).create_or_update()
 
             # Create assessment and control test result
-            assessment = self.get_or_create_assessment(
+            assessment = self.get_or_create_assessment(
+                implementation_id, status=regscale_models.AssessmentResultsStatus.PASS
+            )
             control_test = self.create_or_get_control_test(finding, implementation_id)
             self.create_control_test_result(
                 finding, control_test, assessment, regscale_models.ControlTestResultStatus.PASS
@@ -2089,17 +2760,45 @@ class ScannerIntegration(ABC):
 
     def get_asset_by_identifier(self, identifier: str) -> Optional[regscale_models.Asset]:
         """
-        Gets an asset by its identifier
+        Gets an asset by its identifier with fallback lookups.
+
+        REG-17044: Enhanced to support multiple identifier fields (qualysId, IP, FQDN)
+        to improve asset matching and reduce "asset not found" errors.
 
         :param str identifier: The identifier of the asset
         :return: The asset
         :rtype: Optional[regscale_models.Asset]
         """
-
+        # Try primary identifier field first
+        if asset := self.asset_map_by_identifier.get(identifier):
+            return asset
+
+        # Fallback: Try common identifier fields
+        # This helps when asset_identifier_field doesn't match or assets use different identifiers
+        if not asset and identifier:
+            for cached_asset in self.asset_map_by_identifier.values():
+                # Try IP address lookup
+                if getattr(cached_asset, "ipAddress", None) == identifier:
+                    logger.debug(f"Found asset {cached_asset.id} by IP address fallback: {identifier}")
+                    return cached_asset
+                # Try FQDN lookup
+                if getattr(cached_asset, "fqdn", None) == identifier:
+                    logger.debug(f"Found asset {cached_asset.id} by FQDN fallback: {identifier}")
+                    return cached_asset
+                # Try DNS lookup
+                if getattr(cached_asset, "dns", None) == identifier:
+                    logger.debug(f"Found asset {cached_asset.id} by DNS fallback: {identifier}")
+                    return cached_asset
+
+        # Log error if still not found
         if not asset and identifier not in self.alerted_assets:
             self.alerted_assets.add(identifier)
             if not getattr(self, "suppress_asset_not_found_errors", False):
-                self.log_error(
+                self.log_error(
+                    "Asset not found for identifier '%s' (tried %s, ipAddress, fqdn, dns)",
+                    identifier,
+                    self.asset_identifier_field,
+                )
         return asset
 
     def get_issue_by_integration_finding_id(self, integration_finding_id: str) -> Optional[regscale_models.Issue]:
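
The enhanced lookup falls back from the primary key to a scan over secondary attributes. A compact sketch with duck-typed assets; the attribute names (`ipAddress`, `fqdn`, `dns`) follow the diff:

```python
from typing import Any, Dict, Optional


def lookup_asset(index: Dict[str, Any], identifier: str) -> Optional[Any]:
    """O(1) primary lookup, then an O(n) scan over secondary identifiers."""
    if asset := index.get(identifier):
        return asset
    for candidate in index.values():
        if identifier in (
            getattr(candidate, "ipAddress", None),
            getattr(candidate, "fqdn", None),
            getattr(candidate, "dns", None),
        ):
            return candidate
    return None
```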
@@ -2128,7 +2827,11 @@ class ScannerIntegration(ABC):
                 logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
             return 0
 
-        tool =
+        tool = (
+            regscale_models.ChecklistTool.CISBenchmarks
+            if "simp.cis" in str(finding.vulnerability_number).lower()
+            else regscale_models.ChecklistTool.STIGs
+        )
         if finding.vulnerability_type == "Vulnerability Scan":
             tool = regscale_models.ChecklistTool.VulnerabilityScanner
 
@@ -2190,33 +2893,70 @@ class ScannerIntegration(ABC):
         scan_history = self.create_scan_history()
         current_vulnerabilities: Dict[int, Set[int]] = defaultdict(set)
         processed_findings_count = 0
-
-
-
+
+        # Convert iterator to list so we can check findings and avoid re-iteration issues
+        findings_list = list(findings)
+
+        # Set the number of findings to process for progress tracking
+        self.num_findings_to_process = len(findings_list)
+        loading_findings = self._setup_finding_progress()
+
+        # Pre-load CCI to control map before threading ONLY if:
+        # 1. The integration has CCI mapping enabled (enable_cci_mapping = True)
+        # 2. Findings contain actual CCI references
+        # This avoids expensive unnecessary API calls for integrations that don't use CCIs (e.g., AWS)
+        if self.enable_cci_mapping:
+            has_cci_refs = any(
+                getattr(f, "cci_ref", None) is not None and getattr(f, "cci_ref", None) != "" for f in findings_list
+            )
+            if has_cci_refs:
+                logger.debug("Pre-loading CCI to control map...")
+                _ = self.get_cci_to_control_map()
+
+        # Process findings
+        processed_findings_count = self._process_findings_with_threading(
+            iter(findings_list), scan_history, current_vulnerabilities, loading_findings
         )
 
-        #
+        # Finalize processing
+        self._finalize_finding_processing(scan_history, current_vulnerabilities)
+
+        # Complete the finding progress bar
+        self._complete_finding_progress(loading_findings, processed_findings_count)
+
+        logger.info(f"Successfully processed {processed_findings_count} findings from {self.title}")
+
+        return processed_findings_count
+
+    def _setup_finding_progress(self):
+        """Setup progress tracking for findings processing."""
+        # Backwards compatibility: check if finding_progress exists and has add_task method
+        if self.finding_progress is not None and hasattr(self.finding_progress, "add_task"):
+            return self.finding_progress.add_task(
+                f"[#f8b737]Processing {f'{self.num_findings_to_process} ' if self.num_findings_to_process else ''}finding(s) from {self.title}",
+                total=self.num_findings_to_process if self.num_findings_to_process else None,
+            )
+        return None
+
+    def _process_findings_with_threading(
+        self,
+        findings: Iterator[IntegrationFinding],
+        scan_history: regscale_models.ScanHistory,
+        current_vulnerabilities: Dict[int, Set[int]],
+        loading_findings,
+    ) -> int:
+        """Process findings using threading or sequential processing."""
+        processed_findings_count = 0
         count_lock = threading.RLock()
 
         def process_finding_with_progress(finding_to_process: IntegrationFinding) -> None:
-            """
-            Process a single finding and update progress.
-
-            :param IntegrationFinding finding_to_process: The finding to process
-            :rtype: None
-            """
+            """Process a single finding and update progress."""
             nonlocal processed_findings_count
             try:
                 self.process_finding(finding_to_process, scan_history, current_vulnerabilities)
                 with count_lock:
                     processed_findings_count += 1
-
-                self.finding_progress.update(
-                    loading_findings,
-                    total=self.num_findings_to_process,
-                    description=f"[#f8b737]Processing {self.num_findings_to_process} findings from {self.title}.",
-                )
-                self.finding_progress.advance(loading_findings, 1)
+                self._update_finding_progress(loading_findings)
             except Exception as exc:
                 self.log_error(
                     "An error occurred when processing finding %s: %s",
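
Materializing the findings iterator once lets the method both size the progress bar and cheaply pre-scan for CCI references before paying for the control-map fetch. The gating idea in isolation; `load_map` is a hypothetical stand-in for the expensive call:

```python
from typing import Any, Iterator


def maybe_preload_cci_map(findings: Iterator[Any], enable_cci_mapping: bool, load_map) -> list:
    """Trigger the expensive CCI map load only when something will use it."""
    findings_list = list(findings)  # safe to len() and re-iterate afterwards
    has_cci_refs = any(getattr(f, "cci_ref", None) for f in findings_list)
    if enable_cci_mapping and has_cci_refs:
        load_map()  # hypothetical one-time fetch
    return findings_list
```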
@@ -2228,30 +2968,87 @@ class ScannerIntegration(ABC):
             for finding in findings:
                 process_finding_with_progress(finding)
         else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            processed_findings_count = self._process_findings_in_batches(findings, process_finding_with_progress)
+
+        return processed_findings_count
+
+    def _update_finding_progress(self, loading_findings):
+        """Update the finding progress bar."""
+        # Backwards compatibility: check if finding_progress exists and has required methods
+        if self.finding_progress is None or not hasattr(self.finding_progress, "update"):
+            return
+
+        if self.num_findings_to_process:
+            self.finding_progress.update(
+                loading_findings,
+                total=self.num_findings_to_process,
+                description=f"[#f8b737]Processing {self.num_findings_to_process} findings from {self.title}.",
+            )
+        if hasattr(self.finding_progress, "advance"):
+            self.finding_progress.advance(loading_findings, 1)
+
+    def _complete_finding_progress(self, loading_findings, processed_count):
+        """Complete the finding progress bar with final status."""
+        # Backwards compatibility: check if finding_progress exists and has update method
+        if self.finding_progress is not None and hasattr(self.finding_progress, "update"):
+            self.finding_progress.update(
+                loading_findings,
+                completed=processed_count,
+                total=max(processed_count, self.num_findings_to_process or processed_count),
+                description=f"[green] Completed processing {processed_count} finding(s) from {self.title}",
+            )
+
+    def _process_findings_in_batches(
+        self, findings: Iterator[IntegrationFinding], process_finding_with_progress
+    ) -> int:
+        """Process findings in batches using thread pool executor."""
+        processed_findings_count = 0
+        batch_size = get_thread_workers_max() * 2
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
+            batch = []
+            for finding in findings:
+                batch.append(finding)
+                if len(batch) >= batch_size:
+                    # Process this batch
                     list(executor.map(process_finding_with_progress, batch))
+                    processed_findings_count += len(batch)
+                    # Clear the batch
+                    batch = []
+
+            # Process any remaining items
+            if batch:
+                list(executor.map(process_finding_with_progress, batch))
+                processed_findings_count += len(batch)
+
+        return processed_findings_count
 
-
-        self.
+    def _finalize_finding_processing(
+        self, scan_history: regscale_models.ScanHistory, current_vulnerabilities: Dict[int, Set[int]]
+    ) -> None:
+        """Finalize the finding processing by saving scan history and closing outdated vulnerabilities and issues."""
+        logger.info(
+            f"Saving scan history with final counts - Low: {scan_history.vLow}, Medium: {scan_history.vMedium}, High: {scan_history.vHigh}, Critical: {scan_history.vCritical}, Info: {scan_history.vInfo}"
+        )
+
+        # Ensure scan history is properly saved with updated counts
+        try:
+            scan_history.save()
+        except Exception as e:
+            logger.error(f"Error saving scan history: {e}")
+            # Try to save again with a fresh fetch
+            try:
+                scan_history.fetch()
+                scan_history.save()
+            except Exception as e2:
+                logger.error(f"Failed to save scan history after retry: {e2}")
+
+        self._results["scan_history"] = scan_history
         self.update_result_counts("issues", regscale_models.Issue.bulk_save(progress_context=self.finding_progress))
+        self.close_outdated_vulnerabilities(current_vulnerabilities)
         self.close_outdated_issues(current_vulnerabilities)
         self._perform_batch_operations(self.finding_progress)
 
-        return processed_findings_count
-
     @staticmethod
     def parse_poam_id(poam_identifier: str) -> Optional[int]:
         """
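
`_process_findings_in_batches` drains the iterator through the pool in chunks of twice the worker count, so memory stays bounded and each `executor.map` call blocks until its batch finishes. The same pattern stripped to essentials:

```python
import concurrent.futures
from typing import Callable, Iterable, TypeVar

T = TypeVar("T")


def process_in_batches(items: Iterable[T], work: Callable[[T], None], max_workers: int = 8) -> int:
    """Feed an iterable through a thread pool in fixed-size batches."""
    processed = 0
    batch_size = max_workers * 2
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as pool:
        batch: list = []
        for item in items:
            batch.append(item)
            if len(batch) >= batch_size:
                list(pool.map(work, batch))  # list() forces the batch to complete
                processed += len(batch)
                batch = []
        if batch:  # drain the final partial batch
            list(pool.map(work, batch))
            processed += len(batch)
    return processed
```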
@@ -2285,15 +3082,15 @@ class ScannerIntegration(ABC):
             parent_id=self.plan_id,
             parent_module=self.parent_module,
         )
-
-
-
-
-
-
-
-
-        )
+        # Extract parsed IDs for valid identifiers
+        parsed_ids = []
+        for issue in issues:
+            if issue.otherIdentifier:
+                parsed_id = self.parse_poam_id(issue.otherIdentifier)
+                if parsed_id is not None:
+                    parsed_ids.append(parsed_id)
+
+        self._max_poam_id = max(parsed_ids, default=0)
 
         # Increment the cached max ID and store it
         self._max_poam_id = (self._max_poam_id or 0) + 1
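
The rewritten POAM-ID logic parses every `otherIdentifier`, keeps only the values that yield an integer, and relies on `max(..., default=0)` so an empty plan starts numbering at 1. In isolation, with a hypothetical identifier format and parser:

```python
import re
from typing import Optional


def parse_trailing_int(identifier: str) -> Optional[int]:
    """Hypothetical parser: pull a trailing integer from IDs like 'V-0042'."""
    match = re.search(r"(\d+)$", identifier or "")
    return int(match.group(1)) if match else None


identifiers = ["V-0007", "V-0042", "malformed", ""]
parsed = [p for i in identifiers if (p := parse_trailing_int(i)) is not None]
next_poam_id = max(parsed, default=0) + 1  # default=0 covers the empty case
assert next_poam_id == 43
```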
@@ -2351,99 +3148,312 @@ class ScannerIntegration(ABC):
 
         # Process checklist if applicable
         if self.type == ScannerIntegrationType.CHECKLIST:
-
-            if not getattr(self, "suppress_asset_not_found_errors", False):
-                logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
-            return
-
-            tool = regscale_models.ChecklistTool.STIGs
-            if finding.vulnerability_type == "Vulnerability Scan":
-                tool = regscale_models.ChecklistTool.VulnerabilityScanner
-
-            if not finding.cci_ref:
-                finding.cci_ref = "CCI-000366"
-
-            # Convert checklist status to string
-            checklist_status_str = str(finding.checklist_status.value)
-
-            logger.debug("Create or update checklist for %s", finding.external_id)
-            regscale_models.Checklist(
-                status=checklist_status_str,
-                assetId=asset.id,
-                tool=tool,
-                baseline=finding.baseline,
-                vulnerabilityId=finding.vulnerability_number,
-                results=finding.results,
-                check=finding.title,
-                cci=finding.cci_ref,
-                ruleId=finding.rule_id,
-                version=finding.rule_version,
-                comments=finding.comments,
-                datePerformed=finding.date_created,
-            ).create_or_update()
-
-            # For failing findings, handle control implementation updates
-            if finding.status != regscale_models.IssueStatus.Closed:
-                logger.debug("Handling failing checklist for %s", finding.external_id)
-                if self.type == ScannerIntegrationType.CHECKLIST:
-                    self.handle_failing_checklist(finding=finding, plan_id=self.plan_id)
-            else:
-                logger.debug("Handling passing checklist for %s", finding.external_id)
-                self.handle_passing_checklist(finding=finding, plan_id=self.plan_id)
+            self._process_checklist_finding(finding)
 
         # Process vulnerability if applicable
+        # IMPORTANT: Always track vulnerabilities regardless of status to enable proper issue closure logic
+        # This ensures that current_vulnerabilities dict accurately reflects the scan state
+        vulnerability_created = self._process_vulnerability_finding(finding, scan_history, current_vulnerabilities)
+
+        # Only create/update issues for non-closed findings (unless ingestClosedIssues is enabled)
         if finding.status != regscale_models.IssueStatus.Closed or ScannerVariables.ingestClosedIssues:
-            if asset := self.get_asset_by_identifier(finding.asset_identifier):
-                if vulnerability_id := self.handle_vulnerability(finding, asset, scan_history):
-                    current_vulnerabilities[asset.id].add(vulnerability_id)
             self.handle_failing_finding(
                 issue_title=finding.issue_title or finding.title,
                 finding=finding,
             )
-
-
+
+        # Update scan history severity counts only if vulnerability was successfully created
+        if vulnerability_created:
+            logger.debug(
+                f"Updating severity count for successfully created vulnerability with severity: {finding.severity}"
+            )
+            self.set_severity_count_for_scan(finding.severity, scan_history, self.scan_history_lock)
+        else:
+            logger.debug(f"Skipping severity count update for finding {finding.external_id} - no vulnerability created")
+
+    def _process_checklist_finding(self, finding: IntegrationFinding) -> None:
+        """Process a checklist finding."""
+        asset = self.get_asset_by_identifier(finding.asset_identifier)
+        if not asset:
+            if not getattr(self, "suppress_asset_not_found_errors", False):
+                logger.error("2. Asset not found for identifier %s", finding.asset_identifier)
+            if not getattr(self, "import_all_findings", False):
+                return
+
+        tool = regscale_models.ChecklistTool.STIGs
+        if finding.vulnerability_type == "Vulnerability Scan":
+            tool = regscale_models.ChecklistTool.VulnerabilityScanner
+
+        if not finding.cci_ref:
+            finding.cci_ref = "CCI-000366"
+
+        # Convert checklist status to string
+        checklist_status_str = str(finding.checklist_status.value)
+
+        logger.debug("Create or update checklist for %s", finding.external_id)
+        regscale_models.Checklist(
+            status=checklist_status_str,
+            assetId=asset.id if asset else None,
+            tool=tool,
+            baseline=finding.baseline,
+            vulnerabilityId=finding.vulnerability_number,
+            results=finding.results,
+            check=finding.title,
+            cci=finding.cci_ref,
+            ruleId=finding.rule_id,
+            version=finding.rule_version,
+            comments=finding.comments,
+            datePerformed=finding.date_created,
+        ).create_or_update()
+
+        # Handle checklist status
+        self._handle_checklist_status(finding)
+
+    def _handle_checklist_status(self, finding: IntegrationFinding) -> None:
+        """Handle the status of a checklist finding."""
+        if finding.status != regscale_models.IssueStatus.Closed:
+            logger.debug("Handling failing checklist for %s", finding.external_id)
+            if self.type == ScannerIntegrationType.CHECKLIST:
+                self.handle_failing_checklist(finding=finding, plan_id=self.plan_id)
+        else:
+            logger.debug("Handling passing checklist for %s", finding.external_id)
+            self.handle_passing_checklist(finding=finding, plan_id=self.plan_id)
+
+    def _process_vulnerability_finding(
+        self,
+        finding: IntegrationFinding,
+        scan_history: regscale_models.ScanHistory,
+        current_vulnerabilities: Dict[int, Set[int]],
+    ) -> bool:
+        """Process a vulnerability finding and return whether vulnerability was created."""
+        logger.debug(f"Processing vulnerability for finding {finding.external_id} with status {finding.status}")
+
+        asset = self.get_asset_by_identifier(finding.asset_identifier)
+        if asset:
+            logger.debug(f"Found asset {asset.id} for finding {finding.external_id}")
+            if vulnerability_id := self.handle_vulnerability(finding, asset, scan_history):
+                current_vulnerabilities[asset.id].add(vulnerability_id)
+                logger.debug(
+                    f"Vulnerability created successfully for finding {finding.external_id} with ID {vulnerability_id}"
+                )
+                return True
+            else:
+                logger.debug(f"Vulnerability creation failed for finding {finding.external_id}")
+        else:
+            logger.debug(f"No asset found for finding {finding.external_id} with identifier {finding.asset_identifier}")
+            if getattr(self, "import_all_findings", False):
+                logger.debug("import_all_findings is True, attempting to create vulnerability without asset")
+                if vulnerability_id := self.handle_vulnerability(finding, None, scan_history):
+                    logger.debug(
+                        f"Vulnerability created successfully for finding {finding.external_id} with ID {vulnerability_id}"
+                    )
+                    return True
+                else:
+                    logger.debug(f"Vulnerability creation failed for finding {finding.external_id}")
+
+        return False
+
+    def handle_vulnerability(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Optional[int]:
+        """
+        Handles the vulnerabilities for a finding.
+
+        :param IntegrationFinding finding: The integration finding
+        :param Optional[regscale_models.Asset] asset: The associated asset
+        :param regscale_models.ScanHistory scan_history: The scan history
+        :rtype: Optional[int]
+        :return: The vulnerability ID
+        """
+        logger.debug(f"Processing vulnerability for finding: {finding.external_id} - {finding.title}")
+
+        # Validate required fields
+        if not self._has_required_vulnerability_fields(finding):
+            return None
+
+        # Check asset requirements
+        if not self._check_asset_requirements(finding, asset):
+            return None
+
+        if asset:
+            logger.debug(f"Found asset: {asset.id} for finding {finding.external_id}")
+
+        # Create vulnerability with retry logic
+        return self._create_vulnerability_with_retry(finding, asset, scan_history)
+
+    def _has_required_vulnerability_fields(self, finding: IntegrationFinding) -> bool:
+        """Check if finding has required fields (plugin_name or cve)."""
+        plugin_name = getattr(finding, "plugin_name", None)
+        cve = getattr(finding, "cve", None)
+
+        if not plugin_name and not cve:
+            logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
+            logger.debug(f"Finding plugin_name: {plugin_name}, cve: {cve}")
+            return False
+
+        logger.debug(f"Finding plugin_name: {plugin_name}, cve: {cve}")
+        return True
+
+    def _check_asset_requirements(self, finding: IntegrationFinding, asset: Optional[regscale_models.Asset]) -> bool:
+        """Check if asset requirements are met."""
+        if asset:
+            return True
+
+        if getattr(self, "import_all_findings", False):
+            logger.debug("Asset not found but import_all_findings is True, continuing without asset")
+            return True
+
+        if not getattr(self, "suppress_asset_not_found_errors", False):
+            logger.warning("VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier)
+        return False
+
+    def _create_vulnerability_with_retry(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Optional[int]:
+        """Create vulnerability with retry logic."""
+        max_retries = 3
+        retry_delay = 2  # seconds
+
+        for attempt in range(max_retries):
+            vulnerability_id = self._try_create_vulnerability(
+                finding, asset, scan_history, attempt, max_retries, retry_delay
+            )
+            if vulnerability_id is not None:
+                return vulnerability_id
|
|
3329
|
+
|
|
3330
|
+
if attempt < max_retries - 1:
|
|
3331
|
+
time.sleep(retry_delay)
|
|
3332
|
+
retry_delay *= 2 # Exponential backoff
|
|
3333
|
+
|
|
3334
|
+
return None
|
|
3335
|
+
|
|
3336
|
+
def _try_create_vulnerability(
|
|
3337
|
+
self,
|
|
3338
|
+
finding: IntegrationFinding,
|
|
3339
|
+
asset: Optional[regscale_models.Asset],
|
|
3340
|
+
scan_history: regscale_models.ScanHistory,
|
|
3341
|
+
attempt: int,
|
|
3342
|
+
max_retries: int,
|
|
3343
|
+
retry_delay: int,
|
|
3344
|
+
) -> Optional[int]:
|
|
3345
|
+
"""Try to create vulnerability for a single attempt."""
|
|
3346
|
+
try:
|
|
3347
|
+
logger.debug(f"Creating vulnerability for finding {finding.external_id} (attempt {attempt + 1})")
|
|
3348
|
+
vulnerability = self.create_vulnerability_from_finding(finding, asset, scan_history)
|
|
3349
|
+
finding.vulnerability_id = vulnerability.id
|
|
3350
|
+
logger.debug(f"Successfully created vulnerability {vulnerability.id} for finding {finding.external_id}")
|
|
3351
|
+
|
|
3352
|
+
self._handle_associated_issue(finding)
|
|
3353
|
+
return vulnerability.id
|
|
3354
|
+
|
|
3355
|
+
except Exception as e:
|
|
3356
|
+
self._handle_vulnerability_creation_error(e, finding, attempt, max_retries, retry_delay)
|
|
3357
|
+
return None
|
|
3358
|
+
|
|
3359
|
+
def _handle_associated_issue(self, finding: IntegrationFinding) -> None:
|
|
3360
|
+
"""Handle associated issue creation if needed."""
|
|
3361
|
+
if ScannerVariables.vulnerabilityCreation.lower() != "noissue":
|
|
3362
|
+
self.create_or_update_issue_from_finding(
|
|
3363
|
+
title=finding.title,
|
|
3364
|
+
finding=finding,
|
|
3365
|
+
)
|
|
3366
|
+
|
|
3367
|
+
def _handle_vulnerability_creation_error(
|
|
3368
|
+
self, error: Exception, finding: IntegrationFinding, attempt: int, max_retries: int, retry_delay: int
|
|
3369
|
+
) -> None:
|
|
3370
|
+
"""Handle error during vulnerability creation."""
|
|
3371
|
+
if attempt < max_retries - 1:
|
|
3372
|
+
logger.warning(
|
|
3373
|
+
f"Vulnerability creation failed for finding {finding.external_id} "
|
|
3374
|
+
f"(attempt {attempt + 1}/{max_retries}): {error}. "
|
|
3375
|
+
f"Retrying in {retry_delay} seconds..."
|
|
3376
|
+
)
|
|
3377
|
+
else:
|
|
3378
|
+
logger.error(
|
|
3379
|
+
f"Failed to create vulnerability for finding {finding.external_id} "
|
|
3380
|
+
f"after {max_retries} attempts: {error}"
|
|
3381
|
+
)
|
|
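The three retry helpers above reduce to one well-known pattern: a bounded loop with exponential backoff around a fallible create call. A minimal, self-contained sketch of that pattern, assuming only a `create` callable that returns `None` on failure (the helper name and its arguments are illustrative, not part of regscale-cli):

```python
import time
from typing import Callable, Optional, TypeVar

T = TypeVar("T")


def retry_with_backoff(
    create: Callable[[], Optional[T]],
    max_retries: int = 3,
    retry_delay: float = 2.0,
) -> Optional[T]:
    """Run create() up to max_retries times, doubling the delay between attempts."""
    for attempt in range(max_retries):
        result = create()  # assumed to return None on failure
        if result is not None:
            return result
        if attempt < max_retries - 1:  # no sleep after the final attempt
            time.sleep(retry_delay)
            retry_delay *= 2  # exponential backoff: 2s, 4s, ...
    return None
```

Skipping the sleep after the last attempt, as the diffed code also does, keeps the failure path from paying a delay it can never recover from.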
 
     def create_vulnerability_from_finding(
-        self,
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
     ) -> regscale_models.Vulnerability:
         """
         Creates a vulnerability from an integration finding.
 
         :param IntegrationFinding finding: The integration finding
-        :param regscale_models.Asset asset: The associated asset
+        :param Optional[regscale_models.Asset] asset: The associated asset (can be None if import_all_findings is True)
         :param regscale_models.ScanHistory scan_history: The scan history
         :return: The created vulnerability
         :rtype: regscale_models.Vulnerability
         """
-        vulnerability
+        logger.debug(f"Creating vulnerability object for finding {finding.external_id}")
+
+        # Create vulnerability object
+        vulnerability = self._build_vulnerability_object(finding, asset, scan_history)
+
+        # Save vulnerability
+        logger.debug(f"Calling create_or_update for vulnerability with title: {vulnerability.title}")
+        vulnerability = vulnerability.create_or_update()
+        logger.debug(f"Vulnerability created/updated with ID: {vulnerability.id}")
+
+        # Create mapping if asset exists
+        if asset:
+            self._create_vulnerability_mapping(vulnerability, finding, asset, scan_history)
+        else:
+            logger.debug(
+                f"Skipping VulnerabilityMapping creation for vulnerability {vulnerability.id} - no asset provided"
+            )
+
+        return vulnerability
+
+    def _build_vulnerability_object(
+        self,
+        finding: IntegrationFinding,
+        asset: Optional[regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> regscale_models.Vulnerability:
+        """Build the vulnerability object from finding data."""
+        # Get mapped values
+        severity = self._get_mapped_severity(finding)
+        ip_address = self._get_ip_address(finding, asset)
+        dns = self._get_dns(asset)
+        operating_system = self._get_operating_system(asset)
+
+        return regscale_models.Vulnerability(
             title=finding.title,
             cve=finding.cve,
-            vprScore=(
-
-            ),  # If this is the VPR score, otherwise use a different field
-            cvsSv3BaseScore=finding.cvss_v3_base_score or finding.cvss_v3_score or finding.cvss_score,
+            vprScore=self._get_vpr_score(finding),
+            cvsSv3BaseScore=self._get_cvss_v3_score(finding),
             cvsSv2BaseScore=finding.cvss_v2_score,
             cvsSv3BaseVector=finding.cvss_v3_vector,
             cvsSv2BaseVector=finding.cvss_v2_vector,
             scanId=scan_history.id,
-            severity=
+            severity=severity,
             description=finding.description,
             dateLastUpdated=finding.date_last_updated,
             parentId=self.plan_id,
             parentModule=self.parent_module,
-            dns=
+            dns=dns,
             status=regscale_models.VulnerabilityStatus.Open,
-            ipAddress=
+            ipAddress=ip_address,
             firstSeen=finding.first_seen,
             lastSeen=finding.last_seen,
-            plugInName=finding.cve or finding.plugin_name,
-            plugInId=finding.plugin_id,
-            exploitAvailable=None,
-            plugInText=finding.plugin_text
-
-
-
-            operatingSystem=asset.operatingSystem if hasattr(asset, "operatingSystem") else None,
+            plugInName=finding.cve or finding.plugin_name,
+            plugInId=finding.plugin_id or finding.external_id,
+            exploitAvailable=None,
+            plugInText=finding.plugin_text or finding.observations,
+            port=getattr(finding, "port", None),
+            protocol=getattr(finding, "protocol", None),
+            operatingSystem=operating_system,
             fixedVersions=finding.fixed_versions,
             buildVersion=finding.build_version,
             fixStatus=finding.fix_status,
@@ -2454,8 +3464,68 @@ class ScannerIntegration(ABC):
             affectedPackages=finding.affected_packages,
         )
 
-
-
+    def _get_mapped_severity(self, finding: IntegrationFinding) -> regscale_models.VulnerabilitySeverity:
+        """Get mapped severity for the finding."""
+        logger.debug(f"Finding severity: '{finding.severity}' (type: {type(finding.severity)})")
+        mapped_severity = self.issue_to_vulnerability_map.get(
+            finding.severity, regscale_models.VulnerabilitySeverity.Low
+        )
+        logger.debug(f"Mapped severity: {mapped_severity}")
+        return mapped_severity
+
+    def _get_ip_address(self, finding: IntegrationFinding, asset: Optional[regscale_models.Asset]) -> str:
+        """Get IP address from finding or asset."""
+        if finding.ip_address:
+            return finding.ip_address
+        if asset and hasattr(asset, "ipAddress") and asset.ipAddress:
+            return asset.ipAddress
+        return ""
+
+    def _get_dns(self, asset: Optional[regscale_models.Asset]) -> str:
+        """Get DNS from asset."""
+        if asset and hasattr(asset, "fqdn") and asset.fqdn:
+            return asset.fqdn
+        return "unknown"
+
+    def _get_operating_system(self, asset: Optional[regscale_models.Asset]) -> Optional[str]:
+        """Get operating system from asset."""
+        if asset and hasattr(asset, "operatingSystem"):
+            return asset.operatingSystem
+        return None
+
+    def _get_vpr_score(self, finding: IntegrationFinding) -> Optional[float]:
+        """Get VPR score from finding."""
+        if hasattr(finding, "vprScore"):
+            return finding.vpr_score
+        return None
+
+    def _get_cvss_v3_score(self, finding: IntegrationFinding) -> Optional[float]:
+        """Get CVSS v3 score from finding."""
+        return finding.cvss_v3_base_score or finding.cvss_v3_score or finding.cvss_score
+
+    def _create_vulnerability_mapping(
+        self,
+        vulnerability: regscale_models.Vulnerability,
+        finding: IntegrationFinding,
+        asset: regscale_models.Asset,
+        scan_history: regscale_models.ScanHistory,
+    ) -> None:
+        """Create vulnerability mapping with retry logic."""
+        logger.debug(f"Creating vulnerability mapping for vulnerability {vulnerability.id}")
+        logger.debug(f"Scan History ID: {scan_history.id}, Asset ID: {asset.id}, Plan ID: {self.plan_id}")
+
+        mapping = self._build_vulnerability_mapping(vulnerability, finding, asset, scan_history)
+        self._create_mapping_with_retry(mapping, vulnerability.id)
+
+    def _build_vulnerability_mapping(
+        self,
+        vulnerability: regscale_models.Vulnerability,
+        finding: IntegrationFinding,
+        asset: regscale_models.Asset,
+        scan_history: regscale_models.ScanHistory,
+    ) -> regscale_models.VulnerabilityMapping:
+        """Build vulnerability mapping object."""
+        return regscale_models.VulnerabilityMapping(
             vulnerabilityId=vulnerability.id,
             assetId=asset.id,
             scanId=scan_history.id,
@@ -2468,46 +3538,77 @@ class ScannerIntegration(ABC):
             lastSeen=finding.last_seen,
             status=finding.status,
             dateLastUpdated=get_current_datetime(),
-        )
-        return vulnerability
+        )
 
-    def
-
-
-
-
-
-        ""
-
+    def _create_mapping_with_retry(self, mapping: regscale_models.VulnerabilityMapping, vulnerability_id: int) -> None:
+        """Create vulnerability mapping with retry logic."""
+        import logging
+
+        max_retries = 3
+        retry_delay = 0.5
+        regscale_logger = logging.getLogger("regscale")
+        original_level = regscale_logger.level
 
-
-
-
-
-
-        """
-        if not (finding.plugin_name or finding.cve):
-            logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
-            return None
+        for attempt in range(max_retries):
+            if self._try_create_mapping(
+                mapping, vulnerability_id, attempt, max_retries, regscale_logger, original_level
+            ):
+                break
 
-
-
-
-                "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
-            )
-            return None
+            if attempt < max_retries - 1:
+                time.sleep(retry_delay)
+                retry_delay *= 2  # Exponential backoff
 
-
-
+    def _try_create_mapping(
+        self,
+        mapping: regscale_models.VulnerabilityMapping,
+        vulnerability_id: int,
+        attempt: int,
+        max_retries: int,
+        regscale_logger: logging.Logger,
+        original_level: int,
+    ) -> bool:
+        """Try to create mapping for a single attempt."""
+        try:
+            # Suppress error logging during retry attempts (but not the final attempt)
+            if attempt < max_retries - 1:
+                regscale_logger.setLevel(logging.CRITICAL)
 
-
-
-
-
-
+            mapping.create_unique()
+
+            # Restore original log level
+            regscale_logger.setLevel(original_level)
+
+            if attempt > 0:
+                logger.info(
+                    f"VulnerabilityMapping created successfully on attempt {attempt + 1} for vulnerability {vulnerability_id}"
+                )
+            else:
+                logger.debug(f"Vulnerability mapping created for vulnerability {vulnerability_id}")
+            return True
+
+        except Exception as mapping_error:
+            # Restore original log level before handling the exception
+            regscale_logger.setLevel(original_level)
+            return self._handle_mapping_error(mapping_error, attempt, max_retries)
+
+    def _handle_mapping_error(self, error: Exception, attempt: int, max_retries: int) -> bool:
+        """Handle error during mapping creation."""
+        if attempt >= max_retries - 1:
+            logger.error(f"Failed to create VulnerabilityMapping after {max_retries} attempts: {error}")
+            # Convert to a more specific exception type
+            raise RuntimeError(f"VulnerabilityMapping creation failed after {max_retries} attempts") from error
+
+        # Check if it's a reference error
+        error_str = str(error)
+        if "400" in error_str and "Object reference" in error_str:
+            logger.debug(
+                f"VulnerabilityMapping creation failed due to reference error (attempt {attempt + 1}/{max_retries}). Retrying..."
             )
+            return False
 
-
+        # Different error, re-raise with more context
+        raise RuntimeError(f"Unexpected error during VulnerabilityMapping creation: {error}") from error
 
     def _filter_vulns_open_by_other_tools(
         self, all_vulns: list[regscale_models.Vulnerability]
@@ -2532,13 +3633,34 @@ class ScannerIntegration(ABC):
             vuln_list.append(vuln)
         return vuln_list
 
-    def close_outdated_vulnerabilities(self, current_vulnerabilities: Dict[int, Set[int]]) ->
+    def close_outdated_vulnerabilities(self, current_vulnerabilities: Dict[int, Set[int]]) -> int:
         """
         Closes vulnerabilities that are not in the current set of vulnerability IDs for each asset.
 
         :param Dict[int, Set[int]] current_vulnerabilities: Dictionary of asset IDs to lists of current vulnerability IDs
-        :
+        :return: Number of vulnerabilities closed
+        :rtype: int
         """
+        if not self.close_outdated_findings:
+            logger.info("Skipping closing outdated vulnerabilities.")
+            return 0
+
+        # Check global preventAutoClose setting
+        from regscale.core.app.application import Application
+
+        app = Application()
+        if app.config.get("preventAutoClose", False):
+            logger.info("Skipping closing outdated vulnerabilities due to global preventAutoClose setting.")
+            return 0
+
+        # REG-17044: Add defensive logging to track vulnerability closure state
+        logger.debug(f"Vulnerability Closure Analysis for {self.title}:")
+        logger.debug(f"  - Assets with current vulnerabilities: {len(current_vulnerabilities)}")
+        total_current_vulns = sum(len(vuln_set) for vuln_set in current_vulnerabilities.values())
+        logger.debug(f"  - Total current vulnerabilities tracked: {total_current_vulns}")
+        if total_current_vulns == 0:
+            logger.warning("No current vulnerabilities tracked - this may close all vulnerabilities!")
+
         # Get all current vulnerability IDs
         current_vuln_ids = {vuln_id for vuln_ids in current_vulnerabilities.values() for vuln_id in vuln_ids}
 
@@ -2561,9 +3683,14 @@ class ScannerIntegration(ABC):
                 vuln.dateClosed = get_current_datetime()
                 vuln.save()
                 closed_count += 1
-                logger.
+                logger.debug("Closed vulnerability %d", vuln.id)
 
-
+        (
+            logger.info("Closed %d outdated vulnerabilities.", closed_count)
+            if closed_count > 0
+            else logger.info("No outdated vulnerabilities to close.")
+        )
+        return closed_count
 
     @classmethod
     def close_mappings_list(cls, vuln: regscale_models.Vulnerability) -> None:
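Both `close_outdated_vulnerabilities` above and the `_should_close_issues` helper in the next hunk gate destructive bulk closure behind the same two checks: a per-integration `close_outdated_findings` flag and the global `preventAutoClose` configuration key. A small sketch of that gate, with a plain dict standing in for `Application().config` (an assumption made for illustration):

```python
from typing import Any, Dict


def should_auto_close(config: Dict[str, Any], close_outdated_findings: bool) -> bool:
    """Apply the two opt-out gates checked before closing records in bulk."""
    if not close_outdated_findings:  # per-integration opt-out
        return False
    if config.get("preventAutoClose", False):  # global opt-out, off by default
        return False
    return True


# A config loaded from init.yaml could disable auto-close everywhere at once.
assert should_auto_close({"preventAutoClose": True}, True) is False
assert should_auto_close({}, True) is True
```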
@@ -2601,31 +3728,101 @@ class ScannerIntegration(ABC):
         :return: Number of issues closed
         :rtype: int
         """
-        if not self.
-            logger.info("Skipping closing outdated issues.")
+        if not self._should_close_issues(current_vulnerabilities):
             return 0
 
-
+        self._log_vulnerability_closure_analysis(current_vulnerabilities)
+
         affected_control_ids = set()
         count_lock = threading.Lock()
 
         open_issues = regscale_models.Issue.fetch_issues_by_ssp(
             None, ssp_id=self.plan_id, status=regscale_models.IssueStatus.Open.value
         )
-
-
+
+        task_id = self._init_closure_task(len(open_issues))
+        self._process_issues_for_closure(
+            open_issues, current_vulnerabilities, count_lock, affected_control_ids, task_id
         )
+        self._update_affected_control_statuses(affected_control_ids)
 
-
-
-
+        closed_count = len(affected_control_ids)
+        self._log_closure_results(closed_count)
+        return closed_count
+
+    def _should_close_issues(self, current_vulnerabilities: Dict[int, Set[int]]) -> bool:
+        """
+        Check if issues should be closed based on settings.
+
+        :param Dict[int, Set[int]] current_vulnerabilities: Current vulnerabilities
+        :return: True if should proceed with closing, False otherwise
+        :rtype: bool
+        """
+        if not self.close_outdated_findings:
+            logger.info("Skipping closing outdated issues.")
+            return False
+
+        from regscale.core.app.application import Application
+
+        app = Application()
+        if app.config.get("preventAutoClose", False):
+            logger.info("Skipping closing outdated issues due to global preventAutoClose setting.")
+            return False
+
+        return True
+
+    def _log_vulnerability_closure_analysis(self, current_vulnerabilities: Dict[int, Set[int]]) -> None:
+        """
+        Log analysis of current vulnerabilities for debugging.
+
+        :param Dict[int, Set[int]] current_vulnerabilities: Current vulnerabilities
+        :rtype: None
+        """
+        logger.debug(f"Issue Closure Analysis for {self.title}:")
+        total_current_vulns = sum(len(vuln_set) for vuln_set in current_vulnerabilities.values())
+        logger.debug(f"  - Total current vulnerabilities to check against: {total_current_vulns}")
+        if total_current_vulns == 0:
+            logger.warning("No current vulnerabilities tracked - this may close all issues!")
+
+    def _init_closure_task(self, total_issues: int):
+        """
+        Initialize progress task for issue closure.
+
+        :param int total_issues: Total number of issues
+        :return: Task ID or None
+        """
+        if self.finding_progress is not None and hasattr(self.finding_progress, "add_task"):
+            return self.finding_progress.add_task(
+                f"[cyan]Analyzing {total_issues} issue(s) and closing any outdated issue(s)...",
+                total=total_issues,
+            )
+        return None
+
+    def _process_issues_for_closure(
+        self,
+        open_issues: list,
+        current_vulnerabilities: Dict[int, Set[int]],
+        count_lock,
+        affected_control_ids: set,
+        task_id,
+    ) -> None:
+        """
+        Process all issues for potential closure.
+
+        :param list open_issues: Open issues to process
+        :param Dict[int, Set[int]] current_vulnerabilities: Current vulnerabilities
+        :param count_lock: Threading lock
+        :param set affected_control_ids: Set to track affected controls
+        :param task_id: Progress task ID
+        :rtype: None
+        """
 
-
-        """
+        def _process_single_issue(iss: regscale_models.Issue):
             if self.should_close_issue(iss, current_vulnerabilities):
                 self._close_issue(iss, count_lock, affected_control_ids)
-
-
+            if task_id is not None and self.finding_progress is not None and hasattr(self.finding_progress, "update"):
+                with count_lock:
+                    self.finding_progress.update(task_id, advance=1)
 
         max_workers = get_thread_workers_max()
         if max_workers == 1:
@@ -2634,15 +3831,28 @@ class ScannerIntegration(ABC):
         else:
             self._process_issues_multithreaded(open_issues, _process_single_issue, max_workers)
 
+    def _update_affected_control_statuses(self, affected_control_ids: set) -> None:
+        """
+        Update status for all affected control implementations.
+
+        :param set affected_control_ids: Control IDs to update
+        :rtype: None
+        """
         for control_id in affected_control_ids:
             self.update_control_implementation_status_after_close(control_id)
+            self.update_assessment_status_from_control_implementation(control_id)
 
-
+    def _log_closure_results(self, closed_count: int) -> None:
+        """
+        Log results of issue closure operation.
+
+        :param int closed_count: Number of issues closed
+        :rtype: None
+        """
+        if closed_count > 0:
             logger.info("Closed %d outdated issues.", closed_count)
-
-
-        )
-        return closed_count
+        else:
+            logger.info("No outdated issues to close.")
 
     def _close_issue(self, issue: regscale_models.Issue, count_lock: threading.Lock, affected_control_ids: set):
         """
@@ -2661,15 +3871,18 @@ class ScannerIntegration(ABC):
         issue.dateLastUpdated = get_current_datetime()
         issue.save()
 
-        if ScannerVariables.useMilestones:
-
-
-
-
-
-
-
-
+        if ScannerVariables.useMilestones and issue.id:
+            try:
+                regscale_models.Milestone(
+                    title=f"Issue closed from {self.title} scan",
+                    milestoneDate=issue.dateCompleted,
+                    responsiblePersonId=self.assessor_id,
+                    completed=True,
+                    parentID=issue.id,
+                    parentModule="issues",
+                ).create_or_update()
+            except Exception as e:
+                logger.warning("Failed to create closed issue milestone: %s", str(e))
         logger.debug("Created milestone for issue %s from %s tool", issue.id, self.title)
 
         with count_lock:
@@ -2735,7 +3948,103 @@ class ScannerIntegration(ABC):
         if control_implementation.status != new_status:
             control_implementation.status = new_status
             self.control_implementation_map[control_id] = control_implementation.save()
-            logger.
+            logger.debug("Updated control implementation %d status to %s", control_id, new_status)
+
+    def update_assessment_status_from_control_implementation(self, control_implementation_id: int) -> None:
+        """
+        Updates the assessment status based on the control implementation status.
+        Treats the ControlImplementation status as the source of truth.
+
+        Sets assessment to PASS if ControlImplementation status is FULLY_IMPLEMENTED,
+        otherwise sets it to FAIL.
+
+        This method should be called after update_control_implementation_status_after_close
+        to ensure assessments reflect the final control implementation state.
+
+        :param int control_implementation_id: The ID of the control implementation
+        :rtype: None
+        """
+        # Get the cached assessment for this control implementation
+        assessment = self.assessment_map.get(control_implementation_id)
+
+        if not assessment:
+            logger.debug(
+                "No assessment found in cache for control implementation %d, skipping assessment update",
+                control_implementation_id,
+            )
+            return
+
+        # Get the control implementation to check its status
+        control_implementation = self.control_implementation_map.get(
+            control_implementation_id
+        ) or regscale_models.ControlImplementation.get_object(object_id=control_implementation_id)
+
+        if not control_implementation:
+            logger.warning("Control implementation %d not found, cannot update assessment", control_implementation_id)
+            return
+
+        # Determine assessment result based on control implementation status
+        # Treat ControlImplementation status as the source of truth
+        new_assessment_result = (
+            regscale_models.AssessmentResultsStatus.PASS
+            if control_implementation.status == regscale_models.ImplementationStatus.FULLY_IMPLEMENTED.value
+            else regscale_models.AssessmentResultsStatus.FAIL
+        )
+
+        # Only update if the status has changed
+        if assessment.assessmentResult != new_assessment_result.value:
+            assessment.assessmentResult = new_assessment_result.value
+            assessment.save()
+            logger.debug(
+                "Updated assessment %d for control implementation %d: assessmentResult=%s (based on control status: %s)",
+                assessment.id,
+                control_implementation_id,
+                new_assessment_result.value,
+                control_implementation.status,
+            )
+        else:
+            logger.debug(
+                "Assessment %d already has correct status %s for control implementation %d",
+                assessment.id,
+                assessment.assessmentResult,
+                control_implementation_id,
+            )
+
+    @staticmethod
+    def is_issue_protected_from_auto_close(issue: regscale_models.Issue) -> bool:
+        """
+        Check if an issue is protected from automatic closure.
+
+        :param regscale_models.Issue issue: The issue to check
+        :return: True if the issue should not be auto-closed
+        :rtype: bool
+        """
+        try:
+            # Check global configuration setting
+            app = Application()
+            if app.config.get("preventAutoClose", False):
+                logger.debug(f"Issue {issue.id} is protected from auto-closure by global preventAutoClose setting")
+                return True
+
+            # Check for protection property
+            properties = Property.get_all_by_parent(parent_id=issue.id, parent_module="issues")
+
+            for prop in properties:
+                if prop.key == "PREVENT_AUTO_CLOSE" and prop.value.lower() == "true":
+                    logger.debug(f"Issue {issue.id} is protected from auto-closure by PREVENT_AUTO_CLOSE property")
+                    return True
+
+            # Check for manual reopen indicators in changes
+            if issue.changes and "manually reopened" in issue.changes.lower():
+                logger.debug(f"Issue {issue.id} is protected from auto-closure due to manual reopen indicator")
+                return True
+
+            return False
+
+        except Exception as e:
+            # If we can't check, err on the side of caution and protect the issue
+            logger.warning(f"Could not check protection status for issue {issue.id}: {e}")
+            return True
 
     def should_close_issue(self, issue: regscale_models.Issue, current_vulnerabilities: Dict[int, Set[int]]) -> bool:
         """
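The protection check added above layers three independent guards, evaluated in order: the global `preventAutoClose` config, a per-issue `PREVENT_AUTO_CLOSE` property, and a "manually reopened" marker in the issue's change history, with any failure defaulting to protected. A sketch of that same precedence using plain dataclasses in place of the RegScale models (the stub types are illustrative assumptions):

```python
from dataclasses import dataclass, field
from typing import Dict, Optional


@dataclass
class IssueStub:
    """Stand-in for regscale_models.Issue; only the fields the check reads."""
    id: int
    changes: Optional[str] = None
    properties: Dict[str, str] = field(default_factory=dict)


def is_protected(issue: IssueStub, prevent_auto_close: bool) -> bool:
    if prevent_auto_close:  # 1. global config setting wins
        return True
    if issue.properties.get("PREVENT_AUTO_CLOSE", "").lower() == "true":
        return True  # 2. per-issue property
    if issue.changes and "manually reopened" in issue.changes.lower():
        return True  # 3. audit-trail marker
    return False


assert is_protected(IssueStub(1, changes="Manually reopened by admin"), False)
```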
@@ -2754,6 +4063,11 @@ class ScannerIntegration(ABC):
             )
             return False
 
+        # Check if the issue is protected from auto-closure
+        if self.is_issue_protected_from_auto_close(issue):
+            logger.debug(f"Issue {issue.id} is protected from automatic closure")
+            return False
+
         # If the issue has a vulnerability ID, check if it's still current for any asset
         if issue.vulnerabilityId:
             # Get vulnerability mappings for this issue
@@ -2779,23 +4093,55 @@ class ScannerIntegration(ABC):
         return True
 
     @staticmethod
-    def set_severity_count_for_scan(
+    def set_severity_count_for_scan(
+        severity: str, scan_history: regscale_models.ScanHistory, lock: Optional[threading.RLock] = None
+    ) -> None:
         """
-        Increments the count of the severity
+        Increments the count of the severity in a thread-safe manner.
+
+        NOTE: This method does NOT save the scan_history object. The caller is responsible
+        for saving the scan_history after all increments are complete to avoid race conditions
+        and excessive database writes in multi-threaded environments.
+
         :param str severity: Severity of the vulnerability
         :param regscale_models.ScanHistory scan_history: Scan history object
+        :param Optional[threading.RLock] lock: Thread lock for synchronization (recommended in multi-threaded context)
         :rtype: None
         """
-
-
-
-        scan_history.
-
-
-
-
+
+        def _increment_severity():
+            """Internal method to perform the actual increment."""
+            logger.debug(f"Setting severity count for scan {scan_history.id}: severity='{severity}'")
+            logger.debug(
+                f"Current counts - Low: {scan_history.vLow}, Medium: {scan_history.vMedium}, High: {scan_history.vHigh}, Critical: {scan_history.vCritical}, Info: {scan_history.vInfo}"
+            )
+
+            if severity.lower() == regscale_models.IssueSeverity.Low.value.lower():
+                scan_history.vLow += 1
+                logger.debug(f"Incremented vLow count to {scan_history.vLow}")
+            elif severity.lower() == regscale_models.IssueSeverity.Moderate.value.lower():
+                scan_history.vMedium += 1
+                logger.debug(f"Incremented vMedium count to {scan_history.vMedium}")
+            elif severity.lower() == regscale_models.IssueSeverity.High.value.lower():
+                scan_history.vHigh += 1
+                logger.debug(f"Incremented vHigh count to {scan_history.vHigh}")
+            elif severity.lower() == regscale_models.IssueSeverity.Critical.value.lower():
+                scan_history.vCritical += 1
+                logger.debug(f"Incremented vCritical count to {scan_history.vCritical}")
+            else:
+                scan_history.vInfo += 1
+                logger.debug(f"Incremented vInfo count to {scan_history.vInfo}")
+
+            logger.debug(
+                f"Updated counts - Low: {scan_history.vLow}, Medium: {scan_history.vMedium}, High: {scan_history.vHigh}, Critical: {scan_history.vCritical}, Info: {scan_history.vInfo}"
+            )
+
+        # Use lock if provided for thread-safe increments
+        if lock:
+            with lock:
+                _increment_severity()
         else:
-
+            _increment_severity()
 
     @classmethod
     def cci_assessment(cls, plan_id: int) -> None:
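The reworked `set_severity_count_for_scan` deliberately leaves persistence to the caller: worker threads increment counters under one shared lock, and the scan history is saved once at the end. A runnable sketch of that caller-saves contract, with a plain `Counter` standing in for the `scan_history` model (the names here are placeholders, not package APIs):

```python
import threading
from collections import Counter
from concurrent.futures import ThreadPoolExecutor

counts = Counter()        # stands in for scan_history.vLow / vMedium / ...
lock = threading.RLock()  # one lock shared by every worker thread


def record(severity: str) -> None:
    with lock:  # serialize the increment; no save happens here
        counts[severity.lower()] += 1


with ThreadPoolExecutor(max_workers=4) as pool:
    list(pool.map(record, ["high", "low", "high", "critical"]))

# The caller persists once after all increments, mirroring the
# "does NOT save the scan_history" contract in the docstring above.
print(counts)  # Counter({'high': 2, 'low': 1, 'critical': 1})
```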
@@ -2944,13 +4290,10 @@ class ScannerIntegration(ABC):
         APIHandler().log_api_summary()
         created_count = instance._results.get("assets", {}).get("created_count", 0)
         updated_count = instance._results.get("assets", {}).get("updated_count", 0)
-
-        # Ensure dedupe_count is always a positive value
-        dedupe_count = dedupe_count if dedupe_count >= 0 else dedupe_count * -1
+        total_assets = created_count + updated_count
         logger.info(
-            "%d
-
-            dedupe_count,
+            "%d asset(s) synced to RegScale: %d created, %d updated.",
+            total_assets,
             created_count,
             updated_count,
         )
@@ -3096,11 +4439,11 @@ class ScannerIntegration(ABC):
         :return: None
         :rtype: None
         """
-        finding.due_date =
+        finding.due_date = self.due_date_handler.calculate_due_date(
             severity=finding.severity,
             created_date=finding.date_created or self.scan_date,
-
-
+            cve=finding.cve,
+            title=finding.title or self.title,
         )
 
     def _update_last_seen_date(self, finding: IntegrationFinding) -> None:
@@ -3209,30 +4552,14 @@ class ScannerIntegration(ABC):
         :rtype: int
         """
         logger.info("Updating RegScale checklists...")
-        loading_findings = self.
-            f"[#f8b737]Creating and updating checklists from {self.title}."
-        )
+        loading_findings = self._init_checklist_progress_task()
        checklists_processed = 0
 
         def process_finding(finding_to_process: IntegrationFinding) -> None:
-            """
-            Process a single finding and update the progress bar.
-
-            :param IntegrationFinding finding_to_process: The finding to process
-            :rtype: None
-            """
             nonlocal checklists_processed
             try:
                 self.process_checklist(finding_to_process)
-
-                    self.num_findings_to_process
-                ):
-                    self.finding_progress.update(
-                        loading_findings,
-                        total=self.num_findings_to_process,
-                        description=f"[#f8b737]Creating and updating {self.num_findings_to_process} checklists from {self.title}.",
-                    )
-                self.finding_progress.advance(loading_findings, 1)
+                self._update_checklist_progress(loading_findings)
                 checklists_processed += 1
             except Exception as exc:
                 self.log_error(
@@ -3242,6 +4569,64 @@ class ScannerIntegration(ABC):
                     exc,
                 )
 
+        self._execute_checklist_processing(findings, process_finding)
+        return checklists_processed
+
+    def _init_checklist_progress_task(self):
+        """
+        Initialize progress task for checklist processing.
+
+        :return: Task ID or None
+        """
+        if self.finding_progress is not None and hasattr(self.finding_progress, "add_task"):
+            return self.finding_progress.add_task(f"[#f8b737]Creating and updating checklists from {self.title}.")
+        return None
+
+    def _update_checklist_progress(self, loading_findings) -> None:
+        """
+        Update checklist processing progress.
+
+        :param loading_findings: Progress task ID
+        :rtype: None
+        """
+        if not (
+            loading_findings is not None
+            and self.finding_progress is not None
+            and hasattr(self.finding_progress, "tasks")
+            and hasattr(self.finding_progress, "update")
+        ):
+            return
+
+        if self._should_update_progress_total(loading_findings):
+            self.finding_progress.update(
+                loading_findings,
+                total=self.num_findings_to_process,
+                description=f"[#f8b737]Creating and updating {self.num_findings_to_process} checklists from {self.title}.",
+            )
+
+        if hasattr(self.finding_progress, "advance"):
+            self.finding_progress.advance(loading_findings, 1)
+
+    def _should_update_progress_total(self, loading_findings) -> bool:
+        """
+        Check if progress total should be updated.
+
+        :param loading_findings: Progress task ID
+        :return: True if should update, False otherwise
+        :rtype: bool
+        """
+        return self.num_findings_to_process and self.finding_progress.tasks[loading_findings].total != float(
+            self.num_findings_to_process
+        )
+
+    def _execute_checklist_processing(self, findings: List[IntegrationFinding], process_finding) -> None:
+        """
+        Execute checklist processing sequentially or in parallel.
+
+        :param List[IntegrationFinding] findings: Findings to process
+        :param process_finding: Function to process each finding
+        :rtype: None
+        """
         if get_thread_workers_max() == 1:
             for finding in findings:
                 process_finding(finding)
@@ -3249,8 +4634,6 @@ class ScannerIntegration(ABC):
             with concurrent.futures.ThreadPoolExecutor(max_workers=get_thread_workers_max()) as executor:
                 list(executor.map(process_finding, findings))
 
-        return checklists_processed
-
     def create_control_test_result(
         self,
         finding: IntegrationFinding,
@@ -3309,3 +4692,151 @@ class ScannerIntegration(ABC):
                 datetime.strptime(issue_date_created, "%Y-%m-%d %H:%M:%S") + diff
             )
         return None
+
+    def create_vulnerabilities_bulk(
+        self,
+        findings: List[IntegrationFinding],
+        assets: Dict[str, regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Dict[str, int]:
+        """
+        Create vulnerabilities in bulk to improve performance and reduce API calls.
+
+        :param List[IntegrationFinding] findings: List of findings to create vulnerabilities for
+        :param Dict[str, regscale_models.Asset] assets: Dictionary of assets by identifier
+        :param regscale_models.ScanHistory scan_history: The scan history
+        :return: Dictionary mapping finding external_id to vulnerability_id
+        :rtype: Dict[str, int]
+        """
+        vulnerabilities_to_create, finding_to_vuln_map = self._prepare_vulnerabilities_for_bulk(
+            findings, assets, scan_history
+        )
+
+        if not vulnerabilities_to_create:
+            logger.warning("No vulnerabilities to create in bulk")
+            return {}
+
+        return self._execute_bulk_vulnerability_creation(
+            vulnerabilities_to_create, finding_to_vuln_map, findings, assets, scan_history
+        )
+
+    def _prepare_vulnerabilities_for_bulk(
+        self,
+        findings: List[IntegrationFinding],
+        assets: Dict[str, regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> tuple[List, Dict]:
+        """Prepare vulnerability objects for bulk creation."""
+        vulnerabilities_to_create = []
+        finding_to_vuln_map = {}
+
+        for finding in findings:
+            if not self._is_finding_valid_for_vulnerability(finding):
+                continue
+
+            asset = assets.get(finding.asset_identifier)
+            if not self._is_asset_valid(asset, finding):
+                continue
+
+            vulnerability = self._create_vulnerability_object(finding, asset, scan_history)
+            if vulnerability:
+                vulnerabilities_to_create.append(vulnerability)
+                finding_to_vuln_map[finding.external_id] = vulnerability
+
+        return vulnerabilities_to_create, finding_to_vuln_map
+
+    def _is_finding_valid_for_vulnerability(self, finding: IntegrationFinding) -> bool:
+        """Check if a finding is valid for vulnerability creation."""
+        if not (finding.plugin_name or finding.cve):
+            logger.warning("No Plugin Name or CVE found for finding %s", finding.title)
+            return False
+        return True
+
+    def _is_asset_valid(self, asset: Optional[regscale_models.Asset], finding: IntegrationFinding) -> bool:
+        """Check if an asset is valid for vulnerability creation."""
+        if not asset:
+            if not getattr(self, "suppress_asset_not_found_errors", False):
+                logger.warning(
+                    "VulnerabilityMapping Error: Asset not found for identifier %s", finding.asset_identifier
+                )
+            return False
+        return True
+
+    def _create_vulnerability_object(
+        self,
+        finding: IntegrationFinding,
+        asset: regscale_models.Asset,
+        scan_history: regscale_models.ScanHistory,
+    ) -> Optional[regscale_models.Vulnerability]:
+        """Create a vulnerability object from a finding."""
+        try:
+            return self.create_vulnerability_from_finding(finding, asset, scan_history)
+        except Exception as e:
+            logger.error(f"Failed to prepare vulnerability for finding {finding.external_id}: {e}")
+            return None
+
+    def _execute_bulk_vulnerability_creation(
+        self,
+        vulnerabilities_to_create: List,
+        finding_to_vuln_map: Dict,
+        findings: List[IntegrationFinding],
+        assets: Dict[str, regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Dict[str, int]:
+        """Execute bulk vulnerability creation with fallback to individual creation."""
+        try:
+            created_vulnerabilities = regscale_models.Vulnerability.batch_create(
+                vulnerabilities_to_create, progress_context=self.finding_progress
+            )
+
+            result = self._map_vulnerabilities_to_findings(
+                created_vulnerabilities, vulnerabilities_to_create, finding_to_vuln_map
+            )
+
+            logger.info(f"Successfully created {len(created_vulnerabilities)} vulnerabilities in bulk")
+            return result
+
+        except Exception as e:
+            logger.error(f"Bulk vulnerability creation failed: {e}")
+            logger.info("Falling back to individual vulnerability creation...")
+            return self._create_vulnerabilities_individual(findings, assets, scan_history)
+
+    def _map_vulnerabilities_to_findings(
+        self,
+        created_vulnerabilities: List,
+        vulnerabilities_to_create: List,
+        finding_to_vuln_map: Dict,
+    ) -> Dict[str, int]:
+        """Map created vulnerabilities back to findings."""
+        result = {}
+        for i, created_vuln in enumerate(created_vulnerabilities):
+            if i < len(vulnerabilities_to_create):
+                original_vuln = vulnerabilities_to_create[i]
+                # Find the finding that corresponds to this vulnerability
+                for finding_id, vuln in finding_to_vuln_map.items():
+                    if vuln == original_vuln:
+                        result[finding_id] = created_vuln.id
+                        break
+        return result
+
+    def _create_vulnerabilities_individual(
+        self,
+        findings: List[IntegrationFinding],
+        assets: Dict[str, regscale_models.Asset],
+        scan_history: regscale_models.ScanHistory,
+    ) -> Dict[str, int]:
+        """
+        Create vulnerabilities individually as fallback.
+
+        :param List[IntegrationFinding] findings: List of findings
+        :param Dict[str, regscale_models.Asset] assets: Dictionary of assets
+        :param regscale_models.ScanHistory scan_history: The scan history
+        :return: Dictionary mapping finding external_id to vulnerability_id
+        :rtype: Dict[str, int]
+        """
+        result = {}
+        for finding in findings:
+            vulnerability_id = self.handle_vulnerability(finding, assets.get(finding.asset_identifier), scan_history)
+            if vulnerability_id:
+                result[finding.external_id] = vulnerability_id
+        return result
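The bulk path above follows a batch-then-fallback shape: attempt one batched create, and on any failure degrade to per-item creation so a single bad record cannot sink the whole scan. A generic sketch of that shape, with `batch_create` and `create_one` as assumed stand-ins for the model's batch and single-item APIs:

```python
from typing import Callable, Dict, List, TypeVar

T = TypeVar("T")


def create_bulk_with_fallback(
    items: List[T],
    batch_create: Callable[[List[T]], Dict[str, int]],
    create_one: Callable[[T], Dict[str, int]],
) -> Dict[str, int]:
    """Try one batched call; on any failure, degrade to per-item creation."""
    try:
        return batch_create(items)  # one round-trip for the whole scan
    except Exception:
        result: Dict[str, int] = {}
        for item in items:  # slower, but isolates bad records
            result.update(create_one(item))
        return result
```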