regscale-cli 6.21.2.0__py3-none-any.whl → 6.28.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- regscale/_version.py +1 -1
- regscale/airflow/hierarchy.py +2 -2
- regscale/core/app/api.py +5 -2
- regscale/core/app/application.py +36 -6
- regscale/core/app/internal/control_editor.py +73 -21
- regscale/core/app/internal/evidence.py +727 -204
- regscale/core/app/internal/login.py +4 -2
- regscale/core/app/internal/model_editor.py +219 -64
- regscale/core/app/utils/app_utils.py +86 -12
- regscale/core/app/utils/catalog_utils/common.py +1 -1
- regscale/core/login.py +21 -4
- regscale/core/utils/async_graphql_client.py +363 -0
- regscale/core/utils/date.py +77 -1
- regscale/dev/cli.py +26 -0
- regscale/dev/code_gen.py +109 -24
- regscale/dev/version.py +72 -0
- regscale/integrations/commercial/__init__.py +30 -2
- regscale/integrations/commercial/aws/audit_manager_compliance.py +3908 -0
- regscale/integrations/commercial/aws/cli.py +3107 -54
- regscale/integrations/commercial/aws/cloudtrail_control_mappings.py +333 -0
- regscale/integrations/commercial/aws/cloudtrail_evidence.py +501 -0
- regscale/integrations/commercial/aws/cloudwatch_control_mappings.py +357 -0
- regscale/integrations/commercial/aws/cloudwatch_evidence.py +490 -0
- regscale/integrations/commercial/{amazon → aws}/common.py +71 -19
- regscale/integrations/commercial/aws/config_compliance.py +914 -0
- regscale/integrations/commercial/aws/conformance_pack_mappings.py +198 -0
- regscale/integrations/commercial/aws/control_compliance_analyzer.py +439 -0
- regscale/integrations/commercial/aws/evidence_generator.py +283 -0
- regscale/integrations/commercial/aws/guardduty_control_mappings.py +340 -0
- regscale/integrations/commercial/aws/guardduty_evidence.py +1053 -0
- regscale/integrations/commercial/aws/iam_control_mappings.py +368 -0
- regscale/integrations/commercial/aws/iam_evidence.py +574 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +338 -22
- regscale/integrations/commercial/aws/inventory/base.py +107 -5
- regscale/integrations/commercial/aws/inventory/resources/analytics.py +390 -0
- regscale/integrations/commercial/aws/inventory/resources/applications.py +234 -0
- regscale/integrations/commercial/aws/inventory/resources/audit_manager.py +513 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail.py +315 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail_logs_metadata.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudwatch.py +191 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +328 -9
- regscale/integrations/commercial/aws/inventory/resources/config.py +464 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +74 -9
- regscale/integrations/commercial/aws/inventory/resources/database.py +481 -31
- regscale/integrations/commercial/aws/inventory/resources/developer_tools.py +253 -0
- regscale/integrations/commercial/aws/inventory/resources/guardduty.py +286 -0
- regscale/integrations/commercial/aws/inventory/resources/iam.py +470 -0
- regscale/integrations/commercial/aws/inventory/resources/inspector.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +175 -61
- regscale/integrations/commercial/aws/inventory/resources/kms.py +447 -0
- regscale/integrations/commercial/aws/inventory/resources/machine_learning.py +358 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +390 -67
- regscale/integrations/commercial/aws/inventory/resources/s3.py +394 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +268 -72
- regscale/integrations/commercial/aws/inventory/resources/securityhub.py +473 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +288 -29
- regscale/integrations/commercial/aws/inventory/resources/systems_manager.py +657 -0
- regscale/integrations/commercial/aws/inventory/resources/vpc.py +655 -0
- regscale/integrations/commercial/aws/kms_control_mappings.py +288 -0
- regscale/integrations/commercial/aws/kms_evidence.py +879 -0
- regscale/integrations/commercial/aws/ocsf/__init__.py +7 -0
- regscale/integrations/commercial/aws/ocsf/constants.py +115 -0
- regscale/integrations/commercial/aws/ocsf/mapper.py +435 -0
- regscale/integrations/commercial/aws/org_control_mappings.py +286 -0
- regscale/integrations/commercial/aws/org_evidence.py +666 -0
- regscale/integrations/commercial/aws/s3_control_mappings.py +356 -0
- regscale/integrations/commercial/aws/s3_evidence.py +632 -0
- regscale/integrations/commercial/aws/scanner.py +1072 -205
- regscale/integrations/commercial/aws/security_hub.py +319 -0
- regscale/integrations/commercial/aws/session_manager.py +282 -0
- regscale/integrations/commercial/aws/ssm_control_mappings.py +291 -0
- regscale/integrations/commercial/aws/ssm_evidence.py +492 -0
- regscale/integrations/commercial/jira.py +489 -153
- regscale/integrations/commercial/microsoft_defender/defender.py +326 -5
- regscale/integrations/commercial/microsoft_defender/defender_api.py +348 -14
- regscale/integrations/commercial/microsoft_defender/defender_constants.py +157 -0
- regscale/integrations/commercial/qualys/__init__.py +167 -68
- regscale/integrations/commercial/qualys/scanner.py +305 -39
- regscale/integrations/commercial/sarif/sairf_importer.py +432 -0
- regscale/integrations/commercial/sarif/sarif_converter.py +67 -0
- regscale/integrations/commercial/sicura/api.py +79 -42
- regscale/integrations/commercial/sicura/commands.py +8 -2
- regscale/integrations/commercial/sicura/scanner.py +83 -44
- regscale/integrations/commercial/stigv2/ckl_parser.py +5 -5
- regscale/integrations/commercial/synqly/assets.py +133 -16
- regscale/integrations/commercial/synqly/edr.py +2 -8
- regscale/integrations/commercial/synqly/query_builder.py +536 -0
- regscale/integrations/commercial/synqly/ticketing.py +27 -0
- regscale/integrations/commercial/synqly/vulnerabilities.py +165 -28
- regscale/integrations/commercial/tenablev2/cis_parsers.py +453 -0
- regscale/integrations/commercial/tenablev2/cis_scanner.py +447 -0
- regscale/integrations/commercial/tenablev2/commands.py +146 -5
- regscale/integrations/commercial/tenablev2/scanner.py +1 -3
- regscale/integrations/commercial/tenablev2/stig_parsers.py +113 -57
- regscale/integrations/commercial/wizv2/WizDataMixin.py +1 -1
- regscale/integrations/commercial/wizv2/click.py +191 -76
- regscale/integrations/commercial/wizv2/compliance/__init__.py +15 -0
- regscale/integrations/commercial/wizv2/{policy_compliance_helpers.py → compliance/helpers.py} +78 -60
- regscale/integrations/commercial/wizv2/compliance_report.py +1592 -0
- regscale/integrations/commercial/wizv2/core/__init__.py +133 -0
- regscale/integrations/commercial/wizv2/{async_client.py → core/client.py} +7 -3
- regscale/integrations/commercial/wizv2/{constants.py → core/constants.py} +92 -89
- regscale/integrations/commercial/wizv2/core/file_operations.py +237 -0
- regscale/integrations/commercial/wizv2/fetchers/__init__.py +11 -0
- regscale/integrations/commercial/wizv2/{data_fetcher.py → fetchers/policy_assessment.py} +66 -9
- regscale/integrations/commercial/wizv2/file_cleanup.py +104 -0
- regscale/integrations/commercial/wizv2/issue.py +776 -28
- regscale/integrations/commercial/wizv2/models/__init__.py +0 -0
- regscale/integrations/commercial/wizv2/parsers/__init__.py +34 -0
- regscale/integrations/commercial/wizv2/{parsers.py → parsers/main.py} +1 -1
- regscale/integrations/commercial/wizv2/processors/__init__.py +11 -0
- regscale/integrations/commercial/wizv2/{finding_processor.py → processors/finding.py} +1 -1
- regscale/integrations/commercial/wizv2/reports.py +243 -0
- regscale/integrations/commercial/wizv2/sbom.py +1 -1
- regscale/integrations/commercial/wizv2/scanner.py +1031 -441
- regscale/integrations/commercial/wizv2/utils/__init__.py +48 -0
- regscale/integrations/commercial/wizv2/{utils.py → utils/main.py} +116 -61
- regscale/integrations/commercial/wizv2/variables.py +89 -3
- regscale/integrations/compliance_integration.py +1036 -151
- regscale/integrations/control_matcher.py +432 -0
- regscale/integrations/due_date_handler.py +333 -0
- regscale/integrations/milestone_manager.py +291 -0
- regscale/integrations/public/__init__.py +14 -0
- regscale/integrations/public/cci_importer.py +834 -0
- regscale/integrations/public/csam/__init__.py +0 -0
- regscale/integrations/public/csam/csam.py +938 -0
- regscale/integrations/public/csam/csam_agency_defined.py +179 -0
- regscale/integrations/public/csam/csam_common.py +154 -0
- regscale/integrations/public/csam/csam_controls.py +432 -0
- regscale/integrations/public/csam/csam_poam.py +124 -0
- regscale/integrations/public/fedramp/click.py +77 -6
- regscale/integrations/public/fedramp/docx_parser.py +10 -1
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +675 -289
- regscale/integrations/public/fedramp/fedramp_five.py +1 -1
- regscale/integrations/public/fedramp/poam/scanner.py +75 -7
- regscale/integrations/public/fedramp/poam_export_v5.py +888 -0
- regscale/integrations/scanner_integration.py +1961 -430
- regscale/models/integration_models/CCI_List.xml +1 -0
- regscale/models/integration_models/aqua.py +2 -2
- regscale/models/integration_models/cisa_kev_data.json +805 -11
- regscale/models/integration_models/flat_file_importer/__init__.py +5 -8
- regscale/models/integration_models/nexpose.py +36 -10
- regscale/models/integration_models/qualys.py +3 -4
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +87 -18
- regscale/models/integration_models/synqly_models/filter_parser.py +332 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +124 -25
- regscale/models/integration_models/synqly_models/synqly_model.py +89 -16
- regscale/models/locking.py +12 -8
- regscale/models/platform.py +4 -2
- regscale/models/regscale_models/__init__.py +7 -0
- regscale/models/regscale_models/assessment.py +2 -1
- regscale/models/regscale_models/catalog.py +1 -1
- regscale/models/regscale_models/compliance_settings.py +251 -1
- regscale/models/regscale_models/component.py +1 -0
- regscale/models/regscale_models/control_implementation.py +236 -41
- regscale/models/regscale_models/control_objective.py +74 -5
- regscale/models/regscale_models/file.py +2 -0
- regscale/models/regscale_models/form_field_value.py +5 -3
- regscale/models/regscale_models/inheritance.py +44 -0
- regscale/models/regscale_models/issue.py +301 -102
- regscale/models/regscale_models/milestone.py +33 -14
- regscale/models/regscale_models/organization.py +3 -0
- regscale/models/regscale_models/regscale_model.py +310 -73
- regscale/models/regscale_models/security_plan.py +4 -2
- regscale/models/regscale_models/vulnerability.py +3 -3
- regscale/regscale.py +25 -4
- regscale/templates/__init__.py +0 -0
- regscale/utils/threading/threadhandler.py +20 -15
- regscale/validation/record.py +23 -1
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/METADATA +17 -33
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/RECORD +310 -111
- tests/core/__init__.py +0 -0
- tests/core/utils/__init__.py +0 -0
- tests/core/utils/test_async_graphql_client.py +472 -0
- tests/fixtures/test_fixture.py +13 -8
- tests/regscale/core/test_login.py +171 -4
- tests/regscale/integrations/commercial/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_compliance.py +1304 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_evidence_aggregation.py +341 -0
- tests/regscale/integrations/commercial/aws/test_aws_analytics_collector.py +260 -0
- tests/regscale/integrations/commercial/aws/test_aws_applications_collector.py +242 -0
- tests/regscale/integrations/commercial/aws/test_aws_audit_manager_collector.py +1155 -0
- tests/regscale/integrations/commercial/aws/test_aws_cloudtrail_collector.py +534 -0
- tests/regscale/integrations/commercial/aws/test_aws_config_collector.py +400 -0
- tests/regscale/integrations/commercial/aws/test_aws_developer_tools_collector.py +203 -0
- tests/regscale/integrations/commercial/aws/test_aws_guardduty_collector.py +315 -0
- tests/regscale/integrations/commercial/aws/test_aws_iam_collector.py +458 -0
- tests/regscale/integrations/commercial/aws/test_aws_inspector_collector.py +353 -0
- tests/regscale/integrations/commercial/aws/test_aws_inventory_integration.py +530 -0
- tests/regscale/integrations/commercial/aws/test_aws_kms_collector.py +919 -0
- tests/regscale/integrations/commercial/aws/test_aws_machine_learning_collector.py +237 -0
- tests/regscale/integrations/commercial/aws/test_aws_s3_collector.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_scanner_integration.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_securityhub_collector.py +792 -0
- tests/regscale/integrations/commercial/aws/test_aws_systems_manager_collector.py +918 -0
- tests/regscale/integrations/commercial/aws/test_aws_vpc_collector.py +996 -0
- tests/regscale/integrations/commercial/aws/test_cli_evidence.py +431 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_control_mappings.py +452 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_evidence.py +788 -0
- tests/regscale/integrations/commercial/aws/test_config_compliance.py +298 -0
- tests/regscale/integrations/commercial/aws/test_conformance_pack_mappings.py +200 -0
- tests/regscale/integrations/commercial/aws/test_control_compliance_analyzer.py +375 -0
- tests/regscale/integrations/commercial/aws/test_datetime_parsing.py +223 -0
- tests/regscale/integrations/commercial/aws/test_evidence_generator.py +386 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_control_mappings.py +564 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_evidence.py +1041 -0
- tests/regscale/integrations/commercial/aws/test_iam_control_mappings.py +718 -0
- tests/regscale/integrations/commercial/aws/test_iam_evidence.py +1375 -0
- tests/regscale/integrations/commercial/aws/test_kms_control_mappings.py +656 -0
- tests/regscale/integrations/commercial/aws/test_kms_evidence.py +1163 -0
- tests/regscale/integrations/commercial/aws/test_ocsf_mapper.py +370 -0
- tests/regscale/integrations/commercial/aws/test_org_control_mappings.py +546 -0
- tests/regscale/integrations/commercial/aws/test_org_evidence.py +1240 -0
- tests/regscale/integrations/commercial/aws/test_s3_control_mappings.py +672 -0
- tests/regscale/integrations/commercial/aws/test_s3_evidence.py +987 -0
- tests/regscale/integrations/commercial/aws/test_scanner_evidence.py +373 -0
- tests/regscale/integrations/commercial/aws/test_security_hub_config_filtering.py +539 -0
- tests/regscale/integrations/commercial/aws/test_session_manager.py +516 -0
- tests/regscale/integrations/commercial/aws/test_ssm_control_mappings.py +588 -0
- tests/regscale/integrations/commercial/aws/test_ssm_evidence.py +735 -0
- tests/regscale/integrations/commercial/conftest.py +28 -0
- tests/regscale/integrations/commercial/microsoft_defender/__init__.py +1 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender.py +1517 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_api.py +1748 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_constants.py +327 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_scanner.py +487 -0
- tests/regscale/integrations/commercial/test_aws.py +3742 -0
- tests/regscale/integrations/commercial/test_burp.py +48 -0
- tests/regscale/integrations/commercial/test_crowdstrike.py +49 -0
- tests/regscale/integrations/commercial/test_dependabot.py +341 -0
- tests/regscale/integrations/commercial/test_gcp.py +1543 -0
- tests/regscale/integrations/commercial/test_gitlab.py +549 -0
- tests/regscale/integrations/commercial/test_ip_mac_address_length.py +84 -0
- tests/regscale/integrations/commercial/test_jira.py +2204 -0
- tests/regscale/integrations/commercial/test_npm_audit.py +42 -0
- tests/regscale/integrations/commercial/test_okta.py +1228 -0
- tests/regscale/integrations/commercial/test_sarif_converter.py +251 -0
- tests/regscale/integrations/commercial/test_sicura.py +349 -0
- tests/regscale/integrations/commercial/test_snow.py +423 -0
- tests/regscale/integrations/commercial/test_sonarcloud.py +394 -0
- tests/regscale/integrations/commercial/test_sqlserver.py +186 -0
- tests/regscale/integrations/commercial/test_stig.py +33 -0
- tests/regscale/integrations/commercial/test_stig_mapper.py +153 -0
- tests/regscale/integrations/commercial/test_stigv2.py +406 -0
- tests/regscale/integrations/commercial/test_wiz.py +1365 -0
- tests/regscale/integrations/commercial/test_wiz_inventory.py +256 -0
- tests/regscale/integrations/commercial/wizv2/__init__.py +339 -0
- tests/regscale/integrations/commercial/wizv2/compliance/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/compliance/test_helpers.py +903 -0
- tests/regscale/integrations/commercial/wizv2/core/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/core/test_auth.py +701 -0
- tests/regscale/integrations/commercial/wizv2/core/test_client.py +1037 -0
- tests/regscale/integrations/commercial/wizv2/core/test_file_operations.py +989 -0
- tests/regscale/integrations/commercial/wizv2/fetchers/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/fetchers/test_policy_assessment.py +805 -0
- tests/regscale/integrations/commercial/wizv2/parsers/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/parsers/test_main.py +1153 -0
- tests/regscale/integrations/commercial/wizv2/processors/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/processors/test_finding.py +671 -0
- tests/regscale/integrations/commercial/wizv2/test_WizDataMixin.py +537 -0
- tests/regscale/integrations/commercial/wizv2/test_click_comprehensive.py +851 -0
- tests/regscale/integrations/commercial/wizv2/test_compliance_report_comprehensive.py +910 -0
- tests/regscale/integrations/commercial/wizv2/test_compliance_report_normalization.py +138 -0
- tests/regscale/integrations/commercial/wizv2/test_file_cleanup.py +283 -0
- tests/regscale/integrations/commercial/wizv2/test_file_operations.py +260 -0
- tests/regscale/integrations/commercial/wizv2/test_issue.py +343 -0
- tests/regscale/integrations/commercial/wizv2/test_issue_comprehensive.py +1203 -0
- tests/regscale/integrations/commercial/wizv2/test_reports.py +497 -0
- tests/regscale/integrations/commercial/wizv2/test_sbom.py +643 -0
- tests/regscale/integrations/commercial/wizv2/test_scanner_comprehensive.py +805 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_click_client_id.py +165 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_report.py +1394 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_unit.py +341 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_control_normalization.py +138 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_findings_comprehensive.py +364 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_inventory_comprehensive.py +644 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_status_mapping.py +149 -0
- tests/regscale/integrations/commercial/wizv2/test_wizv2.py +1218 -0
- tests/regscale/integrations/commercial/wizv2/test_wizv2_utils.py +519 -0
- tests/regscale/integrations/commercial/wizv2/utils/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/utils/test_main.py +1523 -0
- tests/regscale/integrations/public/__init__.py +0 -0
- tests/regscale/integrations/public/fedramp/__init__.py +1 -0
- tests/regscale/integrations/public/fedramp/test_gen_asset_list.py +150 -0
- tests/regscale/integrations/public/fedramp/test_poam_export_v5.py +1293 -0
- tests/regscale/integrations/public/test_alienvault.py +220 -0
- tests/regscale/integrations/public/test_cci.py +1053 -0
- tests/regscale/integrations/public/test_cisa.py +1021 -0
- tests/regscale/integrations/public/test_emass.py +518 -0
- tests/regscale/integrations/public/test_fedramp.py +1152 -0
- tests/regscale/integrations/public/test_fedramp_cis_crm.py +3661 -0
- tests/regscale/integrations/public/test_file_uploads.py +506 -0
- tests/regscale/integrations/public/test_oscal.py +453 -0
- tests/regscale/integrations/test_compliance_status_mapping.py +406 -0
- tests/regscale/integrations/test_control_matcher.py +1421 -0
- tests/regscale/integrations/test_control_matching.py +155 -0
- tests/regscale/integrations/test_milestone_manager.py +408 -0
- tests/regscale/models/test_control_implementation.py +118 -3
- tests/regscale/models/test_form_field_value_integration.py +304 -0
- tests/regscale/models/test_issue.py +378 -1
- tests/regscale/models/test_module_integration.py +582 -0
- tests/regscale/models/test_tenable_integrations.py +811 -105
- regscale/integrations/commercial/wizv2/policy_compliance.py +0 -3057
- regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +0 -7388
- regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +0 -9605
- regscale/integrations/public/fedramp/parts_mapper.py +0 -107
- /regscale/integrations/commercial/{amazon → sarif}/__init__.py +0 -0
- /regscale/integrations/commercial/wizv2/{wiz_auth.py → core/auth.py} +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/top_level.txt +0 -0
@@ -7,9 +7,10 @@ import time
 from typing import Any, Dict, Iterator, List, Optional, Tuple

 from regscale.core.utils.date import date_str, datetime_str
-from regscale.integrations.commercial.
+from regscale.integrations.commercial.aws.common import (
     check_finding_severity,
     determine_status_and_results,
+    fetch_aws_findings,
     get_comments,
     get_due_date,
 )
@@ -36,7 +37,9 @@ class AWSInventoryIntegration(ScannerIntegration):

     title = "AWS"
     asset_identifier_field = "awsIdentifier"
-    issue_identifier_field = "awsIdentifier
+    issue_identifier_field = ""  # Use default otherIdentifier - awsIdentifier doesn't exist on Issue model
+    suppress_asset_not_found_errors = True  # Suppress asset not found errors for AWS findings
+    enable_cci_mapping = False  # AWS findings don't use CCI references
     finding_severity_map = {
         "CRITICAL": regscale_models.IssueSeverity.High,
         "HIGH": regscale_models.IssueSeverity.High,
@@ -57,9 +60,12 @@ class AWSInventoryIntegration(ScannerIntegration):
         :param int plan_id: The RegScale plan ID
         """
         super().__init__(plan_id=plan_id, kwargs=kwargs)
+        # Override parent's default - suppress asset not found errors for AWS
+        self.suppress_asset_not_found_errors = True
         self.collector: Optional[AWSInventoryCollector] = None
         self.discovered_assets: List[IntegrationAsset] = []
         self.processed_asset_identifiers: set = set()  # Track processed assets to avoid duplicates
+        self.finding_progress = None  # Initialize progress object as None

     def authenticate(
         self,
@@ -67,20 +73,30 @@ class AWSInventoryIntegration(ScannerIntegration):
         aws_secret_access_key: Optional[str],
         region: str = os.getenv("AWS_REGION", "us-east-1"),
         aws_session_token: Optional[str] = os.getenv("AWS_SESSION_TOKEN"),
+        profile: Optional[str] = None,
+        account_id: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
     ) -> None:
         """
         Authenticate with AWS and initialize the inventory collector.

-        :param str aws_access_key_id: Optional AWS access key ID
-        :param str aws_secret_access_key: Optional AWS secret access key
+        :param str aws_access_key_id: Optional AWS access key ID (overrides profile)
+        :param str aws_secret_access_key: Optional AWS secret access key (overrides profile)
         :param str region: AWS region to collect inventory from
-        :param str aws_session_token: Optional AWS session
+        :param str aws_session_token: Optional AWS session token (overrides profile)
+        :param str profile: Optional AWS profile name from ~/.aws/credentials
+        :param str account_id: Optional AWS account ID to filter resources
+        :param dict tags: Optional dictionary of tag key-value pairs to filter resources
         """
         self.collector = AWSInventoryCollector(
             region=region,
+            profile=profile,
             aws_access_key_id=aws_access_key_id,
             aws_secret_access_key=aws_secret_access_key,
             aws_session_token=aws_session_token,
+            account_id=account_id,
+            tags=tags,
+            collect_findings=False,  # Disable findings collection for asset-only sync
         )

     def fetch_aws_data_if_needed(
@@ -89,29 +105,43 @@ class AWSInventoryIntegration(ScannerIntegration):
         aws_access_key_id: Optional[str],
         aws_secret_access_key: Optional[str],
         aws_session_token: Optional[str] = None,
+        profile: Optional[str] = None,
+        account_id: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        force_refresh: bool = False,
     ) -> Dict[str, Any]:
         """
         Fetch AWS inventory data, using cached data if available and not expired.

         :param str region: AWS region to collect inventory from
-        :param str aws_access_key_id: Optional AWS access key ID
-        :param str aws_secret_access_key: Optional AWS secret access key
-        :param str aws_session_token: Optional AWS session
+        :param str aws_access_key_id: Optional AWS access key ID (overrides profile)
+        :param str aws_secret_access_key: Optional AWS secret access key (overrides profile)
+        :param str aws_session_token: Optional AWS session token (overrides profile)
+        :param str profile: Optional AWS profile name from ~/.aws/credentials
+        :param str account_id: Optional AWS account ID to filter resources
+        :param dict tags: Optional dictionary of tag key-value pairs to filter resources
+        :param bool force_refresh: Force refresh inventory data, ignoring cache
         :return: Dictionary containing AWS inventory data
         :rtype: Dict[str, Any]
         """
         from regscale.models import DateTimeEncoder

-        # Check if we have cached data that's still valid
-        if os.path.exists(INVENTORY_FILE_PATH):
+        # Check if we have cached data that's still valid (unless force_refresh is True)
+        if not force_refresh and os.path.exists(INVENTORY_FILE_PATH):
             file_age = time.time() - os.path.getmtime(INVENTORY_FILE_PATH)
             if file_age < CACHE_TTL_SECONDS:
+                logger.info(f"Using cached AWS inventory data (age: {int(file_age / 60)} minutes)")
                 with open(INVENTORY_FILE_PATH, "r", encoding="utf-8") as file:
                     return json.load(file)

+        if force_refresh and os.path.exists(INVENTORY_FILE_PATH):
+            logger.info("Force refresh enabled - ignoring cached inventory data")
+
         # No valid cache, need to fetch new data
         if not self.collector:
-            self.authenticate(
+            self.authenticate(
+                aws_access_key_id, aws_secret_access_key, region, aws_session_token, profile, account_id, tags
+            )

         if not self.collector:
             raise RuntimeError("Failed to initialize AWS inventory collector")
@@ -138,17 +168,11 @@ class AWSInventoryIntegration(ScannerIntegration):
         :yield: Iterator[IntegrationAsset]
         """
         for asset in assets:
-            if not isinstance(asset, dict)
+            if not isinstance(asset, dict):
                 logger.warning(f"Skipping {asset_type} due to invalid data format: {asset}")
                 continue
             try:
-
-                for user in assets[asset]:
-                    self.num_assets_to_process += 1
-                    yield parser_method(user)
-                else:
-                    self.num_assets_to_process += 1
-                    yield parser_method(asset)
+                yield parser_method(asset)
             except Exception as e:
                 logger.error(f"Error parsing {asset_type} {asset}: {str(e)}", exc_info=True)

@@ -164,7 +188,14 @@ class AWSInventoryIntegration(ScannerIntegration):
         :param callable parser_method: Method to parse the asset
         :yield: Iterator[IntegrationAsset]
         """
-
+        section_data = inventory.get(section_key, [])
+
+        # Handle special case for IAM - need to extract Roles list from IAM dict
+        if section_key == "IAM" and isinstance(section_data, dict):
+            assets = section_data.get(asset_type, [])
+        else:
+            assets = section_data
+
         yield from self._process_asset_collection(assets, asset_type, parser_method)

     def get_asset_configs(self) -> List[Tuple[str, str, callable]]:
@@ -192,17 +223,34 @@ class AWSInventoryIntegration(ScannerIntegration):
         aws_access_key_id: Optional[str] = None,
         aws_secret_access_key: Optional[str] = None,
         aws_session_token: Optional[str] = None,
+        profile: Optional[str] = None,
+        account_id: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        force_refresh: bool = False,
     ) -> Iterator[IntegrationAsset]:
         """
         Fetch AWS assets from the inventory.

         :param str region: AWS region to collect inventory from
-        :param str aws_access_key_id: Optional AWS access key ID
-        :param str aws_secret_access_key: Optional AWS secret access key
-        :param str aws_session_token: Optional AWS session
+        :param str aws_access_key_id: Optional AWS access key ID (overrides profile)
+        :param str aws_secret_access_key: Optional AWS secret access key (overrides profile)
+        :param str aws_session_token: Optional AWS session token (overrides profile)
+        :param str profile: Optional AWS profile name from ~/.aws/credentials
+        :param str account_id: Optional AWS account ID to filter resources
+        :param dict tags: Optional dictionary of tag key-value pairs to filter resources
+        :param bool force_refresh: Force refresh inventory data, ignoring cache
         :yield: Iterator[IntegrationAsset]
         """
-        inventory = self.fetch_aws_data_if_needed(
+        inventory = self.fetch_aws_data_if_needed(
+            region,
+            aws_access_key_id,
+            aws_secret_access_key,
+            aws_session_token,
+            profile,
+            account_id,
+            tags,
+            force_refresh,
+        )
         # Process each asset type using the corresponding parser
         asset_configs = self.get_asset_configs()

@@ -211,6 +259,91 @@ class AWSInventoryIntegration(ScannerIntegration):
         for section_key, asset_type, parser_method in asset_configs:
             yield from self._process_inventory_section(inventory, section_key, asset_type, parser_method)

+    def _calculate_ec2_storage(self, instance: Dict[str, Any]) -> int:
+        """
+        Calculate total storage from EC2 block devices.
+
+        :param Dict[str, Any] instance: The EC2 instance data
+        :return: Total storage in GB
+        :rtype: int
+        """
+        total_storage = 0
+        for device in instance.get("BlockDeviceMappings", []):
+            if "Ebs" in device:
+                # Note: We need to add a call to describe_volumes to get actual size
+                total_storage += 8  # Default to 8 GB if size unknown
+        return total_storage
+
+    def _determine_ec2_asset_type(
+        self, image_name: str, platform: Optional[str]
+    ) -> tuple[Any, Any, Any, Any, list[str]]:
+        """
+        Determine EC2 asset type, category, component type, and names based on image and platform.
+
+        :param str image_name: Lowercase image name
+        :param Optional[str] platform: Platform type (e.g., 'windows')
+        :return: Tuple of (operating_system, asset_type, asset_category, component_type, component_names)
+        :rtype: tuple
+        """
+        # Check for Palo Alto device first
+        if "pa-vm-aws" in image_name:
+            return (
+                regscale_models.AssetOperatingSystem.PaloAlto,
+                regscale_models.AssetType.Appliance,
+                regscale_models.AssetCategory.Hardware,
+                regscale_models.ComponentType.Hardware,
+                ["Palo Alto Networks IDPS"],
+            )
+
+        # Check for Windows platform
+        if platform == "windows":
+            return (
+                regscale_models.AssetOperatingSystem.WindowsServer,
+                regscale_models.AssetType.VM,
+                regscale_models.AssetCategory.Hardware,
+                regscale_models.ComponentType.Hardware,
+                [EC_INSTANCES],
+            )
+
+        # Default to Linux
+        return (
+            regscale_models.AssetOperatingSystem.Linux,
+            regscale_models.AssetType.VM,
+            regscale_models.AssetCategory.Hardware,
+            regscale_models.ComponentType.Hardware,
+            [EC_INSTANCES],
+        )
+
+    def _build_ec2_notes(
+        self, description: str, instance: Dict[str, Any], image_info: Dict[str, Any], cpu_count: int, ram: int
+    ) -> str:
+        """
+        Build detailed notes for EC2 instance.
+
+        :param str description: Instance description
+        :param Dict[str, Any] instance: The EC2 instance data
+        :param Dict[str, Any] image_info: AMI image information
+        :param int cpu_count: Number of vCPUs
+        :param int ram: RAM in GB
+        :return: Formatted notes string
+        :rtype: str
+        """
+        return f"""Description: {description}
+AMI ID: {instance.get('ImageId', '')}
+AMI Description: {image_info.get('Description', '')}
+Architecture: {instance.get('Architecture', '')}
+Root Device Type: {image_info.get('RootDeviceType', '')}
+Virtualization: {image_info.get('VirtualizationType', '')}
+Instance Type: {instance.get('InstanceType', '')}
+vCPUs: {cpu_count}
+RAM: {ram}GB
+State: {instance.get('State')}
+Platform Details: {instance.get('PlatformDetails', 'Linux')}
+Private IP: {instance.get('PrivateIpAddress', 'N/A')}
+Public IP: {instance.get('PublicIpAddress', 'N/A')}
+VPC ID: {instance.get('VpcId', 'N/A')}
+Subnet ID: {instance.get('SubnetId', 'N/A')}"""
+
     def parse_ec2_instance(self, instance: Dict[str, Any]) -> IntegrationAsset:
         """Parse EC2 instance data into an IntegrationAsset.

@@ -224,15 +357,8 @@ class AWSInventoryIntegration(ScannerIntegration):
         )
         name = instance_name

-        # Calculate
-        total_storage =
-        for device in instance.get("BlockDeviceMappings", []):
-            if "Ebs" in device:
-                # Note: We need to add a call to describe_volumes to get actual size
-                total_storage += 8  # Default to 8 GB if size unknown
-
-        # Calculate RAM based on instance type
-        # This would need a mapping of instance types to RAM
+        # Calculate resources
+        total_storage = self._calculate_ec2_storage(instance)
         ram = 16  # Default to 16 GB for c5.2xlarge

         # Get CPU info
@@ -246,26 +372,10 @@ class AWSInventoryIntegration(ScannerIntegration):
         image_info = instance.get("ImageInfo", {})
         image_name = image_info.get("Name", "").lower()

-        #
-
-
-
-            asset_type = regscale_models.AssetType.Appliance
-            asset_category = regscale_models.AssetCategory.Hardware
-            component_type = regscale_models.ComponentType.Hardware
-            component_names = ["Palo Alto Networks IDPS"]
-        elif instance.get("Platform") == "windows":
-            operating_system = regscale_models.AssetOperatingSystem.WindowsServer
-            asset_type = regscale_models.AssetType.VM
-            asset_category = regscale_models.AssetCategory.Hardware
-            component_type = regscale_models.ComponentType.Hardware
-            component_names = [EC_INSTANCES]
-        else:
-            operating_system = regscale_models.AssetOperatingSystem.Linux
-            asset_type = regscale_models.AssetType.VM
-            asset_category = regscale_models.AssetCategory.Hardware
-            component_type = regscale_models.ComponentType.Hardware
-            component_names = [EC_INSTANCES]
+        # Determine asset type and OS
+        operating_system, asset_type, asset_category, component_type, component_names = self._determine_ec2_asset_type(
+            image_name, instance.get("Platform")
+        )

         os_version = image_info.get("Description", "")

@@ -280,29 +390,21 @@ class AWSInventoryIntegration(ScannerIntegration):
         # Create description
         description = f"{instance_name} - {instance.get('PlatformDetails', 'Linux')} instance running on {instance.get('InstanceType', '')} with {cpu_count} vCPUs and {ram}GB RAM"

-        # Build notes
-        notes =
-
-
-
-
-
-
-vCPUs: {cpu_count}
-RAM: {ram}GB
-State: {instance.get('State')}
-Platform Details: {instance.get('PlatformDetails', 'Linux')}
-Private IP: {instance.get('PrivateIpAddress', 'N/A')}
-Public IP: {instance.get('PublicIpAddress', 'N/A')}
-VPC ID: {instance.get('VpcId', 'N/A')}
-Subnet ID: {instance.get('SubnetId', 'N/A')}"""
+        # Build notes
+        notes = self._build_ec2_notes(description, instance, image_info, cpu_count, ram)
+
+        # Build full ARN for EC2 instance: arn:aws:ec2:region:account-id:instance/instance-id
+        instance_id = instance.get("InstanceId", "")
+        region = instance.get("Region", "us-east-1")
+        account_id = instance.get("OwnerId", "")
+        instance_arn = f"arn:aws:ec2:{region}:{account_id}:instance/{instance_id}"

         # Create URI for AWS Console link
-        uri = f"https://console.aws.amazon.com/ec2/v2/home?region={
+        uri = f"https://console.aws.amazon.com/ec2/v2/home?region={region}#InstanceDetails:instanceId={instance_id}"

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=instance_arn,
             asset_type=asset_type,
             asset_category=asset_category,
             component_type=component_type,
@@ -322,12 +424,12 @@ Subnet ID: {instance.get('SubnetId', 'N/A')}"""
             ram=ram,
             operating_system=operating_system,
             os_version=os_version,
-            location=
+            location=region,
             notes=notes,
             model=instance.get("InstanceType"),
             manufacturer="AWS",
             is_public_facing=is_public_facing,
-            aws_identifier=
+            aws_identifier=instance_arn,  # Use full ARN for asset matching with findings
             vlan_id=instance.get("SubnetId"),
             uri=uri,
             source_data=instance,
@@ -368,7 +470,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(function.get("
+            identifier=str(function.get("FunctionArn", "")),
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -447,11 +549,11 @@ Description: {description if isinstance(description, str) else ''}"""
         :rtype: IntegrationAsset
         """
         name = bucket.get("Name", "")
-
+        arn = f"arn:aws:s3:::{bucket.get('Name')}"
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=
+            identifier=arn,
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -464,7 +566,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=bucket.get("Region"),
             # Cloud identifiers
             external_id=bucket.get("Name"),
-            aws_identifier=
+            aws_identifier=arn,
             uri=f"https://{bucket.get('Name')}.s3.amazonaws.com",
             # Additional metadata
             manufacturer="AWS",
@@ -491,7 +593,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(db.get("
+            identifier=str(db.get("DBInstanceArn", "")),
             asset_type=regscale_models.AssetType.VM,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -539,7 +641,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(table.get("
+            identifier=str(table.get("TableArn", "")),
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -578,10 +680,16 @@ Description: {description if isinstance(description, str) else ''}"""
         if vpc.get("IsDefault"):
             notes = "Default VPC\n" + notes

+        # Build full ARN for VPC: arn:aws:ec2:region:account-id:vpc/vpc-id
+        vpc_id = vpc.get("VpcId", "")
+        region = vpc.get("Region", "us-east-1")
+        account_id = vpc.get("OwnerId", "")
+        vpc_arn = f"arn:aws:ec2:{region}:{account_id}:vpc/{vpc_id}"
+
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=
+            identifier=vpc_arn,
             asset_type=regscale_models.AssetType.NetworkRouter,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -595,12 +703,12 @@ Description: {description if isinstance(description, str) else ''}"""
                 if vpc.get("State") == "available"
                 else regscale_models.AssetStatus.Inactive
             ),
-            location=
+            location=region,
             # Network information
-            vlan_id=
+            vlan_id=vpc_id,
             # Cloud identifiers
-            external_id=
-            aws_identifier=
+            external_id=vpc_id,
+            aws_identifier=vpc_arn,  # Use full ARN for asset matching with findings
             # Additional metadata
             manufacturer="AWS",
             notes=f"CIDR: {vpc.get('CidrBlock')}",
@@ -623,7 +731,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=
+            identifier=lb.get("LoadBalancerArn"),
             asset_type=regscale_models.AssetType.NetworkRouter,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -673,7 +781,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(repo.get("
+            identifier=str(repo.get("RepositoryArn", "")),
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -693,6 +801,94 @@ Description: {description if isinstance(description, str) else ''}"""
             source_data=repo,
         )

+    def _validate_aws_credentials(
+        self,
+        profile: Optional[str],
+        aws_secret_key_id: Optional[str],
+        aws_secret_access_key: Optional[str],
+        region: Optional[str],
+    ) -> None:
+        """
+        Validate AWS credentials and region are provided.
+
+        :param profile: AWS profile name
+        :param aws_secret_key_id: AWS access key ID
+        :param aws_secret_access_key: AWS secret access key
+        :param region: AWS region
+        :raises ValueError: If credentials are not provided
+        """
+        if not profile and (not aws_secret_key_id or not aws_secret_access_key):
+            raise ValueError(
+                "AWS Profile or Access Credentials are required.\nPlease provide --profile or set environment "
+                "variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) or pass as arguments."
+            )
+        if not region:
+            logger.warning("AWS region not provided. Defaulting to 'us-east-1'.")
+
+    def _get_severity_config(self) -> Optional[str]:
+        """
+        Get minimum severity from config.
+
+        :return: Minimum severity or None
+        :rtype: Optional[str]
+        """
+        try:
+            minimum_severity = self.app.config.get("issues", {}).get("amazon", {}).get("minimumSeverity")
+            if minimum_severity:
+                logger.info(f"Using minimumSeverity from config: {minimum_severity}")
+                return minimum_severity
+        except (KeyError, AttributeError):
+            logger.debug("No minimumSeverity configured, fetching all findings")
+        return None
+
+    def _get_posture_management_config(self) -> bool:
+        """
+        Get posture management only setting from config.
+
+        :return: Posture management only setting (defaults to False)
+        :rtype: bool
+        """
+        try:
+            posture_management_only = (
+                self.app.config.get("issues", {}).get("amazon", {}).get("postureManagementOnly", False)
+            )
+            if posture_management_only:
+                logger.info("Fetching posture management findings only (security standards compliance checks)")
+            else:
+                logger.info("Fetching all Security Hub findings (CVEs from Inspector + compliance checks)")
+            return posture_management_only
+        except (KeyError, AttributeError):
+            logger.debug("No postureManagementOnly configured, defaulting to False (includes CVEs)")
+            return False
+
+    def _create_aws_session(
+        self,
+        aws_secret_key_id: Optional[str],
+        aws_secret_access_key: Optional[str],
+        region: str,
+        profile: Optional[str],
+        **kwargs,
+    ):
+        """
+        Create AWS session with profile or explicit credentials.
+
+        :param aws_secret_key_id: AWS access key ID
+        :param aws_secret_access_key: AWS secret access key
+        :param region: AWS region
+        :param profile: AWS profile name
+        :return: Boto3 session
+        """
+        import boto3
+
+        if aws_secret_key_id or aws_secret_access_key:
+            return boto3.Session(
+                region_name=region,
+                aws_access_key_id=aws_secret_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                aws_session_token=kwargs.get("aws_session_token"),
+            )
+        return boto3.Session(profile_name=profile, region_name=region)
+
     def fetch_findings(self, *args, **kwargs) -> Iterator[IntegrationFinding]:
         """
         Fetch security findings from AWS Security Hub.
@@ -700,38 +896,35 @@ Description: {description if isinstance(description, str) else ''}"""

         :yield: Iterator[IntegrationFinding]
         """
-        import boto3
-
-        from regscale.integrations.commercial.amazon.common import fetch_aws_findings
-
         aws_secret_key_id = kwargs.get("aws_access_key_id") or os.getenv("AWS_ACCESS_KEY_ID")
         aws_secret_access_key = kwargs.get("aws_secret_access_key") or os.getenv("AWS_SECRET_ACCESS_KEY")
         region = kwargs.get("region") or os.getenv("AWS_REGION", "us-east-1")
-
-
-
-
-
-
-
-
-
-
-
-
-
+        profile = kwargs.get("profile")
+
+        self._validate_aws_credentials(profile, aws_secret_key_id, aws_secret_access_key, region)
+
+        minimum_severity = self._get_severity_config()
+        posture_management_only = self._get_posture_management_config()
+
+        # Create a copy of kwargs excluding parameters we're passing explicitly
+        session_kwargs = {
+            k: v
+            for k, v in kwargs.items()
+            if k not in ("aws_access_key_id", "aws_secret_access_key", "region", "profile")
+        }
+        session = self._create_aws_session(aws_secret_key_id, aws_secret_access_key, region, profile, **session_kwargs)
         client = session.client("securityhub")
-
-
-
+
+        aws_findings = fetch_aws_findings(
+            aws_client=client, minimum_severity=minimum_severity, posture_management_only=posture_management_only
+        )
+
         self.discovered_assets.clear()
         self.processed_asset_identifiers.clear()

-        self.num_findings_to_process = len(aws_findings)
         for finding in aws_findings:
             yield from iter(self.parse_finding(finding))

-        # Log discovered assets count
         if self.discovered_assets:
             logger.info(f"Discovered {len(self.discovered_assets)} assets from Security Hub findings")

@@ -754,34 +947,108 @@ Description: {description if isinstance(description, str) else ''}"""
|
|
|
754
947
|
:return: Tuple of (findings_processed, assets_processed)
|
|
755
948
|
:rtype: tuple[int, int]
|
|
756
949
|
"""
|
|
950
|
+
from regscale.core.app.utils.app_utils import create_progress_object
|
|
951
|
+
|
|
757
952
|
logger.info("Starting AWS Security Hub findings and assets sync...")
|
|
758
953
|
|
|
759
|
-
#
|
|
760
|
-
|
|
954
|
+
# Create progress bar context for the entire operation
|
|
955
|
+
with create_progress_object() as progress:
|
|
956
|
+
# Store progress object for use by nested methods
|
|
957
|
+
self.finding_progress = progress
|
|
761
958
|
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
self.processed_asset_identifiers.clear()
|
|
959
|
+
# First, fetch findings to discover assets (but don't sync findings yet)
|
|
960
|
+
logger.info("Discovering assets from AWS Security Hub findings...")
|
|
765
961
|
|
|
766
|
-
-
+        # Reset discovered assets for this run
+        self.discovered_assets.clear()
+        self.processed_asset_identifiers.clear()
 
-
-
-            logger.info(f"Creating {len(self.discovered_assets)} assets discovered from findings...")
-            self.num_assets_to_process = len(self.discovered_assets)
-            assets_processed = self.update_regscale_assets(self.get_discovered_assets())
-            logger.info(f"Successfully created {assets_processed} assets")
-        else:
-            logger.info("No assets discovered from findings")
-            assets_processed = 0
+        # Fetch findings to discover assets - store them to avoid re-fetching
+        findings_list = list(self.fetch_findings(**kwargs))
 
-
-
-
+        # Sync the discovered assets first
+        if self.discovered_assets:
+            logger.info(f"Creating {len(self.discovered_assets)} assets discovered from findings...")
+            self.num_assets_to_process = len(self.discovered_assets)
+            assets_processed = self.update_regscale_assets(self.get_discovered_assets())
+            logger.info(f"Successfully created {assets_processed} assets")
+        else:
+            logger.info("No assets discovered from findings")
+            assets_processed = 0
+
+        # Now process the findings we already fetched (avoid double-fetching)
+        logger.info("Now syncing findings with created assets...")
+        findings_processed = self.update_regscale_findings(findings_list)
+
+        # Log completion summary
+        logger.info(
+            f"AWS Security Hub sync completed successfully: {findings_processed} findings processed, {assets_processed} assets created"
+        )
 
         return findings_processed, assets_processed
 
+    @classmethod
+    def sync_findings(cls, plan_id: int, **kwargs) -> int:
+        """
+        Sync AWS Security Hub findings to RegScale.
+
+        :param int plan_id: The RegScale plan ID
+        :param kwargs: Additional keyword arguments including:
+            - region (str): AWS region
+            - profile (Optional[str]): AWS profile name
+            - aws_access_key_id (Optional[str]): AWS access key ID
+            - aws_secret_access_key (Optional[str]): AWS secret access key
+            - aws_session_token (Optional[str]): AWS session token
+            - account_id (Optional[str]): AWS account ID to filter by
+            - tags (Optional[Dict[str, str]]): Tags to filter by
+            - import_all_findings (bool): Import all findings even without matching assets
+        :return: Number of findings processed
+        :rtype: int
+        """
+        # Extract parameters from kwargs
+        region = kwargs.get("region", "us-east-1")
+        profile = kwargs.get("profile")
+        aws_access_key_id = kwargs.get("aws_access_key_id")
+        aws_secret_access_key = kwargs.get("aws_secret_access_key")
+        aws_session_token = kwargs.get("aws_session_token")
+        account_id = kwargs.get("account_id")
+        tags = kwargs.get("tags")
+        import_all_findings = kwargs.get("import_all_findings", False)
+
+        instance = cls(plan_id=plan_id, import_all_findings=import_all_findings)
+        instance.authenticate(
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            region=region,
+            aws_session_token=aws_session_token,
+            profile=profile,
+            account_id=account_id,
+            tags=tags,
+        )
+
+        # Load assets first
+        logger.info("Loading asset map from RegScale...")
+        instance.asset_map_by_identifier.update(instance.get_asset_map())
+
+        # Fetch and sync findings
+        logger.info("Fetching and syncing AWS Security Hub findings...")
+        findings = list(
+            instance.fetch_findings(
+                profile=profile,
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                aws_session_token=aws_session_token,
+                region=region,
+                account_id=account_id,
+                tags=tags,
+            )
+        )
+
+        # Process findings - progress bar will be created inside update_regscale_findings if needed
+        findings_processed = instance.update_regscale_findings(findings)
+
+        return findings_processed
+
     def get_configured_issue_status(self) -> IssueStatus:
         """
         Get the configured issue status from the configuration.
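For context on how the new classmethod above is meant to be driven: sync_findings builds its own integration instance, authenticates, loads the RegScale asset map, then fetches and syncs findings in one call. A minimal invocation sketch follows; the import path and class name are assumptions for illustration (the diff only shows the method body), and the plan ID is a placeholder.

    # Hypothetical import path and class name -- adjust to the module that defines this scanner.
    from regscale.integrations.commercial.aws.scanner import AWSSecurityHub

    processed = AWSSecurityHub.sync_findings(
        plan_id=123,                # RegScale security plan ID (placeholder)
        region="us-east-1",         # default used by the method when omitted
        profile="dev",              # or pass aws_access_key_id / aws_secret_access_key / aws_session_token
        import_all_findings=False,  # keep only findings that match known assets
    )
    print(f"{processed} Security Hub findings processed")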
@@ -839,6 +1106,70 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return should_process
 
+    def is_service_enabled_for_resource(self, resource_type: str) -> bool:
+        """
+        Check if the AWS service for a given resource type is enabled in config.
+
+        :param str resource_type: AWS resource type (e.g., 'AwsEc2Instance', 'AwsS3Bucket')
+        :return: True if the service is enabled or config not found, False otherwise
+        :rtype: bool
+        """
+        # Map resource types to service configuration keys
+        resource_to_service_map = {
+            "AwsEc2Instance": ("compute", "ec2"),
+            "AwsEc2SecurityGroup": ("security", "securityhub"),
+            "AwsEc2Subnet": ("networking", "vpc"),
+            "AwsS3Bucket": ("storage", "s3"),
+            "AwsRdsDbInstance": ("database", "rds"),
+            "AwsLambdaFunction": ("compute", "lambda"),
+            "AwsEcrRepository": ("containers", "ecr"),
+            "AwsIamUser": ("security", "iam"),
+            "AwsIamRole": ("security", "iam"),
+            "AwsDynamoDbTable": ("database", "dynamodb"),
+            "AwsKmsKey": ("security", "kms"),
+            "AwsSecretsManagerSecret": ("security", "secrets_manager"),
+            "AwsCloudTrailTrail": ("security", "cloudtrail"),
+            "AwsConfigConfigurationRecorder": ("security", "config"),
+            "AwsGuardDutyDetector": ("security", "guardduty"),
+            "AwsInspector2": ("security", "inspector"),
+            "AwsAuditManagerAssessment": ("security", "audit_manager"),
+        }
+
+        try:
+            # Get the service category and service name for this resource type
+            service_info = resource_to_service_map.get(resource_type)
+            if not service_info:
+                # If resource type not in map, allow it by default (don't filter unknowns)
+                logger.debug(f"Resource type '{resource_type}' not in service map, allowing by default")
+                return True
+
+            category, service_name = service_info
+
+            # Check if the service is enabled in config
+            enabled_services = self.app.config.get("aws", {}).get("inventory", {}).get("enabled_services", {})
+
+            # Check if category is enabled
+            category_config = enabled_services.get(category, {})
+            if not category_config.get("enabled", True):
+                logger.debug(f"Service category '{category}' is disabled, filtering resource type '{resource_type}'")
+                return False
+
+            # Check if specific service is enabled
+            services = category_config.get("services", {})
+            is_enabled = services.get(service_name, True)
+
+            if not is_enabled:
+                logger.debug(
+                    f"Service '{service_name}' in category '{category}' is disabled, filtering resource type '{resource_type}'"
+                )
+
+            return is_enabled
+
+        except (KeyError, AttributeError) as e:
+            # If config not found or malformed, allow by default (don't filter)
+            logger.debug(f"Could not check service enablement for '{resource_type}': {e}. Allowing by default.")
+            return True
+
     @staticmethod
     def get_baseline(resource: dict) -> str:
         """
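The config lookup in is_service_enabled_for_resource implies a nested aws -> inventory -> enabled_services block keyed by category and service, with everything defaulting to enabled when a key is missing. A minimal sketch of the shape the method reads, using only the keys the code actually accesses (the surrounding init.yaml layout is an assumption):

    enabled_services_config = {
        "aws": {
            "inventory": {
                "enabled_services": {
                    "security": {
                        "enabled": True,  # category toggle; a missing key is treated as enabled
                        "services": {"iam": True, "guardduty": False},  # per-service toggles
                    },
                    "storage": {"enabled": False},  # turns off AwsS3Bucket findings entirely
                }
            }
        }
    }
    # With this config, AwsGuardDutyDetector and AwsS3Bucket resources are filtered out,
    # while resource types not in resource_to_service_map are still allowed by default.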
@@ -876,6 +1207,524 @@ Description: {description if isinstance(description, str) else ''}"""
         except IndexError:
             return None
 
+    def _discover_asset_from_resource(self, resource: dict, finding: dict) -> None:
+        """
+        Discover and track asset from finding resource.
+
+        :param dict resource: AWS Security Hub resource
+        :param dict finding: AWS Security Hub finding
+        """
+        asset = self.parse_resource_to_asset(resource, finding)
+        if asset and asset.identifier not in self.processed_asset_identifiers:
+            self.discovered_assets.append(asset)
+            self.processed_asset_identifiers.add(asset.identifier)
+            logger.debug(f"Discovered asset from finding: {asset.name} ({asset.identifier})")
+
+    def _get_friendly_severity(self, severity: str) -> str:
+        """
+        Convert severity level to friendly name.
+
+        :param str severity: Raw severity level
+        :return: Friendly severity name (low, moderate, high)
+        :rtype: str
+        """
+        if severity in ["CRITICAL", "HIGH"]:
+            return "high"
+        elif severity in ["MEDIUM", "MODERATE"]:
+            return "moderate"
+        return "low"
+
+    def _get_due_date_for_finding(self, finding: dict, friendly_sev: str) -> str:
+        """
+        Calculate due date for finding based on severity.
+
+        :param dict finding: AWS Security Hub finding
+        :param str friendly_sev: Friendly severity name
+        :return: Due date string
+        :rtype: str
+        """
+        try:
+            days = self.app.config["issues"]["amazon"][friendly_sev]
+        except KeyError:
+            logger.warning("Invalid severity level, defaulting to 30 day due date")
+            days = 30
+        return datetime_str(get_due_date(date_str(finding["CreatedAt"]), days))
+
+    def _construct_plugin_id(self, finding: dict, resource: dict = None) -> tuple[str, str]:
+        """
+        Construct plugin name and ID from finding.
+
+        :param dict finding: AWS Security Hub finding
+        :param dict resource: Optional resource dict for per-resource plugin ID
+        :return: Tuple of (plugin_name, plugin_id)
+        :rtype: tuple[str, str]
+        """
+        plugin_name = next(iter(finding.get("Types", [])), "Unknown")
+        finding_id = finding.get("Id", "")
+
+        # Extract UUID from ARN or full ID
+        if "/" in finding_id:
+            finding_uuid = finding_id.split("/")[-1]
+        else:
+            finding_uuid = finding_id.split(":")[-1]
+
+        # Sanitize plugin name for ID
+        sanitized_name = plugin_name.replace(" ", "_").replace("/", "_").replace(":", "_")
+
+        # If we have multiple resources for this finding, include resource identifier
+        # This ensures proper deduplication when a finding affects multiple resources
+        if resource and len(finding.get("Resources", [])) > 1:
+            resource_id = resource.get("Id", "")
+            # Extract just the resource identifier part from ARN
+            if "/" in resource_id:
+                resource_suffix = resource_id.split("/")[-1]
+            elif ":" in resource_id:
+                resource_suffix = resource_id.split(":")[-1]
+            else:
+                resource_suffix = resource_id
+
+            # Sanitize and append resource suffix
+            resource_suffix = resource_suffix.replace(" ", "_").replace("/", "_").replace(":", "_")
+            plugin_id = f"{sanitized_name}_{finding_uuid}_{resource_suffix}"
+        else:
+            plugin_id = f"{sanitized_name}_{finding_uuid}"
+
+        return plugin_name, plugin_id
+
+    def _extract_cvss_scores(self, cvss_list: list) -> list:
+        """
+        Extract CVSS scores from vulnerability data.
+
+        :param list cvss_list: List of CVSS data
+        :return: List of formatted CVSS score strings
+        :rtype: list
+        """
+        cvss_scores = []
+        for cvss in cvss_list:
+            cvss_version = cvss.get("Version", "")
+            cvss_score = cvss.get("BaseScore", 0)
+            cvss_vector = cvss.get("BaseVector", "")
+            if cvss_score:
+                score_str = f"CVSS{cvss_version}: {cvss_score}"
+                if cvss_vector:
+                    score_str += f" ({cvss_vector})"
+                cvss_scores.append(score_str)
+        return cvss_scores
+
+    def _extract_vendor_info(self, vendor: dict) -> str:
+        """
+        Extract vendor information from vulnerability data.
+
+        :param dict vendor: Vendor data
+        :return: Formatted vendor info string
+        :rtype: str
+        """
+        vendor_name = vendor.get("Name", "")
+        vendor_url = vendor.get("Url", "")
+        if not vendor_name:
+            return ""
+        return f"{vendor_name}: {vendor_url}" if vendor_url else vendor_name
+
+    def _build_package_version_string(self, pkg: dict) -> str:
+        """
+        Build version string from package data.
+
+        :param dict pkg: Package data
+        :return: Formatted version string
+        :rtype: str
+        """
+        pkg_version = pkg.get("Version", "")
+        if not pkg_version:
+            return ""
+
+        version_str = pkg_version
+        if pkg_epoch := pkg.get("Epoch", ""):
+            version_str = f"{pkg_epoch}:{version_str}"
+        if pkg_release := pkg.get("Release", ""):
+            version_str = f"{version_str}-{pkg_release}"
+        if pkg_arch := pkg.get("Architecture", ""):
+            version_str = f"{version_str}.{pkg_arch}"
+        return version_str
+
+    def _extract_package_details(self, pkg: dict) -> str:
+        """
+        Extract package details from vulnerable package data.
+
+        :param dict pkg: Package data
+        :return: Formatted package details string
+        :rtype: str
+        """
+        pkg_details = []
+
+        if pkg_name := pkg.get("Name", ""):
+            pkg_details.append(f"Package: {pkg_name}")
+
+        if version_str := self._build_package_version_string(pkg):
+            pkg_details.append(f"Installed Version: {version_str}")
+
+        if fixed_version := pkg.get("FixedInVersion", ""):
+            pkg_details.append(f"Fixed In: {fixed_version}")
+
+        return " | ".join(pkg_details) if pkg_details else ""
+
+    def _process_vulnerability(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Process a single vulnerability and update CVE data dictionary.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        """
+        if cve_id := vuln.get("Id", ""):
+            cve_data["cve_ids"].append(cve_id)
+
+        if cvss_list := vuln.get("Cvss", []):
+            cve_data["cvss_scores"].extend(self._extract_cvss_scores(cvss_list))
+
+        if vendor := vuln.get("Vendor", {}):
+            if vendor_info := self._extract_vendor_info(vendor):
+                cve_data["vendor_info"].append(vendor_info)
+
+        if ref_urls := vuln.get("ReferenceUrls", []):
+            cve_data["reference_urls"].extend(ref_urls)
+
+        for pkg in vuln.get("VulnerablePackages", []):
+            if pkg_details := self._extract_package_details(pkg):
+                cve_data["vulnerability_details"].append(pkg_details)
+
+    def _process_vulnerability_enhanced(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Process a single vulnerability with enhanced structured data extraction.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update with structured fields
+        """
+        self._extract_cve_id(vuln, cve_data)
+        self._extract_cvss_scores(vuln, cve_data)
+        self._extract_vendor_info_from_vuln(vuln, cve_data)
+        self._extract_reference_urls(vuln, cve_data)
+        self._extract_exploit_availability(vuln, cve_data)
+        self._extract_package_details_from_vuln(vuln, cve_data)
+
+    def _extract_cve_id(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Extract CVE ID from vulnerability data.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if cve_id := vuln.get("Id", ""):
+            cve_data["cve_ids"].append(cve_id)
+
+    def _extract_cvss_scores(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Extract and parse CVSS scores from vulnerability data.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        cvss_list = vuln.get("Cvss", [])
+        if not cvss_list:
+            return
+
+        for cvss in cvss_list:
+            self._process_single_cvss_score(cvss, cve_data)
+
+    def _process_single_cvss_score(self, cvss: dict, cve_data: dict) -> None:
+        """
+        Process a single CVSS score entry.
+
+        :param dict cvss: CVSS score data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        version = cvss.get("Version", "")
+        score = cvss.get("BaseScore", 0)
+        vector = cvss.get("BaseVector", "")
+
+        if not score:
+            return
+
+        self._add_cvss_score_string(version, score, vector, cve_data)
+        self._update_cvss_structured_data(version, score, vector, cve_data)
+
+    def _add_cvss_score_string(self, version: str, score: float, vector: str, cve_data: dict) -> None:
+        """
+        Add formatted CVSS score string to CVE data.
+
+        :param str version: CVSS version
+        :param float score: CVSS score
+        :param str vector: CVSS vector
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        score_str = f"CVSS{version}: {score}"
+        if vector:
+            score_str += f" ({vector})"
+        cve_data["cvss_scores"].append(score_str)
+
+    def _update_cvss_structured_data(self, version: str, score: float, vector: str, cve_data: dict) -> None:
+        """
+        Update structured CVSS data fields based on version.
+
+        :param str version: CVSS version
+        :param float score: CVSS score
+        :param str vector: CVSS vector
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if version in ("3.0", "3.1"):
+            self._update_cvss_v3_data(score, vector, cve_data)
+        elif version == "2.0":
+            self._update_cvss_v2_data(score, vector, cve_data)
+
+    def _update_cvss_v3_data(self, score: float, vector: str, cve_data: dict) -> None:
+        """
+        Update CVSS v3 data with highest score.
+
+        :param float score: CVSS score
+        :param str vector: CVSS vector
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if cve_data["cvss_v3_score"] is None or score > cve_data["cvss_v3_score"]:
+            cve_data["cvss_v3_score"] = float(score)
+            if vector:
+                cve_data["cvss_v3_vector"] = vector
+
+    def _update_cvss_v2_data(self, score: float, vector: str, cve_data: dict) -> None:
+        """
+        Update CVSS v2 data with highest score.
+
+        :param float score: CVSS score
+        :param str vector: CVSS vector
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if cve_data["cvss_v2_score"] is None or score > cve_data["cvss_v2_score"]:
+            cve_data["cvss_v2_score"] = float(score)
+            if vector:
+                cve_data["cvss_v2_vector"] = vector
+
+    def _extract_vendor_info_from_vuln(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Extract vendor information from vulnerability data.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if vendor := vuln.get("Vendor", {}):
+            if vendor_info := self._extract_vendor_info(vendor):
+                cve_data["vendor_info"].append(vendor_info)
+
+    def _extract_reference_urls(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Extract reference URLs from vulnerability data.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if ref_urls := vuln.get("ReferenceUrls", []):
+            cve_data["reference_urls"].extend(ref_urls)
+
+    def _extract_exploit_availability(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Check and set exploit availability flag.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if vuln.get("ExploitAvailable"):
+            cve_data["exploit_available"] = True
+
+    def _extract_package_details_from_vuln(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Extract package details from vulnerability data.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        for pkg in vuln.get("VulnerablePackages", []):
+            self._process_vulnerable_package(pkg, cve_data)
+
+    def _process_vulnerable_package(self, pkg: dict, cve_data: dict) -> None:
+        """
+        Process a single vulnerable package.
+
+        :param dict pkg: Package data
+        :param dict cve_data: CVE data dictionary to update
+        :rtype: None
+        """
+        if pkg_name := pkg.get("Name", ""):
+            cve_data["affected_packages"].append(pkg_name)
+
+        if version_str := self._build_package_version_string(pkg):
+            cve_data["installed_versions"].append(version_str)
+
+        if fixed_version := pkg.get("FixedInVersion", ""):
+            cve_data["fixed_versions"].append(fixed_version)
+
+        if pkg_details := self._extract_package_details(pkg):
+            cve_data["vulnerability_details"].append(pkg_details)
+
+    def _extract_cve_data(self, finding: dict) -> dict:
+        """
+        Extract CVE and vulnerability data from AWS Security Hub finding with structured CVSS data.
+
+        :param dict finding: AWS Security Hub finding
+        :return: Dictionary with CVE data including structured CVSS scores
+        :rtype: dict
+        """
+        cve_data: dict = {
+            "cve_ids": [],
+            "cvss_scores": [],
+            "vulnerability_details": [],
+            "vendor_info": [],
+            "reference_urls": [],
+            # New structured fields
+            "cvss_v3_score": None,
+            "cvss_v2_score": None,
+            "cvss_v3_vector": None,
+            "cvss_v2_vector": None,
+            "affected_packages": [],
+            "installed_versions": [],
+            "fixed_versions": [],
+            "exploit_available": False,
+        }
+
+        vulnerabilities = finding.get("Vulnerabilities", [])
+        if not vulnerabilities:
+            return cve_data
+
+        for vuln in vulnerabilities:
+            self._process_vulnerability_enhanced(vuln, cve_data)
+
+        # Convert lists to comma-separated strings for model fields
+        if cve_data["affected_packages"]:
+            cve_data["affected_packages_str"] = ", ".join(cve_data["affected_packages"])
+        if cve_data["installed_versions"]:
+            cve_data["installed_versions_str"] = ", ".join(cve_data["installed_versions"])
+        if cve_data["fixed_versions"]:
+            cve_data["fixed_versions_str"] = ", ".join(cve_data["fixed_versions"])
+
+        return cve_data
+
+    def _create_integration_finding(
+        self,
+        resource: dict,
+        finding: dict,
+        severity: str,
+        comments: str,
+        status: str,
+        results: str,
+        due_date: str,
+        plugin_name: str,
+        plugin_id: str,
+    ) -> IntegrationFinding:
+        """
+        Create IntegrationFinding from processed finding data.
+
+        :param dict resource: AWS resource from finding
+        :param dict finding: AWS Security Hub finding
+        :param str severity: Severity level
+        :param str comments: Finding comments
+        :param str status: Compliance status
+        :param str results: Test results
+        :param str due_date: Due date string
+        :param str plugin_name: Plugin name
+        :param str plugin_id: Plugin ID
+        :return: Integration finding
+        :rtype: IntegrationFinding
+        """
+        # Extract CVE data from finding
+        cve_data = self._extract_cve_data(finding)
+
+        # Build enhanced comments with CVE information
+        enhanced_comments = comments
+        if cve_data["cve_ids"]:
+            enhanced_comments += f"\n\nCVE IDs: {', '.join(cve_data['cve_ids'])}"
+        if cve_data["cvss_scores"]:
+            enhanced_comments += f"\nCVSS Scores: {'; '.join(cve_data['cvss_scores'])}"
+        if cve_data["vulnerability_details"]:
+            enhanced_comments += "\n\nVulnerable Packages:\n" + "\n".join(
+                f"- {detail}" for detail in cve_data["vulnerability_details"]
+            )
+        if cve_data["vendor_info"]:
+            enhanced_comments += f"\n\nVendor Info: {'; '.join(cve_data['vendor_info'])}"
+        if cve_data["reference_urls"]:
+            enhanced_comments += "\n\nReferences:\n" + "\n".join(
+                f"- {url}" for url in cve_data["reference_urls"][:5]  # Limit to first 5 URLs
+            )
+
+        # Build observations with CVE details
+        observations = enhanced_comments
+
+        # Build gaps field with vulnerability details
+        gaps = ""
+        if cve_data["vulnerability_details"]:
+            gaps = "Vulnerable packages identified:\n" + "\n".join(cve_data["vulnerability_details"])
+
+        # Build evidence field with reference URLs
+        evidence = ""
+        if cve_data["reference_urls"]:
+            evidence = "Reference URLs:\n" + "\n".join(cve_data["reference_urls"])
+
+        # Determine vulnerability number and plugin_id (primary CVE ID if available)
+        vulnerability_number = cve_data["cve_ids"][0] if cve_data["cve_ids"] else ""
+        primary_cve = cve_data["cve_ids"][0] if cve_data["cve_ids"] else None
+
+        # Use CVE as plugin_id if available, otherwise use constructed plugin_id
+        if primary_cve:
+            plugin_id = primary_cve
+
+        # Extract first/last seen dates from finding
+        first_seen_date = date_str(finding.get("FirstObservedAt", finding.get("CreatedAt")))
+        last_seen_date = date_str(finding.get("LastObservedAt", finding.get("UpdatedAt", finding.get("CreatedAt"))))
+
+        return IntegrationFinding(
+            asset_identifier=resource["Id"],
+            external_id=finding.get("Id", ""),
+            control_labels=[],
+            title=finding["Title"],
+            category="SecurityHub",
+            issue_title=finding["Title"],
+            severity=self.finding_severity_map.get(severity),
+            description=finding["Description"],
+            status=self.get_configured_issue_status(),
+            checklist_status=self.get_checklist_status(status),
+            vulnerability_number=vulnerability_number,
+            results=results,
+            recommendation_for_mitigation=finding.get("Remediation", {}).get("Recommendation", {}).get("Text", ""),
+            comments=enhanced_comments,
+            poam_comments=enhanced_comments,
+            date_created=date_str(finding["CreatedAt"]),
+            due_date=due_date,
+            plugin_name=plugin_name,
+            plugin_id=plugin_id,
+            baseline=self.get_baseline(resource),
+            observations=observations,
+            gaps=gaps,
+            evidence=evidence,
+            impact="",
+            vulnerability_type="Vulnerability Scan",
+            # Vulnerability-specific fields
+            cve=primary_cve,
+            cvss_v3_score=cve_data.get("cvss_v3_score"),
+            cvss_v2_score=cve_data.get("cvss_v2_score"),
+            cvss_v3_vector=cve_data.get("cvss_v3_vector"),
+            cvss_v2_vector=cve_data.get("cvss_v2_vector"),
+            first_seen=first_seen_date,
+            last_seen=last_seen_date,
+            affected_packages=cve_data.get("affected_packages_str"),
+            installed_versions=cve_data.get("installed_versions_str"),
+            fixed_versions=cve_data.get("fixed_versions_str"),
+        )
+
     def parse_finding(self, finding: dict) -> list[IntegrationFinding]:
         """
         Parse AWS Security Hub to RegScale IntegrationFinding format.
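The CVE helpers above all feed _extract_cve_data, which walks the finding's Vulnerabilities list and aggregates it into a single dictionary. A minimal sketch of the input shape they expect, based only on the fields referenced in the code (values are illustrative, not taken from a real finding):

    finding = {
        "Vulnerabilities": [
            {
                "Id": "CVE-2024-0001",
                "Cvss": [{"Version": "3.1", "BaseScore": 8.1, "BaseVector": "CVSS:3.1/AV:N/AC:L"}],
                "Vendor": {"Name": "ExampleVendor", "Url": "https://example.com/advisory"},
                "ReferenceUrls": ["https://example.com/cve-2024-0001"],
                "ExploitAvailable": "YES",
                "VulnerablePackages": [
                    {"Name": "openssl", "Version": "1.1.1", "Release": "2", "FixedInVersion": "1.1.1w"}
                ],
            }
        ]
    }
    # _extract_cve_data(finding) would return, among other keys:
    #   cve_ids=["CVE-2024-0001"], cvss_v3_score=8.1, exploit_available=True,
    #   affected_packages_str="openssl", installed_versions_str="1.1.1-2", fixed_versions_str="1.1.1w"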
@@ -888,78 +1737,96 @@ Description: {description if isinstance(description, str) else ''}"""
         findings = []
         try:
             for resource in finding["Resources"]:
-                #
-
-                if
-
-
-
-
-
+                # Check if the service for this resource type is enabled
+                resource_type = resource.get("Type", "")
+                if not self.is_service_enabled_for_resource(resource_type):
+                    logger.debug(f"Skipping finding for disabled service resource type '{resource_type}'")
+                    continue
+
+                # Discover asset from resource
+                self._discover_asset_from_resource(resource, finding)
+
+                # Determine status and severity
                 status, results = determine_status_and_results(finding)
                 comments = get_comments(finding)
-                severity = check_finding_severity(comments)
-                friendly_sev =
-
-
-                elif severity in ["MEDIUM", "MODERATE"]:
-                    friendly_sev = "moderate"
-
-                # Filter findings based on minimum severity configuration
+                severity = check_finding_severity(finding, comments)
+                friendly_sev = self._get_friendly_severity(severity)
+
+                # Filter by minimum severity
                 if not self.should_process_finding_by_severity(severity):
                     logger.debug(f"Skipping finding with severity '{severity}' - below minimum threshold")
                     continue
-
-
-
-
-
-
-
-
-                # Create a unique plugin_id using the finding ID to ensure each finding creates a separate issue
-                finding_id = finding.get("Id", "")
-                # Extract just the finding UUID from the full ARN for a cleaner ID
-                finding_uuid = finding_id.split("/")[-1] if "/" in finding_id else finding_id.split(":")[-1]
-                plugin_id = f"{plugin_name.replace(' ', '_').replace('/', '_').replace(':', '_')}_{finding_uuid}"
-
-                findings.append(
-                    IntegrationFinding(
-                        asset_identifier=self.extract_name_from_arn(resource["Id"]),
-                        external_id=finding_id,  # Use the full finding ID as external_id for uniqueness
-                        control_labels=[],  # Determine how to populate this
-                        title=finding["Title"],
-                        category="SecurityHub",
-                        issue_title=finding["Title"],
-                        severity=self.finding_severity_map.get(severity),
-                        description=finding["Description"],
-                        status=self.get_configured_issue_status(),
-                        checklist_status=self.get_checklist_status(status),
-                        vulnerability_number="",
-                        results=results,
-                        recommendation_for_mitigation=finding.get("Remediation", {})
-                        .get("Recommendation", {})
-                        .get("Text", ""),
-                        comments=comments,
-                        poam_comments=comments,
-                        date_created=date_str(finding["CreatedAt"]),
-                        due_date=due_date,
-                        plugin_name=plugin_name,
-                        plugin_id=plugin_id,  # Add the sanitized plugin_id
-                        baseline=self.get_baseline(resource),
-                        observations=comments,
-                        gaps="",
-                        evidence="",
-                        impact="",
-                        vulnerability_type="Vulnerability Scan",
-                    )
+
+                # Calculate due date and construct IDs
+                due_date = self._get_due_date_for_finding(finding, friendly_sev)
+                plugin_name, plugin_id = self._construct_plugin_id(finding, resource)
+
+                # Create finding object
+                integration_finding = self._create_integration_finding(
+                    resource, finding, severity, comments, status, results, due_date, plugin_name, plugin_id
                 )
+                findings.append(integration_finding)
 
         except Exception as e:
             logger.error(f"Error parsing AWS Security Hub finding: {str(e)}", exc_info=True)
 
         return findings
 
+    def process_findings_with_evidence(
+        self,
+        findings: List[dict],
+        service_name: str,
+        generate_evidence: bool = False,
+        ssp_id: Optional[int] = None,
+        control_ids: Optional[List[int]] = None,
+        ocsf_format: bool = False,
+    ) -> tuple[List[IntegrationFinding], Optional[Any]]:
+        """
+        Process findings and optionally generate evidence
+
+        :param List[dict] findings: Raw AWS findings
+        :param str service_name: AWS service name
+        :param bool generate_evidence: Whether to generate evidence record
+        :param Optional[int] ssp_id: SSP ID to link evidence
+        :param Optional[List[int]] control_ids: Control IDs to link
+        :param bool ocsf_format: Whether to generate OCSF format
+        :return: Tuple of (parsed findings, evidence record)
+        :rtype: tuple[List[IntegrationFinding], Optional[Any]]
+        """
+        from regscale.integrations.commercial.aws.evidence_generator import AWSEvidenceGenerator
+        from regscale.integrations.commercial.aws.ocsf.mapper import AWSOCSFMapper
+
+        # Parse findings to IntegrationFinding objects
+        integration_findings = []
+        for finding in findings:
+            integration_findings.extend(self.parse_finding(finding))
+
+        # Generate OCSF data if requested
+        ocsf_data = None
+        if ocsf_format:
+            mapper = AWSOCSFMapper()
+            if service_name == "SecurityHub":
+                ocsf_data = [mapper.securityhub_to_ocsf(f) for f in findings]
+            elif service_name == "GuardDuty":
+                ocsf_data = [mapper.guardduty_to_ocsf(f) for f in findings]
+            elif service_name == "CloudTrail":
+                ocsf_data = [mapper.cloudtrail_event_to_ocsf(f) for f in findings]
+
+        # Generate evidence if requested
+        evidence_record = None
+        if generate_evidence:
+            from regscale.core.app.api import Api
+
+            evidence_gen = AWSEvidenceGenerator(api=Api(), ssp_id=ssp_id)
+            evidence_record = evidence_gen.create_evidence_from_scan(
+                service_name=service_name,
+                findings=findings,
+                ocsf_data=ocsf_data,
+                control_ids=control_ids,
+            )
+
+        return integration_findings, evidence_record
+
     def parse_resource_to_asset(self, resource: dict, finding: dict) -> Optional[IntegrationAsset]:
         """
         Parse AWS Security Hub resource to RegScale IntegrationAsset format.
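The new process_findings_with_evidence wrapper ties the pieces together: it parses raw findings, optionally maps them to OCSF per service, and optionally creates an evidence record through AWSEvidenceGenerator. A hedged usage sketch; the class name/import path and IDs are placeholders, and raw_findings stands in for the output of fetch_findings:

    from regscale.integrations.commercial.aws.scanner import AWSSecurityHub  # hypothetical path

    scanner = AWSSecurityHub(plan_id=123)  # assumed to be authenticated before use
    raw_findings: list = []  # e.g. list(scanner.fetch_findings(region="us-east-1"))
    parsed, evidence = scanner.process_findings_with_evidence(
        findings=raw_findings,
        service_name="SecurityHub",  # "GuardDuty" and "CloudTrail" route to their own OCSF mappers
        generate_evidence=True,      # requires a reachable RegScale API; creates an evidence record
        ssp_id=42,                   # placeholder SSP to link the evidence to
        control_ids=[101, 102],      # placeholder control IDs
        ocsf_format=True,
    )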
@@ -1030,7 +1897,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Firewall,  # Security groups act like firewalls
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1042,7 +1909,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes=notes,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             vlan_id=details.get("VpcId"),
             uri=uri,
             source_data=resource,
@@ -1083,7 +1950,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.NetworkRouter,  # Subnets are network infrastructure
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -1095,7 +1962,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes=notes,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             vlan_id=details.get("VpcId"),
             uri=uri,
             source_data=resource,
@@ -1119,7 +1986,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # IAM users don't fit standard asset types
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1131,7 +1998,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes="AWS IAM User Account",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1160,7 +2027,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.VM,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -1173,7 +2040,7 @@ Description: {description if isinstance(description, str) else ''}"""
             notes=f"AWS EC2 Instance - {instance_type}",
             model=instance_type,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             vlan_id=details.get("SubnetId"),
             uri=uri,
             source_data=resource,
@@ -1196,7 +2063,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # S3 buckets are storage, closest to Other
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1208,7 +2075,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes="AWS S3 Storage Bucket",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1235,7 +2102,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.VM,  # RDS instances are virtual database servers
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1249,7 +2116,7 @@ Description: {description if isinstance(description, str) else ''}"""
             model=db_class,
             software_name=engine,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1275,7 +2142,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # Lambda functions are serverless, closest to Other
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1288,7 +2155,7 @@ Description: {description if isinstance(description, str) else ''}"""
             notes=f"AWS Lambda Function - {runtime}",
             software_name=runtime,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1310,7 +2177,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # ECR repositories are container registries
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1322,7 +2189,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes="AWS ECR Container Repository",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1341,7 +2208,7 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1353,7 +2220,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes=f"AWS {resource_type}",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             source_data=resource,
             is_virtual=True,
         )
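The remaining hunks in this file all make the same substitution: parse_resource_to_asset now passes the resource's full ARN (resource_id) as both identifier and aws_identifier, matching the asset_identifier=resource["Id"] used on the finding side, so findings and discovered assets key on the same value. Illustrative sketch (the ARN is made up):

    resource = {"Id": "arn:aws:ec2:us-east-1:111122223333:instance/i-0abc123", "Type": "AwsEc2Instance"}
    resource_id = resource["Id"]
    # identifier=resource_id and aws_identifier=resource_id now both carry this full ARN,
    # the same string _create_integration_finding uses as the finding's asset_identifier.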