regscale-cli 6.21.2.0__py3-none-any.whl → 6.28.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- regscale/_version.py +1 -1
- regscale/airflow/hierarchy.py +2 -2
- regscale/core/app/api.py +5 -2
- regscale/core/app/application.py +36 -6
- regscale/core/app/internal/control_editor.py +73 -21
- regscale/core/app/internal/evidence.py +727 -204
- regscale/core/app/internal/login.py +4 -2
- regscale/core/app/internal/model_editor.py +219 -64
- regscale/core/app/utils/app_utils.py +86 -12
- regscale/core/app/utils/catalog_utils/common.py +1 -1
- regscale/core/login.py +21 -4
- regscale/core/utils/async_graphql_client.py +363 -0
- regscale/core/utils/date.py +77 -1
- regscale/dev/cli.py +26 -0
- regscale/dev/code_gen.py +109 -24
- regscale/dev/version.py +72 -0
- regscale/integrations/commercial/__init__.py +30 -2
- regscale/integrations/commercial/aws/audit_manager_compliance.py +3908 -0
- regscale/integrations/commercial/aws/cli.py +3107 -54
- regscale/integrations/commercial/aws/cloudtrail_control_mappings.py +333 -0
- regscale/integrations/commercial/aws/cloudtrail_evidence.py +501 -0
- regscale/integrations/commercial/aws/cloudwatch_control_mappings.py +357 -0
- regscale/integrations/commercial/aws/cloudwatch_evidence.py +490 -0
- regscale/integrations/commercial/{amazon → aws}/common.py +71 -19
- regscale/integrations/commercial/aws/config_compliance.py +914 -0
- regscale/integrations/commercial/aws/conformance_pack_mappings.py +198 -0
- regscale/integrations/commercial/aws/control_compliance_analyzer.py +439 -0
- regscale/integrations/commercial/aws/evidence_generator.py +283 -0
- regscale/integrations/commercial/aws/guardduty_control_mappings.py +340 -0
- regscale/integrations/commercial/aws/guardduty_evidence.py +1053 -0
- regscale/integrations/commercial/aws/iam_control_mappings.py +368 -0
- regscale/integrations/commercial/aws/iam_evidence.py +574 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +338 -22
- regscale/integrations/commercial/aws/inventory/base.py +107 -5
- regscale/integrations/commercial/aws/inventory/resources/analytics.py +390 -0
- regscale/integrations/commercial/aws/inventory/resources/applications.py +234 -0
- regscale/integrations/commercial/aws/inventory/resources/audit_manager.py +513 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail.py +315 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail_logs_metadata.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudwatch.py +191 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +328 -9
- regscale/integrations/commercial/aws/inventory/resources/config.py +464 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +74 -9
- regscale/integrations/commercial/aws/inventory/resources/database.py +481 -31
- regscale/integrations/commercial/aws/inventory/resources/developer_tools.py +253 -0
- regscale/integrations/commercial/aws/inventory/resources/guardduty.py +286 -0
- regscale/integrations/commercial/aws/inventory/resources/iam.py +470 -0
- regscale/integrations/commercial/aws/inventory/resources/inspector.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +175 -61
- regscale/integrations/commercial/aws/inventory/resources/kms.py +447 -0
- regscale/integrations/commercial/aws/inventory/resources/machine_learning.py +358 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +390 -67
- regscale/integrations/commercial/aws/inventory/resources/s3.py +394 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +268 -72
- regscale/integrations/commercial/aws/inventory/resources/securityhub.py +473 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +288 -29
- regscale/integrations/commercial/aws/inventory/resources/systems_manager.py +657 -0
- regscale/integrations/commercial/aws/inventory/resources/vpc.py +655 -0
- regscale/integrations/commercial/aws/kms_control_mappings.py +288 -0
- regscale/integrations/commercial/aws/kms_evidence.py +879 -0
- regscale/integrations/commercial/aws/ocsf/__init__.py +7 -0
- regscale/integrations/commercial/aws/ocsf/constants.py +115 -0
- regscale/integrations/commercial/aws/ocsf/mapper.py +435 -0
- regscale/integrations/commercial/aws/org_control_mappings.py +286 -0
- regscale/integrations/commercial/aws/org_evidence.py +666 -0
- regscale/integrations/commercial/aws/s3_control_mappings.py +356 -0
- regscale/integrations/commercial/aws/s3_evidence.py +632 -0
- regscale/integrations/commercial/aws/scanner.py +1072 -205
- regscale/integrations/commercial/aws/security_hub.py +319 -0
- regscale/integrations/commercial/aws/session_manager.py +282 -0
- regscale/integrations/commercial/aws/ssm_control_mappings.py +291 -0
- regscale/integrations/commercial/aws/ssm_evidence.py +492 -0
- regscale/integrations/commercial/jira.py +489 -153
- regscale/integrations/commercial/microsoft_defender/defender.py +326 -5
- regscale/integrations/commercial/microsoft_defender/defender_api.py +348 -14
- regscale/integrations/commercial/microsoft_defender/defender_constants.py +157 -0
- regscale/integrations/commercial/qualys/__init__.py +167 -68
- regscale/integrations/commercial/qualys/scanner.py +305 -39
- regscale/integrations/commercial/sarif/sairf_importer.py +432 -0
- regscale/integrations/commercial/sarif/sarif_converter.py +67 -0
- regscale/integrations/commercial/sicura/api.py +79 -42
- regscale/integrations/commercial/sicura/commands.py +8 -2
- regscale/integrations/commercial/sicura/scanner.py +83 -44
- regscale/integrations/commercial/stigv2/ckl_parser.py +5 -5
- regscale/integrations/commercial/synqly/assets.py +133 -16
- regscale/integrations/commercial/synqly/edr.py +2 -8
- regscale/integrations/commercial/synqly/query_builder.py +536 -0
- regscale/integrations/commercial/synqly/ticketing.py +27 -0
- regscale/integrations/commercial/synqly/vulnerabilities.py +165 -28
- regscale/integrations/commercial/tenablev2/cis_parsers.py +453 -0
- regscale/integrations/commercial/tenablev2/cis_scanner.py +447 -0
- regscale/integrations/commercial/tenablev2/commands.py +146 -5
- regscale/integrations/commercial/tenablev2/scanner.py +1 -3
- regscale/integrations/commercial/tenablev2/stig_parsers.py +113 -57
- regscale/integrations/commercial/wizv2/WizDataMixin.py +1 -1
- regscale/integrations/commercial/wizv2/click.py +191 -76
- regscale/integrations/commercial/wizv2/compliance/__init__.py +15 -0
- regscale/integrations/commercial/wizv2/{policy_compliance_helpers.py → compliance/helpers.py} +78 -60
- regscale/integrations/commercial/wizv2/compliance_report.py +1592 -0
- regscale/integrations/commercial/wizv2/core/__init__.py +133 -0
- regscale/integrations/commercial/wizv2/{async_client.py → core/client.py} +7 -3
- regscale/integrations/commercial/wizv2/{constants.py → core/constants.py} +92 -89
- regscale/integrations/commercial/wizv2/core/file_operations.py +237 -0
- regscale/integrations/commercial/wizv2/fetchers/__init__.py +11 -0
- regscale/integrations/commercial/wizv2/{data_fetcher.py → fetchers/policy_assessment.py} +66 -9
- regscale/integrations/commercial/wizv2/file_cleanup.py +104 -0
- regscale/integrations/commercial/wizv2/issue.py +776 -28
- regscale/integrations/commercial/wizv2/models/__init__.py +0 -0
- regscale/integrations/commercial/wizv2/parsers/__init__.py +34 -0
- regscale/integrations/commercial/wizv2/{parsers.py → parsers/main.py} +1 -1
- regscale/integrations/commercial/wizv2/processors/__init__.py +11 -0
- regscale/integrations/commercial/wizv2/{finding_processor.py → processors/finding.py} +1 -1
- regscale/integrations/commercial/wizv2/reports.py +243 -0
- regscale/integrations/commercial/wizv2/sbom.py +1 -1
- regscale/integrations/commercial/wizv2/scanner.py +1031 -441
- regscale/integrations/commercial/wizv2/utils/__init__.py +48 -0
- regscale/integrations/commercial/wizv2/{utils.py → utils/main.py} +116 -61
- regscale/integrations/commercial/wizv2/variables.py +89 -3
- regscale/integrations/compliance_integration.py +1036 -151
- regscale/integrations/control_matcher.py +432 -0
- regscale/integrations/due_date_handler.py +333 -0
- regscale/integrations/milestone_manager.py +291 -0
- regscale/integrations/public/__init__.py +14 -0
- regscale/integrations/public/cci_importer.py +834 -0
- regscale/integrations/public/csam/__init__.py +0 -0
- regscale/integrations/public/csam/csam.py +938 -0
- regscale/integrations/public/csam/csam_agency_defined.py +179 -0
- regscale/integrations/public/csam/csam_common.py +154 -0
- regscale/integrations/public/csam/csam_controls.py +432 -0
- regscale/integrations/public/csam/csam_poam.py +124 -0
- regscale/integrations/public/fedramp/click.py +77 -6
- regscale/integrations/public/fedramp/docx_parser.py +10 -1
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +675 -289
- regscale/integrations/public/fedramp/fedramp_five.py +1 -1
- regscale/integrations/public/fedramp/poam/scanner.py +75 -7
- regscale/integrations/public/fedramp/poam_export_v5.py +888 -0
- regscale/integrations/scanner_integration.py +1961 -430
- regscale/models/integration_models/CCI_List.xml +1 -0
- regscale/models/integration_models/aqua.py +2 -2
- regscale/models/integration_models/cisa_kev_data.json +805 -11
- regscale/models/integration_models/flat_file_importer/__init__.py +5 -8
- regscale/models/integration_models/nexpose.py +36 -10
- regscale/models/integration_models/qualys.py +3 -4
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +87 -18
- regscale/models/integration_models/synqly_models/filter_parser.py +332 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +124 -25
- regscale/models/integration_models/synqly_models/synqly_model.py +89 -16
- regscale/models/locking.py +12 -8
- regscale/models/platform.py +4 -2
- regscale/models/regscale_models/__init__.py +7 -0
- regscale/models/regscale_models/assessment.py +2 -1
- regscale/models/regscale_models/catalog.py +1 -1
- regscale/models/regscale_models/compliance_settings.py +251 -1
- regscale/models/regscale_models/component.py +1 -0
- regscale/models/regscale_models/control_implementation.py +236 -41
- regscale/models/regscale_models/control_objective.py +74 -5
- regscale/models/regscale_models/file.py +2 -0
- regscale/models/regscale_models/form_field_value.py +5 -3
- regscale/models/regscale_models/inheritance.py +44 -0
- regscale/models/regscale_models/issue.py +301 -102
- regscale/models/regscale_models/milestone.py +33 -14
- regscale/models/regscale_models/organization.py +3 -0
- regscale/models/regscale_models/regscale_model.py +310 -73
- regscale/models/regscale_models/security_plan.py +4 -2
- regscale/models/regscale_models/vulnerability.py +3 -3
- regscale/regscale.py +25 -4
- regscale/templates/__init__.py +0 -0
- regscale/utils/threading/threadhandler.py +20 -15
- regscale/validation/record.py +23 -1
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/METADATA +17 -33
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/RECORD +310 -111
- tests/core/__init__.py +0 -0
- tests/core/utils/__init__.py +0 -0
- tests/core/utils/test_async_graphql_client.py +472 -0
- tests/fixtures/test_fixture.py +13 -8
- tests/regscale/core/test_login.py +171 -4
- tests/regscale/integrations/commercial/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_compliance.py +1304 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_evidence_aggregation.py +341 -0
- tests/regscale/integrations/commercial/aws/test_aws_analytics_collector.py +260 -0
- tests/regscale/integrations/commercial/aws/test_aws_applications_collector.py +242 -0
- tests/regscale/integrations/commercial/aws/test_aws_audit_manager_collector.py +1155 -0
- tests/regscale/integrations/commercial/aws/test_aws_cloudtrail_collector.py +534 -0
- tests/regscale/integrations/commercial/aws/test_aws_config_collector.py +400 -0
- tests/regscale/integrations/commercial/aws/test_aws_developer_tools_collector.py +203 -0
- tests/regscale/integrations/commercial/aws/test_aws_guardduty_collector.py +315 -0
- tests/regscale/integrations/commercial/aws/test_aws_iam_collector.py +458 -0
- tests/regscale/integrations/commercial/aws/test_aws_inspector_collector.py +353 -0
- tests/regscale/integrations/commercial/aws/test_aws_inventory_integration.py +530 -0
- tests/regscale/integrations/commercial/aws/test_aws_kms_collector.py +919 -0
- tests/regscale/integrations/commercial/aws/test_aws_machine_learning_collector.py +237 -0
- tests/regscale/integrations/commercial/aws/test_aws_s3_collector.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_scanner_integration.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_securityhub_collector.py +792 -0
- tests/regscale/integrations/commercial/aws/test_aws_systems_manager_collector.py +918 -0
- tests/regscale/integrations/commercial/aws/test_aws_vpc_collector.py +996 -0
- tests/regscale/integrations/commercial/aws/test_cli_evidence.py +431 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_control_mappings.py +452 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_evidence.py +788 -0
- tests/regscale/integrations/commercial/aws/test_config_compliance.py +298 -0
- tests/regscale/integrations/commercial/aws/test_conformance_pack_mappings.py +200 -0
- tests/regscale/integrations/commercial/aws/test_control_compliance_analyzer.py +375 -0
- tests/regscale/integrations/commercial/aws/test_datetime_parsing.py +223 -0
- tests/regscale/integrations/commercial/aws/test_evidence_generator.py +386 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_control_mappings.py +564 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_evidence.py +1041 -0
- tests/regscale/integrations/commercial/aws/test_iam_control_mappings.py +718 -0
- tests/regscale/integrations/commercial/aws/test_iam_evidence.py +1375 -0
- tests/regscale/integrations/commercial/aws/test_kms_control_mappings.py +656 -0
- tests/regscale/integrations/commercial/aws/test_kms_evidence.py +1163 -0
- tests/regscale/integrations/commercial/aws/test_ocsf_mapper.py +370 -0
- tests/regscale/integrations/commercial/aws/test_org_control_mappings.py +546 -0
- tests/regscale/integrations/commercial/aws/test_org_evidence.py +1240 -0
- tests/regscale/integrations/commercial/aws/test_s3_control_mappings.py +672 -0
- tests/regscale/integrations/commercial/aws/test_s3_evidence.py +987 -0
- tests/regscale/integrations/commercial/aws/test_scanner_evidence.py +373 -0
- tests/regscale/integrations/commercial/aws/test_security_hub_config_filtering.py +539 -0
- tests/regscale/integrations/commercial/aws/test_session_manager.py +516 -0
- tests/regscale/integrations/commercial/aws/test_ssm_control_mappings.py +588 -0
- tests/regscale/integrations/commercial/aws/test_ssm_evidence.py +735 -0
- tests/regscale/integrations/commercial/conftest.py +28 -0
- tests/regscale/integrations/commercial/microsoft_defender/__init__.py +1 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender.py +1517 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_api.py +1748 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_constants.py +327 -0
- tests/regscale/integrations/commercial/microsoft_defender/test_defender_scanner.py +487 -0
- tests/regscale/integrations/commercial/test_aws.py +3742 -0
- tests/regscale/integrations/commercial/test_burp.py +48 -0
- tests/regscale/integrations/commercial/test_crowdstrike.py +49 -0
- tests/regscale/integrations/commercial/test_dependabot.py +341 -0
- tests/regscale/integrations/commercial/test_gcp.py +1543 -0
- tests/regscale/integrations/commercial/test_gitlab.py +549 -0
- tests/regscale/integrations/commercial/test_ip_mac_address_length.py +84 -0
- tests/regscale/integrations/commercial/test_jira.py +2204 -0
- tests/regscale/integrations/commercial/test_npm_audit.py +42 -0
- tests/regscale/integrations/commercial/test_okta.py +1228 -0
- tests/regscale/integrations/commercial/test_sarif_converter.py +251 -0
- tests/regscale/integrations/commercial/test_sicura.py +349 -0
- tests/regscale/integrations/commercial/test_snow.py +423 -0
- tests/regscale/integrations/commercial/test_sonarcloud.py +394 -0
- tests/regscale/integrations/commercial/test_sqlserver.py +186 -0
- tests/regscale/integrations/commercial/test_stig.py +33 -0
- tests/regscale/integrations/commercial/test_stig_mapper.py +153 -0
- tests/regscale/integrations/commercial/test_stigv2.py +406 -0
- tests/regscale/integrations/commercial/test_wiz.py +1365 -0
- tests/regscale/integrations/commercial/test_wiz_inventory.py +256 -0
- tests/regscale/integrations/commercial/wizv2/__init__.py +339 -0
- tests/regscale/integrations/commercial/wizv2/compliance/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/compliance/test_helpers.py +903 -0
- tests/regscale/integrations/commercial/wizv2/core/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/core/test_auth.py +701 -0
- tests/regscale/integrations/commercial/wizv2/core/test_client.py +1037 -0
- tests/regscale/integrations/commercial/wizv2/core/test_file_operations.py +989 -0
- tests/regscale/integrations/commercial/wizv2/fetchers/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/fetchers/test_policy_assessment.py +805 -0
- tests/regscale/integrations/commercial/wizv2/parsers/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/parsers/test_main.py +1153 -0
- tests/regscale/integrations/commercial/wizv2/processors/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/processors/test_finding.py +671 -0
- tests/regscale/integrations/commercial/wizv2/test_WizDataMixin.py +537 -0
- tests/regscale/integrations/commercial/wizv2/test_click_comprehensive.py +851 -0
- tests/regscale/integrations/commercial/wizv2/test_compliance_report_comprehensive.py +910 -0
- tests/regscale/integrations/commercial/wizv2/test_compliance_report_normalization.py +138 -0
- tests/regscale/integrations/commercial/wizv2/test_file_cleanup.py +283 -0
- tests/regscale/integrations/commercial/wizv2/test_file_operations.py +260 -0
- tests/regscale/integrations/commercial/wizv2/test_issue.py +343 -0
- tests/regscale/integrations/commercial/wizv2/test_issue_comprehensive.py +1203 -0
- tests/regscale/integrations/commercial/wizv2/test_reports.py +497 -0
- tests/regscale/integrations/commercial/wizv2/test_sbom.py +643 -0
- tests/regscale/integrations/commercial/wizv2/test_scanner_comprehensive.py +805 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_click_client_id.py +165 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_report.py +1394 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_compliance_unit.py +341 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_control_normalization.py +138 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_findings_comprehensive.py +364 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_inventory_comprehensive.py +644 -0
- tests/regscale/integrations/commercial/wizv2/test_wiz_status_mapping.py +149 -0
- tests/regscale/integrations/commercial/wizv2/test_wizv2.py +1218 -0
- tests/regscale/integrations/commercial/wizv2/test_wizv2_utils.py +519 -0
- tests/regscale/integrations/commercial/wizv2/utils/__init__.py +1 -0
- tests/regscale/integrations/commercial/wizv2/utils/test_main.py +1523 -0
- tests/regscale/integrations/public/__init__.py +0 -0
- tests/regscale/integrations/public/fedramp/__init__.py +1 -0
- tests/regscale/integrations/public/fedramp/test_gen_asset_list.py +150 -0
- tests/regscale/integrations/public/fedramp/test_poam_export_v5.py +1293 -0
- tests/regscale/integrations/public/test_alienvault.py +220 -0
- tests/regscale/integrations/public/test_cci.py +1053 -0
- tests/regscale/integrations/public/test_cisa.py +1021 -0
- tests/regscale/integrations/public/test_emass.py +518 -0
- tests/regscale/integrations/public/test_fedramp.py +1152 -0
- tests/regscale/integrations/public/test_fedramp_cis_crm.py +3661 -0
- tests/regscale/integrations/public/test_file_uploads.py +506 -0
- tests/regscale/integrations/public/test_oscal.py +453 -0
- tests/regscale/integrations/test_compliance_status_mapping.py +406 -0
- tests/regscale/integrations/test_control_matcher.py +1421 -0
- tests/regscale/integrations/test_control_matching.py +155 -0
- tests/regscale/integrations/test_milestone_manager.py +408 -0
- tests/regscale/models/test_control_implementation.py +118 -3
- tests/regscale/models/test_form_field_value_integration.py +304 -0
- tests/regscale/models/test_issue.py +378 -1
- tests/regscale/models/test_module_integration.py +582 -0
- tests/regscale/models/test_tenable_integrations.py +811 -105
- regscale/integrations/commercial/wizv2/policy_compliance.py +0 -3057
- regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +0 -7388
- regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +0 -9605
- regscale/integrations/public/fedramp/parts_mapper.py +0 -107
- /regscale/integrations/commercial/{amazon → sarif}/__init__.py +0 -0
- /regscale/integrations/commercial/wizv2/{wiz_auth.py → core/auth.py} +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.21.2.0.dist-info → regscale_cli-6.28.2.1.dist-info}/top_level.txt +0 -0
@@ -11,20 +11,23 @@ import os
 import shutil
 import zipfile
 from datetime import datetime
+from logging import getLogger
+from pathlib import Path
 from typing import Tuple

 import click  # type: ignore
 import pdfplumber  # type: ignore
 from docx import Document  # type: ignore
-from pathlib import Path
 from rich.progress import Progress, TaskID

 from regscale.core.app.api import Api
 from regscale.core.app.application import Application
-from regscale.core.app.logz import create_logger
 from regscale.core.app.utils.app_utils import check_file_path, create_progress_object, error_and_exit
 from regscale.models.app_models.click import regscale_ssp_id
-from regscale.models.regscale_models import Assessment, File, Project, SecurityPlan
+from regscale.models.regscale_models import Assessment, File, Project, SecurityPlan, Evidence, Component
+from regscale.models.regscale_models.control_implementation import ControlImplementation
+
+logger = getLogger("regscale")


 @click.group()
@@ -64,74 +67,60 @@ def run_evidence_collection():
     app = Application()
     api = Api()
     config = app.config
+
+    logger.info("Starting evidence collection process")
+
     check_file_path("./static")
     progress = create_progress_object()
     with progress:
-
+        task1 = progress.add_task("[white]Initializing evidence collection...", total=4)
         # call function to define variable for use outside of function
         evidence_folder, dir_name, new_cwd = set_directory_variables(
-            task=
+            task=task1, evidence_folder=config["evidenceFolder"], progress=progress
         )

-        task1 = progress.add_task("[white]Building a required documents list from config.json...", total=3)
         # call function to define variable for use outside of function
         required_docs, document_list = parse_required_docs(
             evidence_folder=evidence_folder, task=task1, progress=progress
         )

-        task2 = progress.add_task("[white]Calculating files last modified times...", total=5)
         # call function to define variable for use outside of function
-        times = get_doc_timestamps(evidence_folder=new_cwd, directory=dir_name, task=
+        times = get_doc_timestamps(evidence_folder=new_cwd, directory=dir_name, task=task1, progress=progress)

-        task3 = progress.add_task("[white]Building a required texts list from config.json...", total=3)
         # call function to define variable for use outside of function
-        texts = set_required_texts(evidence_folder=evidence_folder, task=
-
-        task4 = progress.add_task("[white]Searching evidence folder for required files...", total=4)
+        texts = set_required_texts(evidence_folder=evidence_folder, task=task1, progress=progress)

         # call function to define variable for use outside of function
-        folders = find_required_files_in_folder(evidence_folder=new_cwd, task=
+        folders = find_required_files_in_folder(evidence_folder=new_cwd, task=task1, progress=progress)

-
+        task2 = progress.add_task("[white]Analyzing documents and content...", total=6)

         # call function to define variable for use outside of function
         sig_results = signature_assessment_results(
-            directory=folders, r_docs=required_docs, task=
+            directory=folders, r_docs=required_docs, task=task2, progress=progress
         )

-        task6 = progress.add_task("[white]Testing if required documents are present...", total=2)
-
         # call function to define variable for use outside of function
         doc_results = document_assessment_results(
-            directory=folders, documents=document_list, task=
+            directory=folders, documents=document_list, task=task2, progress=progress
         )

-        task7 = progress.add_task("[white]Extracting texts from required files...", total=4)
-
         # call function to define variable for use outside of function
-        file_texts = parse_required_text_from_files(evidence_folder=new_cwd, task=
-
-        task8 = progress.add_task("[white]Searching for required text in parsed documents...", total=2)
+        file_texts = parse_required_text_from_files(evidence_folder=new_cwd, task=task2, progress=progress)

         # call function to define variable for use outside of function
-        search_results = text_string_search(f_texts=file_texts, req_texts=texts, task=
-
-        task9 = progress.add_task("[white]Testing if required texts are present", total=2)
+        search_results = text_string_search(f_texts=file_texts, req_texts=texts, task=task2, progress=progress)

         # call function to define variable for use outside of function
-        text_results = text_assessment_results(searches=search_results, r_texts=texts, task=
+        text_results = text_assessment_results(searches=search_results, r_texts=texts, task=task2, progress=progress)

-
+        task3 = progress.add_task("[white]Processing assessment data...", total=4)

         # call function to define variable for use outside of function
-        data = gather_test_project_data(api=api, evidence_folder=evidence_folder, task=
-
-        task11 = progress.add_task("[white]Testing file modification times...", total=2)
+        data = gather_test_project_data(api=api, evidence_folder=evidence_folder, task=task3, progress=progress)

         # call function to define variable to use outside of function
-        time_results = assess_doc_timestamps(timestamps=times, documents=required_docs, task=
-
-        task12 = progress.add_task("[white]Building assessment report...", total=4)
+        time_results = assess_doc_timestamps(timestamps=times, documents=required_docs, task=task3, progress=progress)

         # call function to define variable to use outside of function
         report = assessments_report(
@@ -139,32 +128,66 @@ def run_evidence_collection():
             textres=text_results,
             timeres=time_results,
             sigres=sig_results,
-            task=
+            task=task3,
             progress=progress,
         )

-        task13 = progress.add_task("[white]Building assessment results dataframe...", total=4)
-
         # call function to define variable to use outside of function
-        results = build_assessment_dataframe(assessments=report, task=
-
-        task14 = progress.add_task("[white]Calculating assessment score...", total=1)
+        results = build_assessment_dataframe(assessments=report, task=task3, progress=progress)

         # call function to define variable for use outside of function
-        score_data = build_score_data(assessments=results, task=
-
-        task15 = progress.add_task("[white]Building a table for the assessment report...", total=4)
+        score_data = build_score_data(assessments=results, task=task3, progress=progress)

         # call function to define variable for use outside of function
-        html_output = build_html_table(assessments=report, task=
-
-        task16 = progress.add_task("[white]Creating child assessment based on test results...", total=2)
+        html_output = build_html_table(assessments=report, task=task3, progress=progress)

         # call function to create child assessment via POST request
         create_child_assessments(
-            api=api, project_data=data, output=html_output, score_data=score_data, task=
+            api=api, project_data=data, output=html_output, score_data=score_data, task=task3, progress=progress
         )

+        # Display collected files summary
+        display_collected_files(folders, evidence_folder)
+
+
+def display_collected_files(folders: list[dict], evidence_folder: str) -> None:
+    """
+    Display a summary of collected files to the user
+
+    :param list[dict] folders: List of files found in evidence folder
+    :param str evidence_folder: Path to evidence folder
+    :rtype: None
+    """
+    if not folders:
+        logger.info("No files were collected from the evidence folder.")
+        return
+
+    logger.info("=" * 60)
+    logger.info("EVIDENCE COLLECTION SUMMARY")
+    logger.info("=" * 60)
+    logger.info(f"Evidence folder: {evidence_folder}")
+    logger.info(f"Total files collected: {len(folders)}")
+    logger.info("")
+
+    # Group files by program/folder
+    programs = {}
+    for file_info in folders:
+        program = file_info.get("program", "unknown")
+        filename = file_info.get("file", "unknown")
+        if program not in programs:
+            programs[program] = []
+        programs[program].append(filename)
+
+    # Display files by program
+    for program, files in programs.items():
+        logger.info(f"Program: {program}")
+        logger.info("-" * 40)
+        for file in sorted(files):
+            logger.info(f" • {file}")
+        logger.info("")
+
+    logger.info("=" * 60)
+

 def package_builder(ssp_id: int, path: Path):
     """Function to build a directory of evidence and produce a zip file for extraction and use
@@ -176,7 +199,7 @@ def package_builder(ssp_id: int, path: Path):
     app = Application()
     api = Api()
     with create_progress_object() as progress:
-        task = progress.add_task("[white]Building and zipping evidence folder for audit...", total=
+        task = progress.add_task("[white]Building and zipping evidence folder for audit...", total=8)
         try:
             # Obtaining MEGA Api for given Organizer Record.
             ssp = SecurityPlan.fetch_mega_api_data(ssp_id)
@@ -199,6 +222,16 @@ def package_builder(ssp_id: int, path: Path):

             progress.update(task, advance=1)

+            # Process evidence lockers at SSP level
+            process_ssp_evidence_lockers(
+                ssp_id=ssp_id,
+                path=path,
+                module_folder=module_folder,
+                api=api,
+            )
+
+            progress.update(task, advance=1)
+
             # Checking MEGA Api for Attachments at Control level
             process_control_attachments(
                 ssp=ssp,
@@ -209,6 +242,19 @@ def package_builder(ssp_id: int, path: Path):
                 api=api,
                 task=task,
             )
+
+            progress.update(task, advance=1)
+
+            # Process components and their evidence
+            process_components_evidence(
+                ssp_id=ssp_id,
+                path=path,
+                module_folder=module_folder,
+                api=api,
+            )
+
+            progress.update(task, advance=1)
+
             # Creating zip file and removing temporary Evidence Folder
             new_path = Path("./evidence.zip")
             zip_folder(path, new_path)
@@ -327,6 +373,9 @@ def process_control_attachments(
         # Adding any Attachments at Control level to corresponding folder
         _download_control_attachments(control_attachments, api, path, module_folder_name)

+        # Process evidence lockers for controls
+        _process_control_evidence_lockers(control_attachments, api, path, module_folder_name)
+
         progress.update(task, advance=1)

     else:
@@ -366,6 +415,396 @@ def _download_control_attachments(
         json.dump(f, file_drop, indent=4, separators=(", ", ": "))


+def _get_control_folder_name(control_attachments: list[dict], control_id: int) -> str | None:
+    """
+    Get the control folder name for a given control ID
+
+    :param list[dict] control_attachments: List of control attachments
+    :param int control_id: Control ID to find folder name for
+    :return: Control folder name or None
+    :rtype: str | None
+    """
+    for f in control_attachments:
+        if f["parentId"] == control_id:
+            return f["controlId"]
+    return None
+
+
+def _download_control_evidence_items(
+    evidence_items: list[dict], control_folder_name: str, path: Path, module_folder_name: str, api: Api
+) -> None:
+    """
+    Download evidence items for a control
+
+    :param list[dict] evidence_items: List of evidence items
+    :param str control_folder_name: Name of the control folder
+    :param Path path: Base path for downloads
+    :param str module_folder_name: Module folder name
+    :param Api api: API object
+    :rtype: None
+    """
+    logger.info(f"Found {len(evidence_items)} evidence items for control {control_folder_name}")
+
+    for evidence_item in evidence_items:
+        file_name = evidence_item.get("trustedDisplayName", f"evidence_{evidence_item.get('id', 'unknown')}")
+        output_path = f"{path}/{module_folder_name}/{control_folder_name}/{file_name}"
+
+        if download_evidence_file(api, evidence_item, output_path):
+            logger.info(f"Downloaded evidence file: {file_name}")
+        else:
+            logger.warning(f"Failed to download evidence file: {file_name}")
+
+
+def _process_control_evidence_lockers(
+    control_attachments: list[dict], api: Api, path: Path, module_folder_name: str
+) -> None:
+    """
+    Process evidence lockers for controls
+
+    :param list[dict] control_attachments: List of control attachments
+    :param Api api: RegScale CLI API object
+    :param Path path: directory for file location
+    :param str module_folder_name: name of the module folder
+    :rtype: None
+    """
+    # Get unique control IDs
+    control_ids = list({f["parentId"] for f in control_attachments})
+
+    for control_id in control_ids:
+        try:
+            # Get evidence from evidence lockers for this control
+            evidence_items = get_evidence_by_control(api, control_id)
+
+            if evidence_items:
+                # Find the control ID for folder naming
+                control_folder_name = _get_control_folder_name(control_attachments, control_id)
+
+                if control_folder_name:
+                    _download_control_evidence_items(evidence_items, control_folder_name, path, module_folder_name, api)
+        except Exception as e:
+            logger.warning(f"Failed to process evidence lockers for control {control_id}: {e}")
+
+
+def get_evidence_by_control(api: Api, control_id: int) -> list[dict]:
+    """
+    Get evidence for a specific control
+
+    :param Api api: RegScale CLI API object (kept for backward compatibility)
+    :param int control_id: Control ID
+    :return: List of evidence items
+    :rtype: list[dict]
+    """
+    # Suppress unused parameter warning for backward compatibility
+    _ = api
+
+    try:
+        # Use Evidence model method instead of direct API call
+        evidence_items = Evidence.get_all_by_parent(parent_id=control_id, parent_module="controls")
+        # Convert to dict format for compatibility
+        return [evidence.dict() for evidence in evidence_items]
+    except Exception as e:
+        logger.warning(f"Failed to get evidence for control {control_id}: {e}")
+        return []
+
+
+def get_evidence_by_security_plan(api: Api, ssp_id: int) -> list[dict]:
+    """
+    Get evidence for a specific security plan
+
+    :param Api api: RegScale CLI API object (kept for backward compatibility)
+    :param int ssp_id: Security Plan ID
+    :return: List of evidence items
+    :rtype: list[dict]
+    """
+    # Suppress unused parameter warning for backward compatibility
+    _ = api
+
+    try:
+        # Use Evidence model method instead of direct API call
+        evidence_items = Evidence.get_all_by_parent(parent_id=ssp_id, parent_module="securityplans")
+        # Convert to dict format for compatibility
+        return [evidence.dict() for evidence in evidence_items]
+    except Exception as e:
+        logger.warning(f"Failed to get evidence for security plan {ssp_id}: {e}")
+        return []
+
+
+def get_components_by_ssp(api: Api, ssp_id: int) -> list[dict]:
+    """
+    Get components for a specific security plan
+
+    :param Api api: RegScale CLI API object (kept for backward compatibility)
+    :param int ssp_id: Security Plan ID
+    :return: List of active components
+    :rtype: list[dict]
+    """
+    # Suppress unused parameter warning for backward compatibility
+    _ = api
+
+    try:
+        # Use Component model method instead of direct API call
+        components = Component.get_all_by_parent(parent_id=ssp_id, parent_module="securityplans")
+        # Filter for active components only and convert to dict format
+        return [comp.dict() for comp in components if comp.status == "Active"]
+    except Exception as e:
+        logger.warning(f"Failed to get components for security plan {ssp_id}: {e}")
+        return []
+
+
+def get_controls_by_parent(api: Api, parent_id: int, parent_module: str) -> list[dict]:
+    """
+    Get controls for a specific parent (SSP or Component)
+
+    :param Api api: RegScale CLI API object (kept for backward compatibility)
+    :param int parent_id: Parent ID
+    :param str parent_module: Parent module (securityplans or components)
+    :return: List of controls
+    :rtype: list[dict]
+    """
+    # Suppress unused parameter warning for backward compatibility
+    _ = api
+
+    try:
+        # Use ControlImplementation model method instead of direct API call
+        controls = ControlImplementation.get_all_by_parent(parent_id=parent_id, parent_module=parent_module)
+        # Convert to dict format for compatibility
+        return [control.dict() for control in controls]
+    except Exception as e:
+        logger.warning(f"Failed to get controls for parent {parent_id} in module {parent_module}: {e}")
+        return []
+
+
+def download_evidence_file(api: Api, evidence_item: dict, output_path: str) -> bool:
+    """
+    Download an evidence file
+
+    :param Api api: RegScale CLI API object
+    :param dict evidence_item: Evidence item data
+    :param str output_path: Path to save the file
+    :return: True if successful, False otherwise
+    :rtype: bool
+    """
+    try:
+        file_data = File.download_file_from_regscale_to_memory(
+            api=api,
+            record_id=evidence_item["parentId"],
+            module=evidence_item["parentModule"],
+            stored_name=evidence_item["trustedStorageName"],
+            file_hash=evidence_item.get("fileHash") or evidence_item.get("shaHash"),
+        )
+
+        if file_data is None:
+            logger.warning(f"No data received for evidence file {evidence_item.get('trustedDisplayName', 'unknown')}")
+            return False
+
+        with open(output_path, "wb") as f:
+            f.write(file_data)
+        return True
+    except Exception as e:
+        logger.warning(f"Failed to download evidence file {evidence_item.get('trustedDisplayName', 'unknown')}: {e}")
+        return False
+
+
+def process_ssp_evidence_lockers(ssp_id: int, path: Path, module_folder: Path, api: Api) -> None:
+    """
+    Process evidence lockers at SSP level
+
+    :param int ssp_id: Security Plan ID
+    :param Path path: directory for file location
+    :param str module_folder_name: name of the module folder
+    :param Path module_folder: path to module folder
+    :param Api api: RegScale CLI API object
+    :rtype: None
+    """
+    try:
+        # Get evidence from evidence lockers for the SSP
+        evidence_items = get_evidence_by_security_plan(api, ssp_id)
+
+        if evidence_items:
+            logger.info(f"Found {len(evidence_items)} evidence items from evidence lockers for SSP {ssp_id}")
+
+            for evidence_item in evidence_items:
+                file_name = evidence_item.get("trustedDisplayName", f"evidence_{evidence_item.get('id', 'unknown')}")
+                output_path = module_folder / file_name
+
+                if download_evidence_file(api, evidence_item, str(output_path)):
+                    logger.info(f"Downloaded evidence file: {file_name}")
+                else:
+                    logger.warning(f"Failed to download evidence file: {file_name}")
+        else:
+            logger.info("No evidence found in evidence lockers for SSP")
+
+    except Exception as e:
+        logger.warning(f"Error processing SSP evidence lockers: {e}")
+
+
+def _download_files_for_parent(
+    parent_id: int, parent_module: str, output_folder: Path, api: Api, module_name: str = None
+) -> None:
+    """
+    Generalized function to download files for any parent module
+
+    :param int parent_id: Parent ID (component, control, etc.)
+    :param str parent_module: Parent module name (components, controls, etc.)
+    :param Path output_folder: Path to output folder
+    :param Api api: API object
+    :param str module_name: Human-readable module name for logging (optional)
+    :rtype: None
+    """
+    if module_name is None:
+        module_name = parent_module
+
+    try:
+        # Use File model method instead of direct API call
+        files_data = File.get_files_for_parent_from_regscale(api=api, parent_id=parent_id, parent_module=parent_module)
+
+        for file_item in files_data:
+            file_name = file_item.trustedDisplayName or f"file_{file_item.id}"
+            output_path = output_folder / file_name
+
+            try:
+                file_data = File.download_file_from_regscale_to_memory(
+                    api=api,
+                    record_id=file_item.id,
+                    module=parent_module,
+                    stored_name=file_item.trustedStorageName,
+                    file_hash=file_item.fileHash or file_item.shaHash,
+                )
+
+                if file_data is None:
+                    logger.warning(f"No data received for {module_name} file {file_name}")
+                    continue
+
+                with open(output_path, "wb") as f:
+                    f.write(file_data)
+                logger.info(f"Downloaded {module_name} file: {file_name}")
+            except Exception as e:
+                logger.warning(f"Failed to download {module_name} file {file_name}: {e}")
+    except Exception as e:
+        logger.warning(f"Failed to get {module_name} files for {parent_module} {parent_id}: {e}")
+
+
+def _download_component_files(component_id: int, component_folder: Path, api: Api) -> None:
+    """
+    Download files directly attached to a component
+
+    :param int component_id: Component ID
+    :param Path component_folder: Path to component folder
+    :param Api api: API object
+    :rtype: None
+    """
+    _download_files_for_parent(
+        parent_id=component_id,
+        parent_module="components",
+        output_folder=component_folder,
+        api=api,
+        module_name="component",
+    )
+
+
+def _download_control_files(control_id: int, control_folder: Path, api: Api) -> None:
+    """
+    Download files for a control
+
+    :param int control_id: Control ID
+    :param Path control_folder: Path to control folder
+    :param Api api: API object
+    :rtype: None
+    """
+    _download_files_for_parent(
+        parent_id=control_id, parent_module="controls", output_folder=control_folder, api=api, module_name="control"
+    )
+
+
+def _download_control_evidence(control_id: int, control_folder: Path, api: Api) -> None:
+    """
+    Download evidence from evidence lockers for a control
+
+    :param int control_id: Control ID
+    :param Path control_folder: Path to control folder
+    :param Api api: API object
+    :rtype: None
+    """
+    evidence_items = get_evidence_by_control(api, control_id)
+
+    if evidence_items:
+        logger.info(f"Found {len(evidence_items)} evidence items for control {control_folder.name}")
+
+        for evidence_item in evidence_items:
+            file_name = evidence_item.get("trustedDisplayName", f"evidence_{evidence_item.get('id', 'unknown')}")
+            output_path = control_folder / file_name
+
+            if download_evidence_file(api, evidence_item, str(output_path)):
+                logger.info(f"Downloaded evidence file: {file_name}")
+            else:
+                logger.warning(f"Failed to download evidence file: {file_name}")
+
+
+def _process_component_controls(component_id: int, component_folder: Path, api: Api) -> None:
+    """
+    Process controls for a component
+
+    :param int component_id: Component ID
+    :param Path component_folder: Path to component folder
+    :param Api api: API object
+    :rtype: None
+    """
+    controls = get_controls_by_parent(api, component_id, "components")
+
+    if controls:
+        logger.info(f"Found {len(controls)} controls for component {component_folder.name}")
+
+        for control in controls:
+            control_id = control.get("id")
+            control_name = control.get("controlId", f"Control_{control_id}")
+
+            # Create control folder within component folder
+            control_folder = component_folder / control_name
+            os.makedirs(control_folder, exist_ok=True)
+
+            # Download control files and evidence
+            _download_control_files(control_id, control_folder, api)
+            _download_control_evidence(control_id, control_folder, api)
+
+
+def process_components_evidence(ssp_id: int, path: Path, module_folder: Path, api: Api) -> None:
+    """
+    Process components and their evidence
+
+    :param int ssp_id: Security Plan ID
+    :param Path path: directory for file location
+    :param Path module_folder: path to module folder
+    :param Api api: RegScale CLI API object
+    :rtype: None
+    """
+    try:
+        # Get components for the SSP
+        components = get_components_by_ssp(api, ssp_id)
+
+        if not components:
+            logger.info("No active components found for SSP")
+            return
+
+        logger.info(f"Found {len(components)} active components for SSP {ssp_id}")
+
+        for component in components:
+            component_id = component.get("id")
+            component_title = component.get("title", f"Component_{component_id}")
+
+            # Create component folder
+            component_folder = module_folder / component_title
+            os.makedirs(component_folder, exist_ok=True)
+
+            # Download component files
+            _download_component_files(component_id, component_folder, api)
+
+            # Process component controls
+            _process_component_controls(component_id, component_folder, api)
+
+    except Exception as e:
+        logger.warning(f"Error processing components evidence: {e}")
+
+
 def remove_directory(directory_path: Path) -> None:
     """
     This function removes a given directory even if files stored there
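Note: the evidence-locker helpers added in the hunk above (get_evidence_by_control, download_evidence_file, and the process_* functions) are plain module-level functions, so they can also be driven from a script rather than the `evidence` Click commands. A minimal sketch, assuming an already-configured RegScale CLI environment (init.yaml / login) and that the functions are importable from the diffed module; the control implementation ID and output directory are placeholders:

    from pathlib import Path

    from regscale.core.app.api import Api
    from regscale.core.app.internal.evidence import download_evidence_file, get_evidence_by_control

    api = Api()
    control_id = 123  # placeholder: a control implementation ID in your RegScale instance
    out_dir = Path("./evidence_export")
    out_dir.mkdir(parents=True, exist_ok=True)

    # Mirrors what _process_control_evidence_lockers does for a single control:
    # list evidence records on the control, then pull each attachment to disk.
    for item in get_evidence_by_control(api, control_id):
        name = item.get("trustedDisplayName", f"evidence_{item.get('id', 'unknown')}")
        if not download_evidence_file(api, item, str(out_dir / name)):
            print(f"Could not download {name}")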
@@ -374,7 +813,7 @@ def remove_directory(directory_path: Path) -> None:
     :rtype: None
     """
     shutil.rmtree(directory_path.absolute())
-
+    logger.info("Temporary Evidence directory removed successfully!")


 def zip_folder(folder_path: Path, zip_path: Path) -> None:
@@ -397,7 +836,7 @@ def zip_folder(folder_path: Path, zip_path: Path) -> None:
                 # Add the file to the ZIP archive using its relative path
                 zipf.write(file_path, relative_path)  # type: ignore

-
+    logger.info("Folder zipped successfully!")


 def remove(list_to_review: list) -> list:
@@ -462,7 +901,6 @@ def find_signatures(file: str) -> int:
     import pymupdf

     number = 0
-    logger = create_logger()
     # if the file is a pdf document
     if file.endswith(".pdf"):
         try:
@@ -507,20 +945,30 @@ def set_directory_variables(task: TaskID, evidence_folder: str, progress: Progre
     # set evidence folder variable to init.yaml value
     # if evidence folder does not exist then create it so tests will pass
     check_file_path(evidence_folder)
+
     # if evidence folder does not exist or if it is empty then error out
-
+    evidence_items = os.listdir(evidence_folder)
+
+    if evidence_folder is None or len(evidence_items) == 0:
         error_and_exit("The directory set to evidenceFolder cannot be found or is empty.")
     else:
         # otherwise change directory to the evidence folder
         os.chdir(evidence_folder)
     progress.update(task, advance=1)
-
-
-
-
-
-
-
+
+    # include RegScale projects folder or use current directory if no subdirs
+    subdirs = [filename for filename in os.listdir(os.getcwd()) if os.path.isdir(os.path.join(os.getcwd(), filename))]
+
+    if subdirs:
+        # Prefer 'project' directory if it exists, otherwise use the first one
+        if "project" in subdirs:
+            dir_name = "project"
+        else:
+            dir_name = subdirs[0]
+        new_cwd = os.getcwd() + os.sep + dir_name
+    else:
+        dir_name = "evidence"
+        new_cwd = os.getcwd()
     progress.update(task, advance=1)
     # return variables for use outside local scope
     return evidence_folder, dir_name, new_cwd
@@ -543,23 +991,41 @@ def parse_required_docs(evidence_folder: str, task: TaskID, progress: Progress)
     document_list = set()
     progress.update(task, advance=1)
     # open app//evidence//config.json file and read contents
-
-
-
+    config_file = f"{evidence_folder}{os.sep}config.json"
+    if os.path.exists(config_file):
+        with open(config_file, "r", encoding="utf-8") as json_file:
+            # load json object into a readable dictionary
+            rules = json.load(json_file)
+            progress.update(task, advance=1)
+            # loop through required document dicts
+            for i in range(len(rules.get("required-documents", []))):
+                # add to a list of dictionaries for parsing
+                required_docs.append(
+                    {
+                        "file-name": rules["required-documents"][i].get("file-name"),
+                        "last-updated-by": rules["required-documents"][i].get("last-updated-by"),
+                        "signatures-required": rules["required-documents"][i].get("signatures-required"),
+                        "signature-count": rules["required-documents"][i].get("signature-count"),
+                    }
+                )
+                # update contents of list if it does not already exist
+                document_list.add(rules["required-documents"][i].get("file-name"))
+    else:
+        # No config file, use default requirements for any files found
         progress.update(task, advance=1)
-    #
-    for
-
-
-
-
-
-
-
-
-
-
-
+        # Get all files in evidence folder and subfolders
+        for root, dirs, files in os.walk(evidence_folder):
+            for file in files:
+                if not file.startswith(".") and file.lower().endswith((".pdf", ".docx", ".doc", ".txt")):
+                    required_docs.append(
+                        {
+                            "file-name": file,
+                            "last-updated-by": 365,
+                            "signatures-required": False,
+                            "signature-count": 0,
+                        }
+                    )
+                    document_list.add(file)
     progress.update(task, advance=1)
     # return variables for use outside of local scope
     return required_docs, document_list
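For reference, a `config.json` that feeds the branch above would carry `required-documents` entries shaped roughly like this sketch; only the four keys read by the code are grounded in the diff, the concrete values are placeholders:

```python
import json

# Hypothetical evidence/config.json payload, limited to the keys the code reads.
sample_config = {
    "required-documents": [
        {
            "file-name": "security-policy.pdf",   # placeholder document name
            "last-updated-by": 365,               # matches the default used in the no-config fallback
            "signatures-required": True,
            "signature-count": 2,
        }
    ]
}

with open("config.json", "w", encoding="utf-8") as handle:
    json.dump(sample_config, handle, indent=2)
```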
@@ -585,20 +1051,37 @@ def get_doc_timestamps(evidence_folder: str, directory: str, task: TaskID, progr
     # remove any child folders that start with '.'
     new_folders = remove(list_to_review=folders_list)
     progress.update(task, advance=1)
-
-
-
-
-
-
-
+
+    # Check if there are subdirectories
+    subdirs = [f for f in new_folders if os.path.isdir(os.path.join(evidence_folder, f))]
+
+    if subdirs:
+        # loop through directory listing
+        for folder in subdirs:
+            # get list of files in each folder
+            filelist = os.listdir(os.path.join(evidence_folder, folder))
+            # remove any files that start with '.'
+            filelist = remove(filelist)
+            # loop through list of files in each folder
+            modified_times.extend(
+                {
+                    "program": folder,
+                    "file": filename,
+                    "last-modified": os.path.getmtime(os.path.join(directory, folder, filename)),
+                }
+                for filename in filelist
+            )
+    else:
+        # No subdirectories, process files directly in evidence folder
+        files = [f for f in new_folders if os.path.isfile(os.path.join(evidence_folder, f))]
+        files = remove(files)
         modified_times.extend(
             {
-                "program":
+                "program": "evidence",
                 "file": filename,
-                "last-modified": os.path.getmtime(os.path.join(
+                "last-modified": os.path.getmtime(os.path.join(evidence_folder, filename)),
             }
-            for filename in
+            for filename in files
         )
     progress.update(task, advance=1)
     # loop through the list of timestamps
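The `last-modified` values gathered above are the raw epoch floats returned by `os.path.getmtime`; a small sketch of turning one into an age in days for a later freshness check (the one-year threshold is only illustrative):

```python
import os
import time


def age_in_days(path: str) -> float:
    """Days elapsed since the file at *path* was last modified."""
    return (time.time() - os.path.getmtime(path)) / 86400.0


# Illustrative check against a hypothetical one-year window:
# is_stale = age_in_days("evidence/project/security-policy.pdf") > 365
```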
@@ -624,17 +1107,22 @@ def set_required_texts(evidence_folder: str, task: TaskID, progress: Progress) -
     required_text = set()
     progress.update(task, advance=1)
     # open app//evidence//config.json file and read contents
-
-
-
-
-
-
-    #
-    for
-    #
-
+    config_file = f"{evidence_folder}{os.sep}config.json"
+    if os.path.exists(config_file):
+        with open(config_file, "r", encoding="utf-8") as json_file:
+            # load json object into a readable dictionary
+            rules = json.load(json_file)
+            progress.update(task, advance=1)
+            # create iterator to traverse dictionary
+            for i in range(len(rules.get("rules-engine", []))):
+                # pull out required text to look for from config
+                for items in rules["rules-engine"][i].get("text-to-find", []):
+                    # exclude duplicate text to search from required text
+                    required_text.add(items)
+    else:
+        # No config file, use default text requirements
         progress.update(task, advance=1)
+        required_text = {"security policy", "risk assessment", "compliance", "control", "audit"}
     # return variable for use outside of local scope
     return required_text

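The `rules-engine` section consumed here only needs a `text-to-find` list per entry; a guessed minimal shape (any field beyond those two names is an assumption), together with the same de-duplication the loop above performs:

```python
# Hypothetical rules-engine section of evidence/config.json.
sample_rules = {
    "rules-engine": [
        {"text-to-find": ["security policy", "risk assessment"]},
        {"text-to-find": ["audit", "risk assessment"]},
    ]
}

# Collect unique phrases across all entries, as the loop above does with a set.
required_text = {
    phrase
    for entry in sample_rules["rules-engine"]
    for phrase in entry.get("text-to-find", [])
}
```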
@@ -658,17 +1146,53 @@ def find_required_files_in_folder(evidence_folder: str, task: TaskID, progress:
     # remove any folders starting with '.' from list
     new_folders_list = remove(folder_list)
     progress.update(task, advance=1)
-
-
-
-
-
-
+
+    # Check if there are subdirectories
+    subdirs = [f for f in new_folders_list if os.path.isdir(os.path.join(evidence_folder, f))]
+
+    if subdirs:
+        for folder in subdirs:
+            # build a list of all files contained in sub-directories
+            filelist = os.listdir(evidence_folder + os.sep + folder)
+            # remove folders and file names that start with a .
+            filelist = remove(filelist)
+            dir_list.extend({"program": folder, "file": filename} for filename in filelist)
+    else:
+        # No subdirectories, process files directly in evidence folder
+        files = [f for f in new_folders_list if os.path.isfile(os.path.join(evidence_folder, f))]
+        files = remove(files)
+        dir_list.extend({"program": "evidence", "file": filename} for filename in files)
     progress.update(task, advance=1)
     # return variable for use outside of local scope
     return dir_list


+def _create_signature_result(program: str, filename: str, test_name: str, result: bool) -> dict:
+    """Helper function to create signature assessment result"""
+    return {
+        "program": program,
+        "file": filename,
+        "test": test_name,
+        "result": result,
+    }
+
+
+def _assess_signature_requirement(doc_file: dict, required: dict) -> list[dict]:
+    """Helper function to assess signature requirements for a document"""
+    results = []
+
+    if required["signatures-required"] is True:
+        sig_result = find_signatures(doc_file["file"])
+        test_name = "signature-required"
+        result = sig_result == 3
+        results.append(_create_signature_result(doc_file["program"], doc_file["file"], test_name, result))
+    elif required["signatures-required"] is False:
+        test_name = "signature-required (not required)"
+        results.append(_create_signature_result(doc_file["program"], doc_file["file"], test_name, True))
+
+    return results
+
+
 def signature_assessment_results(
     directory: list[dict], r_docs: list[dict], task: TaskID, progress: Progress
 ) -> list[dict]:
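A quick usage sketch of the new `_assess_signature_requirement` helper; the import path follows this module, the file names are invented, and a passing result still hinges on `find_signatures` returning 3, which is what the code above treats as a pass:

```python
# Illustrative only: exercising the private helper added in this diff.
from regscale.core.app.internal.evidence import _assess_signature_requirement

doc_file = {"program": "project", "file": "security-policy.pdf"}              # hypothetical
required = {"file-name": "security-policy.pdf", "signatures-required": True}  # hypothetical

results = _assess_signature_requirement(doc_file, required)
# One dict per assessed document; "result" is True only when
# find_signatures("security-policy.pdf") returned 3.
print(results)
```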
@@ -682,52 +1206,15 @@ def signature_assessment_results(
     :return: Assessment of signatures
     :rtype: list[dict]
     """
-    # create empty list to hold assessment results
     sig_assessments: list[dict] = []
     progress.update(task, advance=1)
-
+
     for doc_file in directory:
         for required in r_docs:
             if doc_file["file"] == required["file-name"]:
-
-
-                # run the signature detection function for the file
-                sig_result = find_signatures(doc_file["file"])
-                # if the return value is 3 pass the test
-                if sig_result == 3:
-                    # append a true result for each document tested
-                    sig_assessments.append(
-                        {
-                            "program": doc_file["program"],
-                            "file": doc_file["file"],
-                            "test": "signature-required",
-                            "result": True,
-                        }
-                    )
-                # if the return value is 1, -1 or 0 fail the test
-                else:
-                    # append a false result for each document tested
-                    sig_assessments.append(
-                        {
-                            "program": doc_file["program"],
-                            "file": doc_file["file"],
-                            "test": "signature-required",
-                            "result": False,
-                        }
-                    )
-                # if the signatures-required field is set to false
-                if required["signatures-required"] is False:
-                    # append a true result for each document not requiring a signature
-                    sig_assessments.append(
-                        {
-                            "program": doc_file["program"],
-                            "file": doc_file["file"],
-                            "test": "signature-required (not required)",
-                            "result": True,
-                        }
-                    )
+                sig_assessments.extend(_assess_signature_requirement(doc_file, required))
+
     progress.update(task, advance=1)
-    # return variable for use outside of local scope
     return sig_assessments


@@ -775,6 +1262,50 @@ def document_assessment_results(
     return doc_assessments


+def _extract_docx_text(file_path: str) -> list[str]:
+    """Helper function to extract text from DOCX files"""
+    document = Document(file_path)
+    return [para.text for para in document.paragraphs]
+
+
+def _extract_pdf_text(file_path: str) -> list[str]:
+    """Helper function to extract text from PDF files"""
+    output_text_list: list[str] = []
+    with pdfplumber.open(file_path) as pdf:
+        for page in pdf.pages:
+            text = page.extract_text()
+            if text:  # Only append non-None text
+                output_text_list.append(text)
+    return output_text_list
+
+
+def _process_file_for_text(filename: str, file_path: str, program: str) -> dict | None:
+    """Helper function to process a single file and extract text"""
+    if filename.endswith(".docx"):
+        text = _extract_docx_text(file_path)
+    elif filename.endswith(".pdf"):
+        text = _extract_pdf_text(file_path)
+    else:
+        return None
+
+    return {"program": program, "file": filename, "text": text}
+
+
+def _process_files_in_folder(folder_path: str, program: str) -> list[dict]:
+    """Helper function to process all files in a specific folder"""
+    results = []
+    file_list = os.listdir(folder_path)
+    file_list = remove(file_list)
+
+    for filename in file_list:
+        file_path = os.path.join(folder_path, filename)
+        result = _process_file_for_text(filename, file_path, program)
+        if result:
+            results.append(result)
+
+    return results
+
+
 def parse_required_text_from_files(evidence_folder: str, task: TaskID, progress: Progress) -> list[dict]:
     """
     Parse text from docx/pdf file and hold strings representing required text to test
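The new extraction helpers rely on `python-docx` and `pdfplumber`; a self-contained sketch of the same idea for a single file, under the assumption that both libraries are installed (the path is a placeholder):

```python
import pdfplumber
from docx import Document


def extract_text(file_path: str) -> list[str]:
    """Return text chunks from a .docx or .pdf file, mirroring the helpers above."""
    if file_path.endswith(".docx"):
        return [para.text for para in Document(file_path).paragraphs]
    if file_path.endswith(".pdf"):
        with pdfplumber.open(file_path) as pdf:
            # extract_text() can return None for image-only pages, so filter those out
            return [text for page in pdf.pages if (text := page.extract_text())]
    return []


# e.g. chunks = extract_text("evidence/project/security-policy.pdf")
```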
@@ -785,51 +1316,26 @@ def parse_required_text_from_files(evidence_folder: str, task: TaskID, progress:
     :return: Results of text found for the files
     :rtype: list[dict]
     """
-    # create an empty list to hold all strings from parsed documents
     full_text: list[dict] = []
     progress.update(task, advance=1)
-
+
     folder_list = os.listdir(evidence_folder)
     progress.update(task, advance=1)
-    # remove all folders that start with '.'
     removed_folders_list = remove(folder_list)
     progress.update(task, advance=1)
-
-
-
-
-
-    for
-
-
-
-
-
-
-                full_text.append({"program": folder, "file": filename, "text": output})
-            elif filename.endswith(".pdf"):
-                # create empty list to hold text per file
-                output_text_list: list[str] = []
-                # open filename with pdfplumber
-                with pdfplumber.open(filename) as pdf:
-                    # set number of pages
-                    pages = pdf.pages
-                    # for each page in the pdf document
-                    for page in pages:
-                        # extract the text
-                        text = page.extract_text()
-                        # write the text to a list
-                        output_text_list.append(text)
-                # add each file and the requisite text to the dictionary to test
-                full_text.append(
-                    {
-                        "program": folder,
-                        "file": filename,
-                        "text": output_text_list,
-                    }
-                )
+
+    # Check if there are subdirectories
+    subdirs = [f for f in removed_folders_list if os.path.isdir(os.path.join(evidence_folder, f))]
+
+    if subdirs:
+        for folder in subdirs:
+            folder_path = os.path.join(evidence_folder, folder)
+            full_text.extend(_process_files_in_folder(folder_path, folder))
+    else:
+        # No subdirectories, process files directly in evidence folder
+        full_text.extend(_process_files_in_folder(evidence_folder, "evidence"))
+
     progress.update(task, advance=1)
-    # return variable for use outside of local scope
     return full_text


@@ -924,17 +1430,22 @@ def gather_test_project_data(api: Api, evidence_folder: str, task: TaskID, progr
     test_data: list[dict] = []
     progress.update(task, advance=1)
     # test project information created in RegScale UI
-
-
-
-
-
-
-
-
-
-
-
+    list_file = evidence_folder + os.sep + "list.json"
+    if os.path.exists(list_file):
+        with open(list_file, "r", encoding="utf-8") as json_file:
+            # load json object into a readable dictionary
+            lists = json.load(json_file)
+            # loop through projects in the list.json
+            test_data.extend(
+                {
+                    "id": lists["parser-list"][i].get("id"),
+                    "program": lists["parser-list"][i].get("folder-name"),
+                }
+                for i in range(len(lists.get("parser-list", [])))
+            )
+    else:
+        # No list.json, skip project data - evidence collection can work without it
+        test_data = []
     progress.update(task, advance=1)
     # create empty list to hold json response data for each project
     test_info: list[dict] = []
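Likewise, the `list.json` read above only needs a `parser-list` whose entries carry `id` and `folder-name`; a guessed minimal example with placeholder values:

```python
import json

# Hypothetical evidence/list.json mapping RegScale project IDs to evidence subfolders.
sample_list = {
    "parser-list": [
        {"id": 123, "folder-name": "project"},  # placeholder project ID and folder
    ]
}

with open("list.json", "w", encoding="utf-8") as handle:
    json.dump(sample_list, handle, indent=2)
```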
@@ -956,7 +1467,7 @@ def gather_test_project_data(api: Api, evidence_folder: str, task: TaskID, progr
                 }
             )
         else:
-            api.logger.
+            api.logger.warning(f"Project data retrieval was unsuccessful for ID {item['id']}, skipping this project.")
     progress.update(task, advance=1)
     # return variables for use outside of local scope
     return test_info
@@ -1031,19 +1542,8 @@ def assessments_report(
     :rtype: list[dict]
     """
     progress.update(task, advance=1)
-
-
-    # append all results to 1 master list
-    assessment_report.extend(iter(textres))
-    progress.update(task, advance=1)
-    # append all results to 1 master list
-    assessment_report.extend(iter(timeres))
-    progress.update(task, advance=1)
-    # append all results to 1 master list
-    assessment_report.extend(iter(sigres))
-    progress.update(task, advance=1)
-    # return variable for use outside of local scope
-    return assessment_report
+    # combine all results into one master list
+    return docres + textres + timeres + sigres


 def build_assessment_dataframe(assessments: list[dict], task: TaskID, progress: Progress) -> list[dict]:
@@ -1061,6 +1561,11 @@ def build_assessment_dataframe(assessments: list[dict], task: TaskID, progress:

     result_df = pd.DataFrame(assessments)
     progress.update(task, advance=1)
+
+    # Check if dataframe is empty
+    if result_df.empty:
+        return []
+
     # fill in NaN cells
     result_df = result_df.fillna(" ")
     progress.update(task, advance=1)
@@ -1148,9 +1653,21 @@ def build_html_table(assessments: list[dict], task: TaskID, progress: Progress)
     import pandas as pd  # Optimize import performance

     output_list: list[dict] = []
+
+    # Check if assessments is empty
+    if not assessments:
+        progress.update(task, advance=4)  # Skip all remaining progress updates
+        return output_list
+
     # create a dataframe of a list of dicts
     table_df = pd.DataFrame(data=assessments)
     progress.update(task, advance=1)
+
+    # Check if dataframe is empty or missing required columns
+    if table_df.empty or "program" not in table_df.columns:
+        progress.update(task, advance=3)  # Skip remaining progress updates
+        return output_list
+
     # fill in N/A cells with blank string
     table_df = table_df.fillna(" ")
     progress.update(task, advance=1)
@@ -1197,6 +1714,12 @@ def create_child_assessments(
     # set completion datetime to required format
     completion_date = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
     progress.update(task, advance=1)
+
+    # Check if we have project data to work with
+    if not project_data:
+        progress.update(task, advance=1)
+        return
+
     # loop through test projects and make an API call for each
     for i, project in enumerate(project_data):
         # call score calculation function