regscale_cli-6.16.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/__init__.py +1 -0
- regscale/airflow/__init__.py +9 -0
- regscale/airflow/azure/__init__.py +9 -0
- regscale/airflow/azure/cli.py +89 -0
- regscale/airflow/azure/upload_dags.py +116 -0
- regscale/airflow/click_dags.py +127 -0
- regscale/airflow/click_mixins.py +82 -0
- regscale/airflow/config.py +25 -0
- regscale/airflow/factories/__init__.py +0 -0
- regscale/airflow/factories/connections.py +58 -0
- regscale/airflow/factories/workflows.py +78 -0
- regscale/airflow/hierarchy.py +88 -0
- regscale/airflow/operators/__init__.py +0 -0
- regscale/airflow/operators/click.py +36 -0
- regscale/airflow/sensors/__init__.py +0 -0
- regscale/airflow/sensors/sql.py +107 -0
- regscale/airflow/sessions/__init__.py +0 -0
- regscale/airflow/sessions/sql/__init__.py +3 -0
- regscale/airflow/sessions/sql/queries.py +64 -0
- regscale/airflow/sessions/sql/sql_server_queries.py +248 -0
- regscale/airflow/tasks/__init__.py +0 -0
- regscale/airflow/tasks/branches.py +22 -0
- regscale/airflow/tasks/cli.py +116 -0
- regscale/airflow/tasks/click.py +73 -0
- regscale/airflow/tasks/debugging.py +9 -0
- regscale/airflow/tasks/groups.py +116 -0
- regscale/airflow/tasks/init.py +60 -0
- regscale/airflow/tasks/states.py +47 -0
- regscale/airflow/tasks/workflows.py +36 -0
- regscale/ansible/__init__.py +9 -0
- regscale/core/__init__.py +0 -0
- regscale/core/app/__init__.py +3 -0
- regscale/core/app/api.py +571 -0
- regscale/core/app/application.py +665 -0
- regscale/core/app/internal/__init__.py +136 -0
- regscale/core/app/internal/admin_actions.py +230 -0
- regscale/core/app/internal/assessments_editor.py +873 -0
- regscale/core/app/internal/catalog.py +316 -0
- regscale/core/app/internal/comparison.py +459 -0
- regscale/core/app/internal/control_editor.py +571 -0
- regscale/core/app/internal/encrypt.py +79 -0
- regscale/core/app/internal/evidence.py +1240 -0
- regscale/core/app/internal/file_uploads.py +151 -0
- regscale/core/app/internal/healthcheck.py +66 -0
- regscale/core/app/internal/login.py +305 -0
- regscale/core/app/internal/migrations.py +240 -0
- regscale/core/app/internal/model_editor.py +1701 -0
- regscale/core/app/internal/poam_editor.py +632 -0
- regscale/core/app/internal/workflow.py +105 -0
- regscale/core/app/logz.py +74 -0
- regscale/core/app/utils/XMLIR.py +258 -0
- regscale/core/app/utils/__init__.py +0 -0
- regscale/core/app/utils/api_handler.py +358 -0
- regscale/core/app/utils/app_utils.py +1110 -0
- regscale/core/app/utils/catalog_utils/__init__.py +0 -0
- regscale/core/app/utils/catalog_utils/common.py +91 -0
- regscale/core/app/utils/catalog_utils/compare_catalog.py +193 -0
- regscale/core/app/utils/catalog_utils/diagnostic_catalog.py +97 -0
- regscale/core/app/utils/catalog_utils/download_catalog.py +103 -0
- regscale/core/app/utils/catalog_utils/update_catalog.py +718 -0
- regscale/core/app/utils/catalog_utils/update_catalog_v2.py +1378 -0
- regscale/core/app/utils/catalog_utils/update_catalog_v3.py +1272 -0
- regscale/core/app/utils/catalog_utils/update_plans.py +334 -0
- regscale/core/app/utils/file_utils.py +238 -0
- regscale/core/app/utils/parser_utils.py +81 -0
- regscale/core/app/utils/pickle_file_handler.py +57 -0
- regscale/core/app/utils/regscale_utils.py +319 -0
- regscale/core/app/utils/report_utils.py +119 -0
- regscale/core/app/utils/variables.py +226 -0
- regscale/core/decorators.py +31 -0
- regscale/core/lazy_group.py +65 -0
- regscale/core/login.py +63 -0
- regscale/core/server/__init__.py +0 -0
- regscale/core/server/flask_api.py +473 -0
- regscale/core/server/helpers.py +373 -0
- regscale/core/server/rest.py +64 -0
- regscale/core/server/static/css/bootstrap.css +6030 -0
- regscale/core/server/static/css/bootstrap.min.css +6 -0
- regscale/core/server/static/css/main.css +176 -0
- regscale/core/server/static/images/regscale-cli.svg +49 -0
- regscale/core/server/static/images/regscale.svg +38 -0
- regscale/core/server/templates/base.html +74 -0
- regscale/core/server/templates/index.html +43 -0
- regscale/core/server/templates/login.html +28 -0
- regscale/core/server/templates/make_base64.html +22 -0
- regscale/core/server/templates/upload_STIG.html +109 -0
- regscale/core/server/templates/upload_STIG_result.html +26 -0
- regscale/core/server/templates/upload_ssp.html +144 -0
- regscale/core/server/templates/upload_ssp_result.html +128 -0
- regscale/core/static/__init__.py +0 -0
- regscale/core/static/regex.py +14 -0
- regscale/core/utils/__init__.py +117 -0
- regscale/core/utils/click_utils.py +13 -0
- regscale/core/utils/date.py +238 -0
- regscale/core/utils/graphql.py +254 -0
- regscale/core/utils/urls.py +23 -0
- regscale/dev/__init__.py +6 -0
- regscale/dev/analysis.py +454 -0
- regscale/dev/cli.py +235 -0
- regscale/dev/code_gen.py +492 -0
- regscale/dev/dirs.py +69 -0
- regscale/dev/docs.py +384 -0
- regscale/dev/monitoring.py +26 -0
- regscale/dev/profiling.py +216 -0
- regscale/exceptions/__init__.py +4 -0
- regscale/exceptions/license_exception.py +7 -0
- regscale/exceptions/validation_exception.py +9 -0
- regscale/integrations/__init__.py +1 -0
- regscale/integrations/commercial/__init__.py +486 -0
- regscale/integrations/commercial/ad.py +433 -0
- regscale/integrations/commercial/amazon/__init__.py +0 -0
- regscale/integrations/commercial/amazon/common.py +106 -0
- regscale/integrations/commercial/aqua/__init__.py +0 -0
- regscale/integrations/commercial/aqua/aqua.py +91 -0
- regscale/integrations/commercial/aws/__init__.py +6 -0
- regscale/integrations/commercial/aws/cli.py +322 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +110 -0
- regscale/integrations/commercial/aws/inventory/base.py +64 -0
- regscale/integrations/commercial/aws/inventory/resources/__init__.py +19 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +234 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +113 -0
- regscale/integrations/commercial/aws/inventory/resources/database.py +101 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +237 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +253 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +240 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +91 -0
- regscale/integrations/commercial/aws/scanner.py +823 -0
- regscale/integrations/commercial/azure/__init__.py +0 -0
- regscale/integrations/commercial/azure/common.py +32 -0
- regscale/integrations/commercial/azure/intune.py +488 -0
- regscale/integrations/commercial/azure/scanner.py +49 -0
- regscale/integrations/commercial/burp.py +78 -0
- regscale/integrations/commercial/cpe.py +144 -0
- regscale/integrations/commercial/crowdstrike.py +1117 -0
- regscale/integrations/commercial/defender.py +1511 -0
- regscale/integrations/commercial/dependabot.py +210 -0
- regscale/integrations/commercial/durosuite/__init__.py +0 -0
- regscale/integrations/commercial/durosuite/api.py +1546 -0
- regscale/integrations/commercial/durosuite/process_devices.py +101 -0
- regscale/integrations/commercial/durosuite/scanner.py +637 -0
- regscale/integrations/commercial/durosuite/variables.py +21 -0
- regscale/integrations/commercial/ecr.py +90 -0
- regscale/integrations/commercial/gcp/__init__.py +237 -0
- regscale/integrations/commercial/gcp/auth.py +96 -0
- regscale/integrations/commercial/gcp/control_tests.py +238 -0
- regscale/integrations/commercial/gcp/variables.py +18 -0
- regscale/integrations/commercial/gitlab.py +332 -0
- regscale/integrations/commercial/grype.py +165 -0
- regscale/integrations/commercial/ibm.py +90 -0
- regscale/integrations/commercial/import_all/__init__.py +0 -0
- regscale/integrations/commercial/import_all/import_all_cmd.py +467 -0
- regscale/integrations/commercial/import_all/scan_file_fingerprints.json +27 -0
- regscale/integrations/commercial/jira.py +1046 -0
- regscale/integrations/commercial/mappings/__init__.py +0 -0
- regscale/integrations/commercial/mappings/csf_controls.json +713 -0
- regscale/integrations/commercial/mappings/nist_800_53_r5_controls.json +1516 -0
- regscale/integrations/commercial/nessus/__init__.py +0 -0
- regscale/integrations/commercial/nessus/nessus_utils.py +429 -0
- regscale/integrations/commercial/nessus/scanner.py +416 -0
- regscale/integrations/commercial/nexpose.py +90 -0
- regscale/integrations/commercial/okta.py +798 -0
- regscale/integrations/commercial/opentext/__init__.py +0 -0
- regscale/integrations/commercial/opentext/click.py +99 -0
- regscale/integrations/commercial/opentext/scanner.py +143 -0
- regscale/integrations/commercial/prisma.py +91 -0
- regscale/integrations/commercial/qualys.py +1462 -0
- regscale/integrations/commercial/salesforce.py +980 -0
- regscale/integrations/commercial/sap/__init__.py +0 -0
- regscale/integrations/commercial/sap/click.py +31 -0
- regscale/integrations/commercial/sap/sysdig/__init__.py +0 -0
- regscale/integrations/commercial/sap/sysdig/click.py +57 -0
- regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +190 -0
- regscale/integrations/commercial/sap/tenable/__init__.py +0 -0
- regscale/integrations/commercial/sap/tenable/click.py +49 -0
- regscale/integrations/commercial/sap/tenable/scanner.py +196 -0
- regscale/integrations/commercial/servicenow.py +1756 -0
- regscale/integrations/commercial/sicura/__init__.py +0 -0
- regscale/integrations/commercial/sicura/api.py +855 -0
- regscale/integrations/commercial/sicura/commands.py +73 -0
- regscale/integrations/commercial/sicura/scanner.py +481 -0
- regscale/integrations/commercial/sicura/variables.py +16 -0
- regscale/integrations/commercial/snyk.py +90 -0
- regscale/integrations/commercial/sonarcloud.py +260 -0
- regscale/integrations/commercial/sqlserver.py +369 -0
- regscale/integrations/commercial/stig_mapper_integration/__init__.py +0 -0
- regscale/integrations/commercial/stig_mapper_integration/click_commands.py +38 -0
- regscale/integrations/commercial/stig_mapper_integration/mapping_engine.py +353 -0
- regscale/integrations/commercial/stigv2/__init__.py +0 -0
- regscale/integrations/commercial/stigv2/ckl_parser.py +349 -0
- regscale/integrations/commercial/stigv2/click_commands.py +95 -0
- regscale/integrations/commercial/stigv2/stig_integration.py +202 -0
- regscale/integrations/commercial/synqly/__init__.py +0 -0
- regscale/integrations/commercial/synqly/assets.py +46 -0
- regscale/integrations/commercial/synqly/ticketing.py +132 -0
- regscale/integrations/commercial/synqly/vulnerabilities.py +223 -0
- regscale/integrations/commercial/synqly_jira.py +840 -0
- regscale/integrations/commercial/tenablev2/__init__.py +0 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +31 -0
- regscale/integrations/commercial/tenablev2/click.py +1584 -0
- regscale/integrations/commercial/tenablev2/scanner.py +504 -0
- regscale/integrations/commercial/tenablev2/stig_parsers.py +140 -0
- regscale/integrations/commercial/tenablev2/utils.py +78 -0
- regscale/integrations/commercial/tenablev2/variables.py +17 -0
- regscale/integrations/commercial/trivy.py +162 -0
- regscale/integrations/commercial/veracode.py +96 -0
- regscale/integrations/commercial/wizv2/WizDataMixin.py +97 -0
- regscale/integrations/commercial/wizv2/__init__.py +0 -0
- regscale/integrations/commercial/wizv2/click.py +429 -0
- regscale/integrations/commercial/wizv2/constants.py +1001 -0
- regscale/integrations/commercial/wizv2/issue.py +361 -0
- regscale/integrations/commercial/wizv2/models.py +112 -0
- regscale/integrations/commercial/wizv2/parsers.py +339 -0
- regscale/integrations/commercial/wizv2/sbom.py +115 -0
- regscale/integrations/commercial/wizv2/scanner.py +416 -0
- regscale/integrations/commercial/wizv2/utils.py +796 -0
- regscale/integrations/commercial/wizv2/variables.py +39 -0
- regscale/integrations/commercial/wizv2/wiz_auth.py +159 -0
- regscale/integrations/commercial/xray.py +91 -0
- regscale/integrations/integration/__init__.py +2 -0
- regscale/integrations/integration/integration.py +26 -0
- regscale/integrations/integration/inventory.py +17 -0
- regscale/integrations/integration/issue.py +100 -0
- regscale/integrations/integration_override.py +149 -0
- regscale/integrations/public/__init__.py +103 -0
- regscale/integrations/public/cisa.py +641 -0
- regscale/integrations/public/criticality_updater.py +70 -0
- regscale/integrations/public/emass.py +411 -0
- regscale/integrations/public/emass_slcm_import.py +697 -0
- regscale/integrations/public/fedramp/__init__.py +0 -0
- regscale/integrations/public/fedramp/appendix_parser.py +548 -0
- regscale/integrations/public/fedramp/click.py +479 -0
- regscale/integrations/public/fedramp/components.py +714 -0
- regscale/integrations/public/fedramp/docx_parser.py +259 -0
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +1124 -0
- regscale/integrations/public/fedramp/fedramp_common.py +3181 -0
- regscale/integrations/public/fedramp/fedramp_docx.py +388 -0
- regscale/integrations/public/fedramp/fedramp_five.py +2343 -0
- regscale/integrations/public/fedramp/fedramp_traversal.py +138 -0
- regscale/integrations/public/fedramp/import_fedramp_r4_ssp.py +279 -0
- regscale/integrations/public/fedramp/import_workbook.py +495 -0
- regscale/integrations/public/fedramp/inventory_items.py +244 -0
- regscale/integrations/public/fedramp/mappings/__init__.py +0 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +7388 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r5_params.json +8636 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +9605 -0
- regscale/integrations/public/fedramp/mappings/system_roles.py +34 -0
- regscale/integrations/public/fedramp/mappings/user.py +175 -0
- regscale/integrations/public/fedramp/mappings/values.py +141 -0
- regscale/integrations/public/fedramp/markdown_parser.py +150 -0
- regscale/integrations/public/fedramp/metadata.py +689 -0
- regscale/integrations/public/fedramp/models/__init__.py +59 -0
- regscale/integrations/public/fedramp/models/leveraged_auth_new.py +168 -0
- regscale/integrations/public/fedramp/models/poam_importer.py +522 -0
- regscale/integrations/public/fedramp/parts_mapper.py +107 -0
- regscale/integrations/public/fedramp/poam/__init__.py +0 -0
- regscale/integrations/public/fedramp/poam/scanner.py +851 -0
- regscale/integrations/public/fedramp/properties.py +201 -0
- regscale/integrations/public/fedramp/reporting.py +84 -0
- regscale/integrations/public/fedramp/resources.py +496 -0
- regscale/integrations/public/fedramp/rosetta.py +110 -0
- regscale/integrations/public/fedramp/ssp_logger.py +87 -0
- regscale/integrations/public/fedramp/system_characteristics.py +922 -0
- regscale/integrations/public/fedramp/system_control_implementations.py +582 -0
- regscale/integrations/public/fedramp/system_implementation.py +190 -0
- regscale/integrations/public/fedramp/xml_utils.py +87 -0
- regscale/integrations/public/nist_catalog.py +275 -0
- regscale/integrations/public/oscal.py +1946 -0
- regscale/integrations/public/otx.py +169 -0
- regscale/integrations/scanner_integration.py +2692 -0
- regscale/integrations/variables.py +25 -0
- regscale/models/__init__.py +7 -0
- regscale/models/app_models/__init__.py +5 -0
- regscale/models/app_models/catalog_compare.py +213 -0
- regscale/models/app_models/click.py +252 -0
- regscale/models/app_models/datetime_encoder.py +21 -0
- regscale/models/app_models/import_validater.py +321 -0
- regscale/models/app_models/mapping.py +260 -0
- regscale/models/app_models/pipeline.py +37 -0
- regscale/models/click_models.py +413 -0
- regscale/models/config.py +154 -0
- regscale/models/email_style.css +67 -0
- regscale/models/hierarchy.py +8 -0
- regscale/models/inspect_models.py +79 -0
- regscale/models/integration_models/__init__.py +0 -0
- regscale/models/integration_models/amazon_models/__init__.py +0 -0
- regscale/models/integration_models/amazon_models/inspector.py +262 -0
- regscale/models/integration_models/amazon_models/inspector_scan.py +206 -0
- regscale/models/integration_models/aqua.py +247 -0
- regscale/models/integration_models/azure_alerts.py +255 -0
- regscale/models/integration_models/base64.py +23 -0
- regscale/models/integration_models/burp.py +433 -0
- regscale/models/integration_models/burp_models.py +128 -0
- regscale/models/integration_models/cisa_kev_data.json +19333 -0
- regscale/models/integration_models/defender_data.py +93 -0
- regscale/models/integration_models/defenderimport.py +143 -0
- regscale/models/integration_models/drf.py +443 -0
- regscale/models/integration_models/ecr_models/__init__.py +0 -0
- regscale/models/integration_models/ecr_models/data.py +69 -0
- regscale/models/integration_models/ecr_models/ecr.py +239 -0
- regscale/models/integration_models/flat_file_importer.py +1079 -0
- regscale/models/integration_models/grype_import.py +247 -0
- regscale/models/integration_models/ibm.py +126 -0
- regscale/models/integration_models/implementation_results.py +85 -0
- regscale/models/integration_models/nexpose.py +140 -0
- regscale/models/integration_models/prisma.py +202 -0
- regscale/models/integration_models/qualys.py +720 -0
- regscale/models/integration_models/qualys_scanner.py +160 -0
- regscale/models/integration_models/sbom/__init__.py +0 -0
- regscale/models/integration_models/sbom/cyclone_dx.py +139 -0
- regscale/models/integration_models/send_reminders.py +620 -0
- regscale/models/integration_models/snyk.py +155 -0
- regscale/models/integration_models/synqly_models/__init__.py +0 -0
- regscale/models/integration_models/synqly_models/capabilities.json +1 -0
- regscale/models/integration_models/synqly_models/connector_types.py +22 -0
- regscale/models/integration_models/synqly_models/connectors/__init__.py +7 -0
- regscale/models/integration_models/synqly_models/connectors/assets.py +97 -0
- regscale/models/integration_models/synqly_models/connectors/ticketing.py +583 -0
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +169 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +331 -0
- regscale/models/integration_models/synqly_models/param.py +72 -0
- regscale/models/integration_models/synqly_models/synqly_model.py +733 -0
- regscale/models/integration_models/synqly_models/tenants.py +39 -0
- regscale/models/integration_models/tenable_models/__init__.py +0 -0
- regscale/models/integration_models/tenable_models/integration.py +187 -0
- regscale/models/integration_models/tenable_models/models.py +513 -0
- regscale/models/integration_models/trivy_import.py +231 -0
- regscale/models/integration_models/veracode.py +217 -0
- regscale/models/integration_models/xray.py +135 -0
- regscale/models/locking.py +100 -0
- regscale/models/platform.py +110 -0
- regscale/models/regscale_models/__init__.py +67 -0
- regscale/models/regscale_models/assessment.py +570 -0
- regscale/models/regscale_models/assessment_plan.py +52 -0
- regscale/models/regscale_models/asset.py +567 -0
- regscale/models/regscale_models/asset_mapping.py +190 -0
- regscale/models/regscale_models/case.py +42 -0
- regscale/models/regscale_models/catalog.py +261 -0
- regscale/models/regscale_models/cci.py +46 -0
- regscale/models/regscale_models/change.py +167 -0
- regscale/models/regscale_models/checklist.py +372 -0
- regscale/models/regscale_models/comment.py +49 -0
- regscale/models/regscale_models/compliance_settings.py +112 -0
- regscale/models/regscale_models/component.py +412 -0
- regscale/models/regscale_models/component_mapping.py +65 -0
- regscale/models/regscale_models/control.py +38 -0
- regscale/models/regscale_models/control_implementation.py +1128 -0
- regscale/models/regscale_models/control_objective.py +261 -0
- regscale/models/regscale_models/control_parameter.py +100 -0
- regscale/models/regscale_models/control_test.py +34 -0
- regscale/models/regscale_models/control_test_plan.py +75 -0
- regscale/models/regscale_models/control_test_result.py +52 -0
- regscale/models/regscale_models/custom_field.py +245 -0
- regscale/models/regscale_models/data.py +109 -0
- regscale/models/regscale_models/data_center.py +40 -0
- regscale/models/regscale_models/deviation.py +203 -0
- regscale/models/regscale_models/email.py +97 -0
- regscale/models/regscale_models/evidence.py +47 -0
- regscale/models/regscale_models/evidence_mapping.py +40 -0
- regscale/models/regscale_models/facility.py +59 -0
- regscale/models/regscale_models/file.py +382 -0
- regscale/models/regscale_models/filetag.py +37 -0
- regscale/models/regscale_models/form_field_value.py +94 -0
- regscale/models/regscale_models/group.py +169 -0
- regscale/models/regscale_models/implementation_objective.py +335 -0
- regscale/models/regscale_models/implementation_option.py +275 -0
- regscale/models/regscale_models/implementation_role.py +33 -0
- regscale/models/regscale_models/incident.py +177 -0
- regscale/models/regscale_models/interconnection.py +43 -0
- regscale/models/regscale_models/issue.py +1176 -0
- regscale/models/regscale_models/leveraged_authorization.py +125 -0
- regscale/models/regscale_models/line_of_inquiry.py +52 -0
- regscale/models/regscale_models/link.py +205 -0
- regscale/models/regscale_models/meta_data.py +64 -0
- regscale/models/regscale_models/mixins/__init__.py +0 -0
- regscale/models/regscale_models/mixins/parent_cache.py +124 -0
- regscale/models/regscale_models/module.py +224 -0
- regscale/models/regscale_models/modules.py +191 -0
- regscale/models/regscale_models/objective.py +14 -0
- regscale/models/regscale_models/parameter.py +87 -0
- regscale/models/regscale_models/ports_protocol.py +81 -0
- regscale/models/regscale_models/privacy.py +89 -0
- regscale/models/regscale_models/profile.py +50 -0
- regscale/models/regscale_models/profile_link.py +68 -0
- regscale/models/regscale_models/profile_mapping.py +124 -0
- regscale/models/regscale_models/project.py +63 -0
- regscale/models/regscale_models/property.py +278 -0
- regscale/models/regscale_models/question.py +85 -0
- regscale/models/regscale_models/questionnaire.py +87 -0
- regscale/models/regscale_models/questionnaire_instance.py +177 -0
- regscale/models/regscale_models/rbac.py +132 -0
- regscale/models/regscale_models/reference.py +86 -0
- regscale/models/regscale_models/regscale_model.py +1643 -0
- regscale/models/regscale_models/requirement.py +29 -0
- regscale/models/regscale_models/risk.py +274 -0
- regscale/models/regscale_models/sbom.py +54 -0
- regscale/models/regscale_models/scan_history.py +436 -0
- regscale/models/regscale_models/search.py +53 -0
- regscale/models/regscale_models/security_control.py +132 -0
- regscale/models/regscale_models/security_plan.py +204 -0
- regscale/models/regscale_models/software_inventory.py +159 -0
- regscale/models/regscale_models/stake_holder.py +64 -0
- regscale/models/regscale_models/stig.py +647 -0
- regscale/models/regscale_models/supply_chain.py +152 -0
- regscale/models/regscale_models/system_role.py +188 -0
- regscale/models/regscale_models/system_role_external_assignment.py +40 -0
- regscale/models/regscale_models/tag.py +37 -0
- regscale/models/regscale_models/tag_mapping.py +19 -0
- regscale/models/regscale_models/task.py +133 -0
- regscale/models/regscale_models/threat.py +196 -0
- regscale/models/regscale_models/user.py +175 -0
- regscale/models/regscale_models/user_group.py +55 -0
- regscale/models/regscale_models/vulnerability.py +242 -0
- regscale/models/regscale_models/vulnerability_mapping.py +162 -0
- regscale/models/regscale_models/workflow.py +55 -0
- regscale/models/regscale_models/workflow_action.py +34 -0
- regscale/models/regscale_models/workflow_instance.py +269 -0
- regscale/models/regscale_models/workflow_instance_step.py +114 -0
- regscale/models/regscale_models/workflow_template.py +58 -0
- regscale/models/regscale_models/workflow_template_step.py +45 -0
- regscale/regscale.py +815 -0
- regscale/utils/__init__.py +7 -0
- regscale/utils/b64conversion.py +14 -0
- regscale/utils/click_utils.py +118 -0
- regscale/utils/decorators.py +48 -0
- regscale/utils/dict_utils.py +59 -0
- regscale/utils/files.py +79 -0
- regscale/utils/fxns.py +30 -0
- regscale/utils/graphql_client.py +113 -0
- regscale/utils/lists.py +16 -0
- regscale/utils/numbers.py +12 -0
- regscale/utils/shell.py +148 -0
- regscale/utils/string.py +121 -0
- regscale/utils/synqly_utils.py +165 -0
- regscale/utils/threading/__init__.py +8 -0
- regscale/utils/threading/threadhandler.py +131 -0
- regscale/utils/threading/threadsafe_counter.py +47 -0
- regscale/utils/threading/threadsafe_dict.py +242 -0
- regscale/utils/threading/threadsafe_list.py +83 -0
- regscale/utils/version.py +104 -0
- regscale/validation/__init__.py +0 -0
- regscale/validation/address.py +37 -0
- regscale/validation/record.py +48 -0
- regscale/visualization/__init__.py +5 -0
- regscale/visualization/click.py +34 -0
- regscale_cli-6.16.0.0.dist-info/LICENSE +21 -0
- regscale_cli-6.16.0.0.dist-info/METADATA +659 -0
- regscale_cli-6.16.0.0.dist-info/RECORD +481 -0
- regscale_cli-6.16.0.0.dist-info/WHEEL +5 -0
- regscale_cli-6.16.0.0.dist-info/entry_points.txt +6 -0
- regscale_cli-6.16.0.0.dist-info/top_level.txt +2 -0
- tests/fixtures/__init__.py +2 -0
- tests/fixtures/api.py +87 -0
- tests/fixtures/models.py +91 -0
- tests/fixtures/test_fixture.py +144 -0
- tests/mocks/__init__.py +0 -0
- tests/mocks/objects.py +3 -0
- tests/mocks/response.py +32 -0
- tests/mocks/xml.py +13 -0
- tests/regscale/__init__.py +0 -0
- tests/regscale/core/__init__.py +0 -0
- tests/regscale/core/test_api.py +232 -0
- tests/regscale/core/test_app.py +406 -0
- tests/regscale/core/test_login.py +37 -0
- tests/regscale/core/test_logz.py +66 -0
- tests/regscale/core/test_sbom_generator.py +87 -0
- tests/regscale/core/test_validation_utils.py +163 -0
- tests/regscale/core/test_version.py +78 -0
- tests/regscale/models/__init__.py +0 -0
- tests/regscale/models/test_asset.py +71 -0
- tests/regscale/models/test_config.py +26 -0
- tests/regscale/models/test_control_implementation.py +27 -0
- tests/regscale/models/test_import.py +97 -0
- tests/regscale/models/test_issue.py +36 -0
- tests/regscale/models/test_mapping.py +52 -0
- tests/regscale/models/test_platform.py +31 -0
- tests/regscale/models/test_regscale_model.py +346 -0
- tests/regscale/models/test_report.py +32 -0
- tests/regscale/models/test_tenable_integrations.py +118 -0
- tests/regscale/models/test_user_model.py +121 -0
- tests/regscale/test_about.py +19 -0
- tests/regscale/test_authorization.py +65 -0
|
@@ -0,0 +1,1240 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
"""Integrates evidence gathering into RegScale CLI"""
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
# standard python imports
|
|
7
|
+
import fnmatch
|
|
8
|
+
import itertools
|
|
9
|
+
import json
|
|
10
|
+
import os
|
|
11
|
+
import shutil
|
|
12
|
+
import zipfile
|
|
13
|
+
from datetime import datetime
|
|
14
|
+
from typing import Tuple
|
|
15
|
+
|
|
16
|
+
import click # type: ignore
|
|
17
|
+
import pdfplumber # type: ignore
|
|
18
|
+
from docx import Document # type: ignore
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from rich.progress import Progress, TaskID
|
|
21
|
+
|
|
22
|
+
from regscale.core.app.api import Api
|
|
23
|
+
from regscale.core.app.application import Application
|
|
24
|
+
from regscale.core.app.logz import create_logger
|
|
25
|
+
from regscale.core.app.utils.app_utils import check_file_path, create_progress_object, error_and_exit
|
|
26
|
+
from regscale.models.app_models.click import regscale_ssp_id
|
|
27
|
+
from regscale.models.regscale_models import Assessment, File, Project, SecurityPlan
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@click.group()
|
|
31
|
+
def evidence():
|
|
32
|
+
"""Welcome to the RegScale Evidence Collection Automation CLI!"""
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@evidence.command()
|
|
36
|
+
def start():
|
|
37
|
+
"""Starts the evidence collection automation process."""
|
|
38
|
+
run_evidence_collection()
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@evidence.command(name="build_package")
|
|
42
|
+
@regscale_ssp_id()
|
|
43
|
+
@click.option(
|
|
44
|
+
"--path",
|
|
45
|
+
type=click.Path(exists=False, dir_okay=True, file_okay=False, path_type=Path),
|
|
46
|
+
help="Provide the desired path for creation of evidence files.",
|
|
47
|
+
default=os.path.join(os.getcwd(), "evidence"),
|
|
48
|
+
required=True,
|
|
49
|
+
)
|
|
50
|
+
def build_package(regscale_ssp_id: int, path: Path):
|
|
51
|
+
"""
|
|
52
|
+
This function will build a directory of evidence with the provided RegScale SSP Id
|
|
53
|
+
and RegScale Module and produce a zip file for extraction and use.
|
|
54
|
+
"""
|
|
55
|
+
package_builder(ssp_id=regscale_ssp_id, path=path)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def run_evidence_collection():
|
|
59
|
+
"""
|
|
60
|
+
This function will start the evidence collection automation process
|
|
61
|
+
"""
|
|
62
|
+
import pymupdf # type: ignore
|
|
63
|
+
|
|
64
|
+
app = Application()
|
|
65
|
+
api = Api()
|
|
66
|
+
config = app.config
|
|
67
|
+
check_file_path("./static")
|
|
68
|
+
progress = create_progress_object()
|
|
69
|
+
with progress:
|
|
70
|
+
task0 = progress.add_task("[white]Setting evidence folder directory variables...", total=3)
|
|
71
|
+
# call function to define variable for use outside of function
|
|
72
|
+
evidence_folder, dir_name, new_cwd = set_directory_variables(
|
|
73
|
+
task=task0, evidence_folder=config["evidenceFolder"], progress=progress
|
|
74
|
+
)
|
|
75
|
+
|
|
76
|
+
task1 = progress.add_task("[white]Building a required documents list from config.json...", total=3)
|
|
77
|
+
# call function to define variable for use outside of function
|
|
78
|
+
required_docs, document_list = parse_required_docs(
|
|
79
|
+
evidence_folder=evidence_folder, task=task1, progress=progress
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
task2 = progress.add_task("[white]Calculating files last modified times...", total=5)
|
|
83
|
+
# call function to define variable for use outside of function
|
|
84
|
+
times = get_doc_timestamps(evidence_folder=new_cwd, directory=dir_name, task=task2, progress=progress)
|
|
85
|
+
|
|
86
|
+
task3 = progress.add_task("[white]Building a required texts list from config.json...", total=3)
|
|
87
|
+
# call function to define variable for use outside of function
|
|
88
|
+
texts = set_required_texts(evidence_folder=evidence_folder, task=task3, progress=progress)
|
|
89
|
+
|
|
90
|
+
task4 = progress.add_task("[white]Searching evidence folder for required files...", total=4)
|
|
91
|
+
|
|
92
|
+
# call function to define variable for use outside of function
|
|
93
|
+
folders = find_required_files_in_folder(evidence_folder=new_cwd, task=task4, progress=progress)
|
|
94
|
+
|
|
95
|
+
task5 = progress.add_task("[white]Searching for digital signatures in documents...", total=2)
|
|
96
|
+
|
|
97
|
+
# call function to define variable for use outside of function
|
|
98
|
+
sig_results = signature_assessment_results(
|
|
99
|
+
directory=folders, r_docs=required_docs, task=task5, progress=progress
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
task6 = progress.add_task("[white]Testing if required documents are present...", total=2)
|
|
103
|
+
|
|
104
|
+
# call function to define variable for use outside of function
|
|
105
|
+
doc_results = document_assessment_results(
|
|
106
|
+
directory=folders, documents=document_list, task=task6, progress=progress
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
task7 = progress.add_task("[white]Extracting texts from required files...", total=4)
|
|
110
|
+
|
|
111
|
+
# call function to define variable for use outside of function
|
|
112
|
+
file_texts = parse_required_text_from_files(evidence_folder=new_cwd, task=task7, progress=progress)
|
|
113
|
+
|
|
114
|
+
task8 = progress.add_task("[white]Searching for required text in parsed documents...", total=2)
|
|
115
|
+
|
|
116
|
+
# call function to define variable for use outside of function
|
|
117
|
+
search_results = text_string_search(f_texts=file_texts, req_texts=texts, task=task8, progress=progress)
|
|
118
|
+
|
|
119
|
+
task9 = progress.add_task("[white]Testing if required texts are present", total=2)
|
|
120
|
+
|
|
121
|
+
# call function to define variable for use outside of function
|
|
122
|
+
text_results = text_assessment_results(searches=search_results, r_texts=texts, task=task9, progress=progress)
|
|
123
|
+
|
|
124
|
+
task10 = progress.add_task("[white]Retrieving data from the evidence test projects...", total=3)
|
|
125
|
+
|
|
126
|
+
# call function to define variable for use outside of function
|
|
127
|
+
data = gather_test_project_data(api=api, evidence_folder=evidence_folder, task=task10, progress=progress)
|
|
128
|
+
|
|
129
|
+
task11 = progress.add_task("[white]Testing file modification times...", total=2)
|
|
130
|
+
|
|
131
|
+
# call function to define variable to use outside of function
|
|
132
|
+
time_results = assess_doc_timestamps(timestamps=times, documents=required_docs, task=task11, progress=progress)
|
|
133
|
+
|
|
134
|
+
task12 = progress.add_task("[white]Building assessment report...", total=4)
|
|
135
|
+
|
|
136
|
+
# call function to define variable to use outside of function
|
|
137
|
+
report = assessments_report(
|
|
138
|
+
docres=doc_results,
|
|
139
|
+
textres=text_results,
|
|
140
|
+
timeres=time_results,
|
|
141
|
+
sigres=sig_results,
|
|
142
|
+
task=task12,
|
|
143
|
+
progress=progress,
|
|
144
|
+
)
|
|
145
|
+
|
|
146
|
+
task13 = progress.add_task("[white]Building assessment results dataframe...", total=4)
|
|
147
|
+
|
|
148
|
+
# call function to define variable to use outside of function
|
|
149
|
+
results = build_assessment_dataframe(assessments=report, task=task13, progress=progress)
|
|
150
|
+
|
|
151
|
+
task14 = progress.add_task("[white]Calculating assessment score...", total=1)
|
|
152
|
+
|
|
153
|
+
# call function to define variable for use outside of function
|
|
154
|
+
score_data = build_score_data(assessments=results, task=task14, progress=progress)
|
|
155
|
+
|
|
156
|
+
task15 = progress.add_task("[white]Building a table for the assessment report...", total=4)
|
|
157
|
+
|
|
158
|
+
# call function to define variable for use outside of function
|
|
159
|
+
html_output = build_html_table(assessments=report, task=task15, progress=progress)
|
|
160
|
+
|
|
161
|
+
task16 = progress.add_task("[white]Creating child assessment based on test results...", total=2)
|
|
162
|
+
|
|
163
|
+
# call function to create child assessment via POST request
|
|
164
|
+
create_child_assessments(
|
|
165
|
+
api=api, project_data=data, output=html_output, score_data=score_data, task=task16, progress=progress
|
|
166
|
+
)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def package_builder(ssp_id: int, path: Path):
|
|
170
|
+
"""Function to build a directory of evidence and produce a zip file for extraction and use
|
|
171
|
+
|
|
172
|
+
:param int ssp_id: RegScale System Security Plan ID
|
|
173
|
+
:param Path path: directory for file location
|
|
174
|
+
:return None
|
|
175
|
+
"""
|
|
176
|
+
app = Application()
|
|
177
|
+
api = Api()
|
|
178
|
+
with create_progress_object() as progress:
|
|
179
|
+
task = progress.add_task("[white]Building and zipping evidence folder for audit...", total=6)
|
|
180
|
+
try:
|
|
181
|
+
# Obtaining MEGA Api for given Organizer Record.
|
|
182
|
+
ssp = SecurityPlan.fetch_mega_api_data(ssp_id)
|
|
183
|
+
module_folder_name = f'{ssp["securityPlan"]["id"]}_{ssp["securityPlan"]["systemName"]}'
|
|
184
|
+
folder_contents_name = f'{ssp["securityPlan"]["id"]}_Evidence_Folder_Contents'
|
|
185
|
+
|
|
186
|
+
module_folder = path / module_folder_name
|
|
187
|
+
os.makedirs(module_folder.absolute(), exist_ok=True)
|
|
188
|
+
|
|
189
|
+
progress.update(task, advance=1)
|
|
190
|
+
|
|
191
|
+
# Checking MEGA Api for Attachments at SSP level
|
|
192
|
+
process_ssp_attachments(
|
|
193
|
+
ssp=ssp,
|
|
194
|
+
path=path,
|
|
195
|
+
folder_contents_name=folder_contents_name,
|
|
196
|
+
module_folder_name=module_folder_name,
|
|
197
|
+
api=api,
|
|
198
|
+
)
|
|
199
|
+
|
|
200
|
+
progress.update(task, advance=1)
|
|
201
|
+
|
|
202
|
+
# Checking MEGA Api for Attachments at Control level
|
|
203
|
+
process_control_attachments(
|
|
204
|
+
ssp=ssp,
|
|
205
|
+
path=path,
|
|
206
|
+
progress=progress,
|
|
207
|
+
module_folder_name=module_folder_name,
|
|
208
|
+
module_folder=module_folder,
|
|
209
|
+
api=api,
|
|
210
|
+
task=task,
|
|
211
|
+
)
|
|
212
|
+
# Creating zip file and removing temporary Evidence Folder
|
|
213
|
+
new_path = Path("./evidence.zip")
|
|
214
|
+
zip_folder(path, new_path)
|
|
215
|
+
remove_directory(module_folder)
|
|
216
|
+
os.remove(path / f"{folder_contents_name}.json")
|
|
217
|
+
shutil.move(new_path, path / "evidence.zip")
|
|
218
|
+
progress.update(task, advance=1)
|
|
219
|
+
app.logger.info("An evidence zipfile has been created and is ready for use!")
|
|
220
|
+
except Exception as ex:
|
|
221
|
+
app.logger.info("No SSP or Evidence exists for given Organizer Record.\n%s", ex)
|
|
222
|
+
|
|
223
|
+
progress.update(task, advance=1)
|
|
224
|
+
app.logger.info("Evidence zipfile located. Thank you!")
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def process_ssp_attachments(ssp: dict, path: Path, folder_contents_name: str, module_folder_name: str, api: Api):
|
|
228
|
+
"""
|
|
229
|
+
Process SSP attachments and download them to the evidence folder
|
|
230
|
+
|
|
231
|
+
:param dict ssp: RegScale System Security Plan with mega API data
|
|
232
|
+
:param Path path: directory for file location
|
|
233
|
+
:param str folder_contents_name: name of the folder contents file
|
|
234
|
+
:param str module_folder_name: name of the module folder
|
|
235
|
+
:param Api api: RegScale CLI API object
|
|
236
|
+
"""
|
|
237
|
+
if attachments := ssp.get("attachments"):
|
|
238
|
+
outter_attachments = [
|
|
239
|
+
{
|
|
240
|
+
"fileName": i["trustedDisplayName"],
|
|
241
|
+
"storedName": i["trustedStorageName"],
|
|
242
|
+
"parentId": i["parentId"],
|
|
243
|
+
"parentModule": i["parentModule"],
|
|
244
|
+
"fileHash": i.get("fileHash") or i.get("shaHash"),
|
|
245
|
+
"fileSize": i["size"],
|
|
246
|
+
"dateCreated": i["dateCreated"],
|
|
247
|
+
}
|
|
248
|
+
for i in attachments
|
|
249
|
+
]
|
|
250
|
+
|
|
251
|
+
json_data = json.dumps(outter_attachments, indent=4, separators=(", ", ": "))
|
|
252
|
+
with open(f"{path}/{folder_contents_name}.json", "w", newline="\n") as next_output:
|
|
253
|
+
next_output.write(json_data)
|
|
254
|
+
|
|
255
|
+
# Adding any Attachments at SSP level to corresponding folder
|
|
256
|
+
for f in outter_attachments:
|
|
257
|
+
file = File.download_file_from_regscale_to_memory(
|
|
258
|
+
api=api,
|
|
259
|
+
record_id=f["parentId"],
|
|
260
|
+
module=f["parentModule"],
|
|
261
|
+
stored_name=f["storedName"],
|
|
262
|
+
file_hash=f["fileHash"],
|
|
263
|
+
)
|
|
264
|
+
with open(f"{path}/{module_folder_name}/{f['fileName']}", "wb") as att:
|
|
265
|
+
att.write(file)
|
|
266
|
+
|
|
267
|
+
else:
|
|
268
|
+
api.logger.info("No Evidence at SSP level for SSP. Checking for Evidence at Control level.")
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def process_control_attachments(
|
|
272
|
+
ssp: dict, path: Path, progress: Progress, module_folder_name: str, module_folder: Path, api: Api, task: TaskID
|
|
273
|
+
) -> None:
|
|
274
|
+
"""
|
|
275
|
+
Process Control attachments and download them to the evidence folder
|
|
276
|
+
|
|
277
|
+
:param dict ssp: RegScale System Security Plan with mega API data
|
|
278
|
+
:param Path path: directory for file location
|
|
279
|
+
:param Progress progress: Progress object
|
|
280
|
+
:param str module_folder_name: name of the module folder
|
|
281
|
+
:param Path module_folder: path to module folder
|
|
282
|
+
:param Api api: RegScale CLI API object
|
|
283
|
+
:param TaskID task: The task to update on the job_progress
|
|
284
|
+
:rtype: None
|
|
285
|
+
"""
|
|
286
|
+
if controls := ssp["normalizedControls"]:
|
|
287
|
+
control_attachments = []
|
|
288
|
+
for i in controls:
|
|
289
|
+
name = i["control"]["item3"]["controlId"]
|
|
290
|
+
|
|
291
|
+
for p in i["attachments"]:
|
|
292
|
+
if not p:
|
|
293
|
+
continue
|
|
294
|
+
file_name = p["trustedDisplayName"]
|
|
295
|
+
stored_name = p["trustedStorageName"]
|
|
296
|
+
parent_id = p["parentId"]
|
|
297
|
+
parent_module = p["parentModule"]
|
|
298
|
+
file_hash = p["fileHash"]
|
|
299
|
+
sha_hash = p["shaHash"]
|
|
300
|
+
file_size = p["size"]
|
|
301
|
+
date_created = p["dateCreated"]
|
|
302
|
+
|
|
303
|
+
control_attachments.append(
|
|
304
|
+
{
|
|
305
|
+
"controlId": name,
|
|
306
|
+
"fileName": file_name,
|
|
307
|
+
"storedName": stored_name,
|
|
308
|
+
"parentId": parent_id,
|
|
309
|
+
"parentModule": parent_module,
|
|
310
|
+
"fileHash": file_hash,
|
|
311
|
+
"shaHash": sha_hash,
|
|
312
|
+
"fileSize": file_size,
|
|
313
|
+
"dateCreated": date_created,
|
|
314
|
+
}
|
|
315
|
+
)
|
|
316
|
+
|
|
317
|
+
progress.update(task, advance=1)
|
|
318
|
+
|
|
319
|
+
# Creating folders for Controls with Attachments
|
|
320
|
+
control_folders = []
|
|
321
|
+
for name in control_attachments:
|
|
322
|
+
control_folders.append(name["controlId"])
|
|
323
|
+
control_folders = list(set(control_folders))
|
|
324
|
+
for i in control_folders:
|
|
325
|
+
os.makedirs(module_folder / str(i), exist_ok=True)
|
|
326
|
+
|
|
327
|
+
# Adding any Attachments at Control level to corresponding folder
|
|
328
|
+
_download_control_attachments(control_attachments, api, path, module_folder_name)
|
|
329
|
+
|
|
330
|
+
progress.update(task, advance=1)
|
|
331
|
+
|
|
332
|
+
else:
|
|
333
|
+
api.logger.info("No Control level Evidence for SSP.")
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
def _download_control_attachments(
|
|
337
|
+
control_attachments: list[dict], api: Api, path: Path, module_folder_name: str
|
|
338
|
+
) -> None:
|
|
339
|
+
"""
|
|
340
|
+
Download Control attachments to the evidence folder
|
|
341
|
+
|
|
342
|
+
:param list[dict] control_attachments: List of control attachments
|
|
343
|
+
:param Api api: RegScale CLI API object
|
|
344
|
+
:param Path path: directory for file location
|
|
345
|
+
:param str module_folder_name: name of the module folder
|
|
346
|
+
:rtype: None
|
|
347
|
+
"""
|
|
348
|
+
for f in control_attachments:
|
|
349
|
+
file = File.download_file_from_regscale_to_memory(
|
|
350
|
+
api=api,
|
|
351
|
+
record_id=f["parentId"],
|
|
352
|
+
module=f["parentModule"],
|
|
353
|
+
stored_name=f["storedName"],
|
|
354
|
+
file_hash=f["fileHash"],
|
|
355
|
+
)
|
|
356
|
+
|
|
357
|
+
with open(
|
|
358
|
+
f"{path}/{module_folder_name}/{f['controlId']}/{f['fileName']}",
|
|
359
|
+
"wb",
|
|
360
|
+
) as output:
|
|
361
|
+
output.write(file)
|
|
362
|
+
with open(
|
|
363
|
+
f"{path}/{module_folder_name}/{f['controlId']}/{f['controlId']}_Evidence_Folder_Contents.json",
|
|
364
|
+
"a",
|
|
365
|
+
) as file_drop:
|
|
366
|
+
json.dump(f, file_drop, indent=4, separators=(", ", ": "))
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
def remove_directory(directory_path: Path) -> None:
|
|
370
|
+
"""
|
|
371
|
+
This function removes a given directory even if files stored there
|
|
372
|
+
|
|
373
|
+
:param Path directory_path: file path of directory to remove
|
|
374
|
+
:rtype: None
|
|
375
|
+
"""
|
|
376
|
+
shutil.rmtree(directory_path.absolute())
|
|
377
|
+
create_logger().info("Temporary Evidence directory removed successfully!")
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
def zip_folder(folder_path: Path, zip_path: Path) -> None:
|
|
381
|
+
"""
|
|
382
|
+
This function zips up files and folders in a given folder or directory path.
|
|
383
|
+
|
|
384
|
+
:param Path folder_path: file path of evidence folder
|
|
385
|
+
:param Path zip_path: file path for zip location of evidence folder
|
|
386
|
+
:rtype: None
|
|
387
|
+
"""
|
|
388
|
+
# Create a ZIP file object in write mode
|
|
389
|
+
with zipfile.ZipFile(zip_path.absolute(), "w", zipfile.ZIP_DEFLATED) as zipf:
|
|
390
|
+
# Iterate over all the files and subfolders in the given folder
|
|
391
|
+
for root, dirs, files in os.walk(folder_path.absolute()):
|
|
392
|
+
for file in files:
|
|
393
|
+
# Get the absolute path of the current file
|
|
394
|
+
file_path = os.path.join(root, file)
|
|
395
|
+
# Get the relative path of the current file within the folder
|
|
396
|
+
relative_path = os.path.relpath(file_path, folder_path.absolute()) # type: ignore
|
|
397
|
+
# Add the file to the ZIP archive using its relative path
|
|
398
|
+
zipf.write(file_path, relative_path) # type: ignore
|
|
399
|
+
|
|
400
|
+
create_logger().info("Folder zipped successfully!")
|
|
401
|
+
|
|
402
|
+
|
|
403
|
+
def remove(list_to_review: list) -> list:
|
|
404
|
+
"""
|
|
405
|
+
Remove items that start with "."
|
|
406
|
+
|
|
407
|
+
:param list list_to_review: list of items to review
|
|
408
|
+
:return: copied list with items removed
|
|
409
|
+
:rtype: list
|
|
410
|
+
"""
|
|
411
|
+
copy_list = list_to_review.copy()
|
|
412
|
+
# loop through folder/file list
|
|
413
|
+
for item in list_to_review:
|
|
414
|
+
# if the folder or file starts with '.'
|
|
415
|
+
if item.startswith("."):
|
|
416
|
+
# remove the item from the list
|
|
417
|
+
copy_list.remove(item)
|
|
418
|
+
return copy_list
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def delta(time: datetime) -> int:
|
|
422
|
+
"""
|
|
423
|
+
Calculates the days between provided datetime object and the datetime function was called
|
|
424
|
+
|
|
425
|
+
:param datetime time:
|
|
426
|
+
:return: # of days difference between provided date and datetime function was called
|
|
427
|
+
:rtype: int
|
|
428
|
+
"""
|
|
429
|
+
# find time difference between dates
|
|
430
|
+
diff = datetime.now() - time
|
|
431
|
+
# return the difference in integer days
|
|
432
|
+
return diff.days
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
def calc_score(number: int, score_data: Tuple[list[int], list[int], list[int]]) -> int:
|
|
436
|
+
"""
|
|
437
|
+
calculate score
|
|
438
|
+
|
|
439
|
+
:param int number: Index in list
|
|
440
|
+
:param Tuple[list[int], list[int], list[int]] score_data: List of scores
|
|
441
|
+
:return: Test score
|
|
442
|
+
:rtype: int
|
|
443
|
+
"""
|
|
444
|
+
# bring in score lists
|
|
445
|
+
true_scores = score_data[0]
|
|
446
|
+
total_scores = score_data[2]
|
|
447
|
+
# set score values
|
|
448
|
+
true_score = true_scores[number]
|
|
449
|
+
total_score = total_scores[number]
|
|
450
|
+
# calculate test score for this result and check for zero division
|
|
451
|
+
return int((true_score / total_score) * 100) if int(total_score) != 0 else 0
|
|
452
|
+
|
|
453
|
+
|
|
454
|
+
def find_signatures(file: str) -> int:
|
|
455
|
+
"""
|
|
456
|
+
Determine if the file is digitally signed
|
|
457
|
+
|
|
458
|
+
:param str file: file path
|
|
459
|
+
:return: # of signatures found
|
|
460
|
+
:rtype: int
|
|
461
|
+
"""
|
|
462
|
+
import pymupdf
|
|
463
|
+
|
|
464
|
+
number = 0
|
|
465
|
+
logger = create_logger()
|
|
466
|
+
# if the file is a pdf document
|
|
467
|
+
if file.endswith(".pdf"):
|
|
468
|
+
try:
|
|
469
|
+
# open the document
|
|
470
|
+
doc = pymupdf.open(file)
|
|
471
|
+
except pymupdf.FileNotFoundError:
|
|
472
|
+
# set sig flag equal to 0
|
|
473
|
+
number = 0
|
|
474
|
+
logger.warning("no such file %s .", file)
|
|
475
|
+
else:
|
|
476
|
+
# determine if document is digitally signed
|
|
477
|
+
number = doc.get_sigflags()
|
|
478
|
+
# if the sig flag is equal to 3
|
|
479
|
+
if number == 3:
|
|
480
|
+
logger.info("%s has signature fields and has been digitally signed.", file)
|
|
481
|
+
# if the sig flag is equal to 1
|
|
482
|
+
elif number == 1:
|
|
483
|
+
logger.info("%s has signature fields, but has not been digitally signed.", file)
|
|
484
|
+
# if the sig flag is equal to -1
|
|
485
|
+
elif number == -1:
|
|
486
|
+
logger.info("%s has no signature fields to hold a digital signature.", file)
|
|
487
|
+
# if the file is a docx document
|
|
488
|
+
if not file.endswith(".pdf"):
|
|
489
|
+
# set sig flag equal to 0
|
|
490
|
+
number = 0
|
|
491
|
+
logger.warning("%s is not a pdf document.", file)
|
|
492
|
+
|
|
493
|
+
# return variable for use outside of local scope
|
|
494
|
+
return number
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
def set_directory_variables(task: TaskID, evidence_folder: str, progress: Progress) -> Tuple[str, str, str]:
|
|
498
|
+
"""
|
|
499
|
+
Set evidence folder directory variables
|
|
500
|
+
|
|
501
|
+
:param TaskID task: The task to update on the job_progress
|
|
502
|
+
:param str evidence_folder: File path to evidence folder
|
|
503
|
+
:param Progress progress: Progress object
|
|
504
|
+
:return: Tuple[evidence folder path, directory name, new working directory]
|
|
505
|
+
:rtype: Tuple[str, str, str]
|
|
506
|
+
"""
|
|
507
|
+
# set evidence folder variable to init.yaml value
|
|
508
|
+
# if evidence folder does not exist then create it so tests will pass
|
|
509
|
+
check_file_path(evidence_folder)
|
|
510
|
+
# if evidence folder does not exist or if it is empty then error out
|
|
511
|
+
if evidence_folder is None or len(os.listdir(evidence_folder)) <= 1:
|
|
512
|
+
error_and_exit("The directory set to evidenceFolder cannot be found or is empty.")
|
|
513
|
+
else:
|
|
514
|
+
# otherwise change directory to the evidence folder
|
|
515
|
+
os.chdir(evidence_folder)
|
|
516
|
+
progress.update(task, advance=1)
|
|
517
|
+
# include RegScale projects folder
|
|
518
|
+
dir_name = [filename for filename in os.listdir(os.getcwd()) if os.path.isdir(os.path.join(os.getcwd(), filename))][
|
|
519
|
+
0
|
|
520
|
+
]
|
|
521
|
+
progress.update(task, advance=1)
|
|
522
|
+
# pick up subdirectory under the evidence folder
|
|
523
|
+
new_cwd = os.getcwd() + os.sep + dir_name
|
|
524
|
+
progress.update(task, advance=1)
|
|
525
|
+
# return variables for use outside local scope
|
|
526
|
+
return evidence_folder, dir_name, new_cwd
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
def parse_required_docs(evidence_folder: str, task: TaskID, progress: Progress) -> Tuple[list[dict], set[str]]:
|
|
530
|
+
"""
|
|
531
|
+
build a list of the required documents from config.json
|
|
532
|
+
|
|
533
|
+
:param str evidence_folder:
|
|
534
|
+
:param TaskID task: The task to update on the job_progress
|
|
535
|
+
:param Progress progress: Progress object
|
|
536
|
+
:return: Tuple[required_docs, document_list]
|
|
537
|
+
:rtype: Tuple[list[dict], set[str]]
|
|
538
|
+
"""
|
|
539
|
+
# create an empty list to hold a list of all document requirements for the assessment
|
|
540
|
+
required_docs = []
|
|
541
|
+
progress.update(task, advance=1)
|
|
542
|
+
# create an empty list to hold a list of all required documents
|
|
543
|
+
document_list = set()
|
|
544
|
+
progress.update(task, advance=1)
|
|
545
|
+
# open app//evidence//config.json file and read contents
|
|
546
|
+
with open(f"{evidence_folder}{os.sep}config.json", "r", encoding="utf-8") as json_file:
|
|
547
|
+
# load json object into a readable dictionary
|
|
548
|
+
rules = json.load(json_file)
|
|
549
|
+
progress.update(task, advance=1)
|
|
550
|
+
# loop through required document dicts
|
|
551
|
+
for i in range(len(rules["required-documents"])):
|
|
552
|
+
# add to a list of dictionaries for parsing
|
|
553
|
+
required_docs.append(
|
|
554
|
+
{
|
|
555
|
+
"file-name": rules["required-documents"][i].get("file-name"),
|
|
556
|
+
"last-updated-by": rules["required-documents"][i].get("last-updated-by"),
|
|
557
|
+
"signatures-required": rules["required-documents"][i].get("signatures-required"),
|
|
558
|
+
"signature-count": rules["required-documents"][i].get("signature-count"),
|
|
559
|
+
}
|
|
560
|
+
)
|
|
561
|
+
# update contents of list if it does not already exist
|
|
562
|
+
document_list.add(rules["required-documents"][i].get("file-name"))
|
|
563
|
+
progress.update(task, advance=1)
|
|
564
|
+
# return variables for use outside of local scope
|
|
565
|
+
return required_docs, document_list
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
def get_doc_timestamps(evidence_folder: str, directory: str, task: TaskID, progress: Progress) -> list[dict]:
|
|
569
|
+
"""
|
|
570
|
+
Get each file's last modified time
|
|
571
|
+
|
|
572
|
+
:param str evidence_folder: File path to evidence folder
|
|
573
|
+
:param str directory: File path to directory
|
|
574
|
+
:param TaskID task: The task to update on the job_progress
|
|
575
|
+
:param Progress progress: Progress object
|
|
576
|
+
:return: list of dictionaries
|
|
577
|
+
:rtype: list[dict]
|
|
578
|
+
"""
|
|
579
|
+
# create empty list to hold file modified times
|
|
580
|
+
modified_times: list[dict] = []
|
|
581
|
+
progress.update(task, advance=1)
|
|
582
|
+
# get list of folders in parent folder
|
|
583
|
+
folders_list = os.listdir(evidence_folder)
|
|
584
|
+
progress.update(task, advance=1)
|
|
585
|
+
# remove any child folders that start with '.'
|
|
586
|
+
new_folders = remove(list_to_review=folders_list)
|
|
587
|
+
progress.update(task, advance=1)
|
|
588
|
+
# loop through directory listing
|
|
589
|
+
for folder in new_folders:
|
|
590
|
+
# get list of files in each folder
|
|
591
|
+
filelist = os.listdir(os.path.join(evidence_folder, folder))
|
|
592
|
+
# remove any files that start with '.'
|
|
593
|
+
remove(list_to_review=filelist)
|
|
594
|
+
# loop through list of files in each folder
|
|
595
|
+
modified_times.extend(
|
|
596
|
+
{
|
|
597
|
+
"program": folder,
|
|
598
|
+
"file": filename,
|
|
599
|
+
"last-modified": os.path.getmtime(os.path.join(directory, folder, filename)),
|
|
600
|
+
}
|
|
601
|
+
for filename in filelist
|
|
602
|
+
)
|
|
603
|
+
progress.update(task, advance=1)
|
|
604
|
+
# loop through the list of timestamps
|
|
605
|
+
for i, time_data in enumerate(modified_times):
|
|
606
|
+
# update the last-modified value to be the count of days
|
|
607
|
+
modified_times[i].update({"last-modified": delta(time=datetime.fromtimestamp(time_data["last-modified"]))})
|
|
608
|
+
progress.update(task, advance=1)
|
|
609
|
+
# return variable for use outside local scope
|
|
610
|
+
return modified_times
|
|
611
|
+
|
|
612
|
+
|
|
613
|
+
def set_required_texts(evidence_folder: str, task: TaskID, progress: Progress) -> set[str]:
|
|
614
|
+
"""
|
|
615
|
+
parse config.json file and build a list of the required texts for the assessment
|
|
616
|
+
|
|
617
|
+
:param str evidence_folder: File path to evidence folder
|
|
618
|
+
:param TaskID task: The task to update on the job_progress
|
|
619
|
+
:param Progress progress: Progress object
|
|
620
|
+
:return: Required text
|
|
621
|
+
:rtype: set[str]
|
|
622
|
+
"""
|
|
623
|
+
# create an empty set to hold all unique required texts for the assessment
|
|
624
|
+
required_text = set()
|
|
625
|
+
progress.update(task, advance=1)
|
|
626
|
+
# open app//evidence//config.json file and read contents
|
|
627
|
+
with open(f"{evidence_folder}{os.sep}config.json", "r", encoding="utf-8") as json_file:
|
|
628
|
+
# load json object into a readable dictionary
|
|
629
|
+
rules = json.load(json_file)
|
|
630
|
+
progress.update(task, advance=1)
|
|
631
|
+
# create iterator to traverse dictionary
|
|
632
|
+
for i in range(len(rules["rules-engine"])):
|
|
633
|
+
# pull out required text to look for from config
|
|
634
|
+
for items in rules["rules-engine"][i]["text-to-find"]:
|
|
635
|
+
# exclude duplicate text to search from required text
|
|
636
|
+
required_text.add(items)
|
|
637
|
+
progress.update(task, advance=1)
|
|
638
|
+
# return variable for use outside of local scope
|
|
639
|
+
return required_text
|
|
640
|
+
|
|
641
|
+
|
|
642
|
+
def find_required_files_in_folder(evidence_folder: str, task: TaskID, progress: Progress) -> list[dict]:
    """
    Pull out required files from each directory for parsing

    :param str evidence_folder: File path to evidence folder
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of program/file entries found in each folder
    :rtype: list[dict]
    """
    # create empty list to hold list of files in directory
    dir_list: list[dict] = []
    progress.update(task, advance=1)
    # build a list of all folders to iterate through
    folder_list = os.listdir(evidence_folder)
    progress.update(task, advance=1)
    # remove any folders starting with '.' from list
    new_folders_list = remove(folder_list)
    progress.update(task, advance=1)
    for folder in new_folders_list:
        # build a list of all files contained in sub-directories
        filelist = os.listdir(evidence_folder + os.sep + folder)
        # remove folders and file names that start with a .
        remove(filelist)
        dir_list.extend({"program": folder, "file": filename} for filename in filelist)
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return dir_list


def signature_assessment_results(
    directory: list[dict], r_docs: list[dict], task: TaskID, progress: Progress
) -> list[dict]:
    """
    Compares signature config parameter against signature detection

    :param list[dict] directory: List of directories
    :param list[dict] r_docs: List of documents
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: Assessment of signatures
    :rtype: list[dict]
    """
    # create empty list to hold assessment results
    sig_assessments: list[dict] = []
    progress.update(task, advance=1)
    # loop through list of found documents in each sub-folder
    for doc_file in directory:
        for required in r_docs:
            if doc_file["file"] == required["file-name"]:
                # if the signatures-required field is set to true
                if required["signatures-required"] is True:
                    # run the signature detection function for the file
                    sig_result = find_signatures(doc_file["file"])
                    # if the return value is 3 pass the test
                    if sig_result == 3:
                        # append a true result for each document tested
                        sig_assessments.append(
                            {
                                "program": doc_file["program"],
                                "file": doc_file["file"],
                                "test": "signature-required",
                                "result": True,
                            }
                        )
                    # if the return value is 1, -1 or 0 fail the test
                    else:
                        # append a false result for each document tested
                        sig_assessments.append(
                            {
                                "program": doc_file["program"],
                                "file": doc_file["file"],
                                "test": "signature-required",
                                "result": False,
                            }
                        )
                # if the signatures-required field is set to false
                if required["signatures-required"] is False:
                    # append a true result for each document not requiring a signature
                    sig_assessments.append(
                        {
                            "program": doc_file["program"],
                            "file": doc_file["file"],
                            "test": "signature-required (not required)",
                            "result": True,
                        }
                    )
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return sig_assessments


def document_assessment_results(
    directory: list[dict], documents: set[str], task: TaskID, progress: Progress
) -> list[dict]:
    """
    Test if required documents are present in each directory

    :param list[dict] directory: List of directories
    :param set[str] documents: Set of required document names
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of assessments of provided documents in the provided directory
    :rtype: list[dict]
    """
    # create empty list to hold assessment results
    doc_assessments: list[dict] = []
    progress.update(task, advance=1)
    # loop through list of found documents in each sub-folder
    for doc_file in directory:
        # if the file in the sub-folder is in the required documents list
        if doc_file["file"] in documents:
            # append a true result for each file in each program
            doc_assessments.append(
                {
                    "program": doc_file["program"],
                    "file": doc_file["file"],
                    "test": "required-documents",
                    "result": True,
                }
            )
        else:
            # append a false result for each file in each program
            doc_assessments.append(
                {
                    "program": doc_file["program"],
                    "file": doc_file["file"],
                    "test": "required-documents",
                    "result": False,
                }
            )
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return doc_assessments


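A small worked example of how a directory listing and the required-document set combine into pass/fail records, using made-up file names:

directory = [
    {"program": "access-control", "file": "policy.docx"},
    {"program": "access-control", "file": "notes.txt"},
]
required = {"policy.docx", "audit-log.pdf"}

results = [
    {"program": d["program"], "file": d["file"], "test": "required-documents", "result": d["file"] in required}
    for d in directory
]
print(results)  # policy.docx passes, notes.txt fails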
def parse_required_text_from_files(evidence_folder: str, task: TaskID, progress: Progress) -> list[dict]:
    """
    Parse text from docx/pdf files and hold strings representing required text to test

    :param str evidence_folder: File path to the evidence folder
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: Results of text found for the files
    :rtype: list[dict]
    """
    # create an empty list to hold all strings from parsed documents
    full_text: list[dict] = []
    progress.update(task, advance=1)
    # build a list of files in the folder
    folder_list = os.listdir(evidence_folder)
    progress.update(task, advance=1)
    # remove all folders that start with '.'
    removed_folders_list = remove(folder_list)
    progress.update(task, advance=1)
    for folder in removed_folders_list:
        # create a list of files to iterate through for parsing
        file_list = os.listdir(os.path.join(evidence_folder, folder))
        remove(file_list)
        # iterate through all files in the list
        for filename in file_list:
            # if the filename is a .docx file
            if filename.endswith(".docx"):
                # open the Word document to enable parsing
                document = Document(os.path.join(evidence_folder, folder, filename))
                output: list[str] = [para.text for para in document.paragraphs]
                # add each file and the requisite text to the dictionary to test
                full_text.append({"program": folder, "file": filename, "text": output})
            elif filename.endswith(".pdf"):
                # create empty list to hold text per file
                output_text_list: list[str] = []
                # open the PDF file with pdfplumber
                with pdfplumber.open(os.path.join(evidence_folder, folder, filename)) as pdf:
                    # set number of pages
                    pages = pdf.pages
                    # for each page in the pdf document
                    for page in pages:
                        # extract the text
                        text = page.extract_text()
                        # write the text to a list
                        output_text_list.append(text)
                # add each file and the requisite text to the dictionary to test
                full_text.append(
                    {
                        "program": folder,
                        "file": filename,
                        "text": output_text_list,
                    }
                )
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return full_text


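The two extraction branches above rely on python-docx and pdfplumber; the standalone sketch below shows the same per-file pattern in isolation, with placeholder paths and hypothetical helper names:

import pdfplumber
from docx import Document


def extract_docx_text(path: str) -> list[str]:
    """Collect one string per paragraph from a Word document."""
    return [para.text for para in Document(path).paragraphs]


def extract_pdf_text(path: str) -> list[str]:
    """Collect one string per page from a PDF; pages with no text yield ''."""
    with pdfplumber.open(path) as pdf:
        return [page.extract_text() or "" for page in pdf.pages]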
def text_string_search(f_texts: list[dict], req_texts: set[str], task: TaskID, progress: Progress) -> list[dict]:
    """
    Search for required texts in document paragraphs

    :param list[dict] f_texts: List of documents
    :param set[str] req_texts: Required text
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: Results of searched text in documents
    :rtype: list[dict]
    """
    # create empty list to hold assessment results
    search_list: list[dict] = []
    progress.update(task, advance=1)
    # iterate through each sentence in the required texts
    for parsed_file, line in itertools.product(f_texts, req_texts):
        # if the required text appears in the parsed paragraph
        if any(line in text for text in parsed_file["text"]):
            # then create a "True" entry in the empty list
            search_list.append(
                {
                    "program": parsed_file["program"],
                    "file": parsed_file["file"],
                    "text": line,
                    "result": True,
                }
            )
        else:
            # else create a "False" entry in the empty list
            search_list.append(
                {
                    "program": parsed_file["program"],
                    "file": parsed_file["file"],
                    "text": line,
                    "result": False,
                }
            )
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return search_list


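The `itertools.product` call above is a flattened nested loop: every (parsed document, required phrase) pair is tested exactly once. A tiny illustration with invented data:

import itertools

docs = [{"file": "policy.docx", "text": ["Approved by the CISO", "Version 2.1"]}]
phrases = {"Approved by", "Reviewed by"}

for doc, phrase in itertools.product(docs, phrases):
    found = any(phrase in paragraph for paragraph in doc["text"])
    print(doc["file"], phrase, found)
# "Approved by" -> True, "Reviewed by" -> False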
def text_assessment_results(searches: list[dict], r_texts: set[str], task: TaskID, progress: Progress) -> list[dict]:
    """
    Test if required text is present in required files and return test assessment

    :param list[dict] searches: List of results
    :param set[str] r_texts: Required text
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of results
    :rtype: list[dict]
    """
    # create empty list to hold assessment results
    text_results: list[dict] = []
    progress.update(task, advance=1)
    # loop through text string search results
    for result, line in itertools.product(searches, r_texts):
        # if the text matches the required text
        if result["text"] == line and result["result"] is True:
            text_info = result["text"]
            # condense results into 1 per file
            text_results.append(
                {
                    "program": result["program"],
                    "file": result["file"],
                    "test": f"required-text ({text_info})",
                    "result": result["result"],
                }
            )
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return text_results


def gather_test_project_data(api: Api, evidence_folder: str, task: TaskID, progress: Progress) -> list[dict]:
    """
    Gather information from evidence test projects created in RegScale to fetch project data

    :param Api api: API object
    :param str evidence_folder: File path to evidence folder
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of results
    :rtype: list[dict]
    """
    # create empty list to hold project test data from GET API call
    test_data: list[dict] = []
    progress.update(task, advance=1)
    # test project information created in RegScale UI
    with open(evidence_folder + os.sep + "list.json", "r", encoding="utf-8") as json_file:
        # load json object into a readable dictionary
        lists = json.load(json_file)
    # loop through projects in the list.json
    test_data.extend(
        {
            "id": lists["parser-list"][i].get("id"),
            "program": lists["parser-list"][i].get("folder-name"),
        }
        for i in range(len(lists["parser-list"]))
    )
    progress.update(task, advance=1)
    # create empty list to hold json response data for each project
    test_info: list[dict] = []
    # iterate through test projects and make sequential GET API calls
    for item in test_data:
        # make a GET request for each project
        if project := Project.get_object(item["id"]):
            api.logger.info("Project data retrieval was successful.")
            # save the json response data
            test_info.append(
                {
                    "id": project.id,
                    "title": project.title,
                    "uuid": project.uuid,
                    "projectmanagerid": project.projectmanagerid,
                    "parentid": project.parentId,
                    "parentmodule": project.parentModule,
                    "program": project.program,
                }
            )
        else:
            api.logger.error("Project data retrieval was unsuccessful.")
    progress.update(task, advance=1)
    # return variables for use outside of local scope
    return test_info


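Only the `parser-list` entries' `id` and `folder-name` keys are consumed above; an illustrative list.json shape (IDs and folder names invented) and the resulting test_data:

example_list = {
    "parser-list": [
        {"id": 101, "folder-name": "access-control"},
        {"id": 102, "folder-name": "incident-response"},
    ]
}

test_data = [
    {"id": entry.get("id"), "program": entry.get("folder-name")}
    for entry in example_list["parser-list"]
]
print(test_data)
# [{'id': 101, 'program': 'access-control'}, {'id': 102, 'program': 'incident-response'}]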
def assess_doc_timestamps(
    timestamps: list[dict], documents: list[dict], task: TaskID, progress: Progress
) -> list[dict]:
    """
    Test file modification times

    :param list[dict] timestamps: list of modified timestamps
    :param list[dict] documents: list of documents
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of timestamp test results
    :rtype: list[dict]
    """
    # create empty list to store test results
    assessed_timestamps = []
    progress.update(task, advance=1)
    # loop through timestamps
    for items in timestamps:
        # loop through required documents
        for doc_items in documents:
            # if file names match between the list of dicts
            if fnmatch.fnmatch(items["file"], doc_items["file-name"]):
                # if the file's age in days is within the required modification window
                if items["last-modified"] < doc_items["last-updated-by"]:
                    # append true result to the list of dicts
                    assessed_timestamps.append(
                        {
                            "program": items["program"],
                            "file": items["file"],
                            "test": "last-updated-by",
                            "result": True,
                        }
                    )
                else:
                    # append false results to the list of dicts
                    assessed_timestamps.append(
                        {
                            "program": items["program"],
                            "file": items["file"],
                            "test": "last-updated-by",
                            "result": False,
                        }
                    )
    progress.update(task, advance=1)
    # return variables for use outside of local scope
    return assessed_timestamps


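`fnmatch.fnmatch` lets the configured `file-name` values carry shell-style wildcards instead of exact names, for example:

import fnmatch

print(fnmatch.fnmatch("scan-results-2024-01.pdf", "scan-results-*.pdf"))  # True
print(fnmatch.fnmatch("policy.docx", "*.pdf"))                            # False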
def assessments_report(
    docres: list[dict],
    textres: list[dict],
    timeres: list[dict],
    sigres: list[dict],
    task: TaskID,
    progress: Progress,
) -> list[dict]:
    """
    Function that builds the assessment report for all results

    :param list[dict] docres: List of document results
    :param list[dict] textres: List of text results
    :param list[dict] timeres: List of time results
    :param list[dict] sigres: List of signature results
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of assessment report for all results
    :rtype: list[dict]
    """
    progress.update(task, advance=1)
    assessment_report: list[dict] = list(docres)
    progress.update(task, advance=1)
    # append all results to 1 master list
    assessment_report.extend(iter(textres))
    progress.update(task, advance=1)
    # append all results to 1 master list
    assessment_report.extend(iter(timeres))
    progress.update(task, advance=1)
    # append all results to 1 master list
    assessment_report.extend(iter(sigres))
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return assessment_report


def build_assessment_dataframe(assessments: list[dict], task: TaskID, progress: Progress) -> list[dict]:
    """
    Build dataframe for assessment results

    :param list[dict] assessments: List of results
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of per-program pass/fail counts derived from pandas data frames
    :rtype: list[dict]
    """
    # build out dataframe for score calculations
    import pandas as pd  # Optimize import performance

    result_df = pd.DataFrame(assessments)
    progress.update(task, advance=1)
    # fill in NaN cells
    result_df = result_df.fillna(" ")
    progress.update(task, advance=1)
    # loop through the program column and split based on values
    dfs = [d for _, d in result_df.groupby("program")]
    # create an empty list to store dataframe results
    result_list: list[dict] = []
    progress.update(task, advance=1)
    # loop through dataframes
    for dfr in dfs:
        # pull out unique value counts for true
        true_counts = dfr["result"].value_counts()
        true_counts = dict(true_counts)
        # pull out unique value counts for false
        false_counts = dfr["result"].value_counts()
        false_counts = dict(false_counts)
        # create ints to hold count values
        pass_count: int
        fail_count: int
        pass_count = 0
        fail_count = 0
        # loop through true_counts list
        for i in true_counts:
            # if value is true
            if i is True:
                # set equal to pass value
                pass_count = true_counts[i]
            if i is False:
                # set equal to fail value
                fail_count = false_counts[i]
        # output results to list of results
        result_list.append(
            {
                "program": dfr["program"].iloc[0],
                "true": max(pass_count, 0),
                "false": max(fail_count, 0),
                "total": len(dfr),
            }
        )
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return result_list


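The per-program tally above reduces to a groupby plus value_counts; a compact sketch of the same counting with invented sample data:

import pandas as pd

df = pd.DataFrame(
    [
        {"program": "access-control", "result": True},
        {"program": "access-control", "result": False},
        {"program": "access-control", "result": True},
    ]
)

for program, group in df.groupby("program"):
    counts = group["result"].value_counts()
    print(program, int(counts.get(True, 0)), int(counts.get(False, 0)), len(group))
# access-control 2 1 3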
def build_score_data(
    assessments: list[dict], task: TaskID, progress: Progress
) -> Tuple[list[int], list[int], list[int]]:
    """
    Build assessment score lists

    :param list[dict] assessments: list of assessments to build scores
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: Tuple[list of integers of true list, list of integers of false list, list of integers of total list]
    :rtype: Tuple[list[int], list[int], list[int]]
    """
    # create empty lists to hold true/false counts
    true_list: list[int] = []
    progress.update(task, advance=1)
    false_list: list[int] = []
    progress.update(task, advance=1)
    total_list: list[int] = []
    progress.update(task, advance=1)
    # loop through assessment report data
    for item in assessments:
        # append true/false/total values to lists
        true_list.append(item["true"])
        false_list.append(item["false"])
        total_list.append(item["total"])
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return true_list, false_list, total_list


def build_html_table(assessments: list[dict], task: TaskID, progress: Progress) -> list[dict]:
    """
    This will be a dictionary to html table conversion

    :param list[dict] assessments: List of file assessments
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :return: List of assessments with HTML formatted data tables
    :rtype: list[dict]
    """
    import pandas as pd  # Optimize import performance

    output_list: list[dict] = []
    # create a dataframe of a list of dicts
    table_df = pd.DataFrame(data=assessments)
    progress.update(task, advance=1)
    # fill in N/A cells with blank string
    table_df = table_df.fillna(" ")
    progress.update(task, advance=1)
    # split dataframe into list of dataframes
    dfs = [d for _, d in table_df.groupby("program")]
    progress.update(task, advance=1)
    # loop through dataframes
    for table_df in dfs:
        # output dataframe to an HTML table
        output = table_df.to_html()
        progress.update(task, advance=1)
        # replace false values with inline styling conditional to red colors for False values
        output = output.replace("<td>False</td>", '<td style="color:red;">False</td>')
        progress.update(task, advance=1)
        # replace true values with inline styling conditional to green colors for True values
        output = output.replace("<td>True</td>", '<td style="color:green;">True</td>')
        progress.update(task, advance=1)
        # build list of outputs to loop through for API POST calls
        output_list.append({"program": table_df["program"].iloc[0], "html": output})
    progress.update(task, advance=1)
    # return variable for use outside of local scope
    return output_list


def create_child_assessments(
    api: Api,
    project_data: list[dict],
    output: list[dict],
    score_data: Tuple[list[int], list[int], list[int]],
    task: TaskID,
    progress: Progress,
) -> None:
    """
    Create assessments based on results of text parsing tests into RegScale via API

    :param Api api: API object
    :param list[dict] project_data: list of results to parse and upload to RegScale
    :param list[dict] output: HTML output of the results
    :param Tuple[list[int], list[int], list[int]] score_data: list of scores
    :param TaskID task: The task to update on the job_progress
    :param Progress progress: Progress object
    :rtype: None
    """
    # set completion datetime to required format
    completion_date = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
    progress.update(task, advance=1)
    # loop through test projects and make an API call for each
    for i, project in enumerate(project_data):
        # call score calculation function
        test_score = calc_score(i, score_data)
        # if file name matches html output table program name
        if project_data[i]["program"] == output[i]["program"]:
            # build assessment data
            assessment_data = Assessment(
                status="Complete",
                leadAssessorId=api.config["userId"],
                title="Evidence Collection Automation Assessment",
                assessmentType="Inspection",
                projectId=project["id"],
                parentId=project["id"],
                parentModule="projects",
                assessmentReport=output[i]["html"],
                assessmentPlan="Review automated results of evidence collection tests",
                createdById=api.config["userId"],
                lastUpdatedById=api.config["userId"],
                complianceScore=test_score,
                plannedFinish=completion_date,
                plannedStart=completion_date,
                actualFinish=completion_date,
            )
            # if all tests passed above score update POST call information
            if test_score >= api.config["passScore"]:
                # update assessment data API body information
                assessment_data.assessmentResult = "Pass"
            # if all tests failed below score update POST call information
            elif test_score <= api.config["failScore"]:
                # update assessment data API body information
                assessment_data.assessmentResult = "Fail"
            # if some tests passed in between score update POST call information
            else:
                # update assessment data API body information
                assessment_data.assessmentResult = "Partial Pass"
            if assessment_data.create():
                api.logger.info("Child assessment creation was successful.")
            else:
                api.logger.warning("Child assessment creation was not successful.")
    progress.update(task, advance=1)
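Taken together, these helpers form a linear pipeline. The sketch below shows one plausible wiring under a rich Progress bar; it is illustrative only. The driver name, the task total, and the placeholder empty lists standing in for the document, signature, and timestamp checks are assumptions, not the package's actual entry point.

from rich.progress import Progress


def run_evidence_assessment(api, evidence_folder: str) -> None:
    """Illustrative wiring only; assumes the functions above are imported from this module."""
    with Progress() as progress:
        task = progress.add_task("Assessing evidence...", total=40)
        required_texts = set_required_texts(evidence_folder, task, progress)
        parsed = parse_required_text_from_files(evidence_folder, task, progress)
        searches = text_string_search(parsed, required_texts, task, progress)
        text_results = text_assessment_results(searches, required_texts, task, progress)
        # the document, signature, and timestamp checks would feed in the same way;
        # empty lists stand in for their outputs here
        doc_results: list[dict] = []
        sig_results: list[dict] = []
        time_results: list[dict] = []
        report = assessments_report(doc_results, text_results, time_results, sig_results, task, progress)
        scores = build_score_data(build_assessment_dataframe(report, task, progress), task, progress)
        tables = build_html_table(report, task, progress)
        projects = gather_test_project_data(api, evidence_folder, task, progress)
        create_child_assessments(api, projects, tables, scores, task, progress)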