regscale-cli 6.16.0.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli has been flagged as potentially problematic.
- regscale/__init__.py +1 -0
- regscale/airflow/__init__.py +9 -0
- regscale/airflow/azure/__init__.py +9 -0
- regscale/airflow/azure/cli.py +89 -0
- regscale/airflow/azure/upload_dags.py +116 -0
- regscale/airflow/click_dags.py +127 -0
- regscale/airflow/click_mixins.py +82 -0
- regscale/airflow/config.py +25 -0
- regscale/airflow/factories/__init__.py +0 -0
- regscale/airflow/factories/connections.py +58 -0
- regscale/airflow/factories/workflows.py +78 -0
- regscale/airflow/hierarchy.py +88 -0
- regscale/airflow/operators/__init__.py +0 -0
- regscale/airflow/operators/click.py +36 -0
- regscale/airflow/sensors/__init__.py +0 -0
- regscale/airflow/sensors/sql.py +107 -0
- regscale/airflow/sessions/__init__.py +0 -0
- regscale/airflow/sessions/sql/__init__.py +3 -0
- regscale/airflow/sessions/sql/queries.py +64 -0
- regscale/airflow/sessions/sql/sql_server_queries.py +248 -0
- regscale/airflow/tasks/__init__.py +0 -0
- regscale/airflow/tasks/branches.py +22 -0
- regscale/airflow/tasks/cli.py +116 -0
- regscale/airflow/tasks/click.py +73 -0
- regscale/airflow/tasks/debugging.py +9 -0
- regscale/airflow/tasks/groups.py +116 -0
- regscale/airflow/tasks/init.py +60 -0
- regscale/airflow/tasks/states.py +47 -0
- regscale/airflow/tasks/workflows.py +36 -0
- regscale/ansible/__init__.py +9 -0
- regscale/core/__init__.py +0 -0
- regscale/core/app/__init__.py +3 -0
- regscale/core/app/api.py +571 -0
- regscale/core/app/application.py +665 -0
- regscale/core/app/internal/__init__.py +136 -0
- regscale/core/app/internal/admin_actions.py +230 -0
- regscale/core/app/internal/assessments_editor.py +873 -0
- regscale/core/app/internal/catalog.py +316 -0
- regscale/core/app/internal/comparison.py +459 -0
- regscale/core/app/internal/control_editor.py +571 -0
- regscale/core/app/internal/encrypt.py +79 -0
- regscale/core/app/internal/evidence.py +1240 -0
- regscale/core/app/internal/file_uploads.py +151 -0
- regscale/core/app/internal/healthcheck.py +66 -0
- regscale/core/app/internal/login.py +305 -0
- regscale/core/app/internal/migrations.py +240 -0
- regscale/core/app/internal/model_editor.py +1701 -0
- regscale/core/app/internal/poam_editor.py +632 -0
- regscale/core/app/internal/workflow.py +105 -0
- regscale/core/app/logz.py +74 -0
- regscale/core/app/utils/XMLIR.py +258 -0
- regscale/core/app/utils/__init__.py +0 -0
- regscale/core/app/utils/api_handler.py +358 -0
- regscale/core/app/utils/app_utils.py +1110 -0
- regscale/core/app/utils/catalog_utils/__init__.py +0 -0
- regscale/core/app/utils/catalog_utils/common.py +91 -0
- regscale/core/app/utils/catalog_utils/compare_catalog.py +193 -0
- regscale/core/app/utils/catalog_utils/diagnostic_catalog.py +97 -0
- regscale/core/app/utils/catalog_utils/download_catalog.py +103 -0
- regscale/core/app/utils/catalog_utils/update_catalog.py +718 -0
- regscale/core/app/utils/catalog_utils/update_catalog_v2.py +1378 -0
- regscale/core/app/utils/catalog_utils/update_catalog_v3.py +1272 -0
- regscale/core/app/utils/catalog_utils/update_plans.py +334 -0
- regscale/core/app/utils/file_utils.py +238 -0
- regscale/core/app/utils/parser_utils.py +81 -0
- regscale/core/app/utils/pickle_file_handler.py +57 -0
- regscale/core/app/utils/regscale_utils.py +319 -0
- regscale/core/app/utils/report_utils.py +119 -0
- regscale/core/app/utils/variables.py +226 -0
- regscale/core/decorators.py +31 -0
- regscale/core/lazy_group.py +65 -0
- regscale/core/login.py +63 -0
- regscale/core/server/__init__.py +0 -0
- regscale/core/server/flask_api.py +473 -0
- regscale/core/server/helpers.py +373 -0
- regscale/core/server/rest.py +64 -0
- regscale/core/server/static/css/bootstrap.css +6030 -0
- regscale/core/server/static/css/bootstrap.min.css +6 -0
- regscale/core/server/static/css/main.css +176 -0
- regscale/core/server/static/images/regscale-cli.svg +49 -0
- regscale/core/server/static/images/regscale.svg +38 -0
- regscale/core/server/templates/base.html +74 -0
- regscale/core/server/templates/index.html +43 -0
- regscale/core/server/templates/login.html +28 -0
- regscale/core/server/templates/make_base64.html +22 -0
- regscale/core/server/templates/upload_STIG.html +109 -0
- regscale/core/server/templates/upload_STIG_result.html +26 -0
- regscale/core/server/templates/upload_ssp.html +144 -0
- regscale/core/server/templates/upload_ssp_result.html +128 -0
- regscale/core/static/__init__.py +0 -0
- regscale/core/static/regex.py +14 -0
- regscale/core/utils/__init__.py +117 -0
- regscale/core/utils/click_utils.py +13 -0
- regscale/core/utils/date.py +238 -0
- regscale/core/utils/graphql.py +254 -0
- regscale/core/utils/urls.py +23 -0
- regscale/dev/__init__.py +6 -0
- regscale/dev/analysis.py +454 -0
- regscale/dev/cli.py +235 -0
- regscale/dev/code_gen.py +492 -0
- regscale/dev/dirs.py +69 -0
- regscale/dev/docs.py +384 -0
- regscale/dev/monitoring.py +26 -0
- regscale/dev/profiling.py +216 -0
- regscale/exceptions/__init__.py +4 -0
- regscale/exceptions/license_exception.py +7 -0
- regscale/exceptions/validation_exception.py +9 -0
- regscale/integrations/__init__.py +1 -0
- regscale/integrations/commercial/__init__.py +486 -0
- regscale/integrations/commercial/ad.py +433 -0
- regscale/integrations/commercial/amazon/__init__.py +0 -0
- regscale/integrations/commercial/amazon/common.py +106 -0
- regscale/integrations/commercial/aqua/__init__.py +0 -0
- regscale/integrations/commercial/aqua/aqua.py +91 -0
- regscale/integrations/commercial/aws/__init__.py +6 -0
- regscale/integrations/commercial/aws/cli.py +322 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +110 -0
- regscale/integrations/commercial/aws/inventory/base.py +64 -0
- regscale/integrations/commercial/aws/inventory/resources/__init__.py +19 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +234 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +113 -0
- regscale/integrations/commercial/aws/inventory/resources/database.py +101 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +237 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +253 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +240 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +91 -0
- regscale/integrations/commercial/aws/scanner.py +823 -0
- regscale/integrations/commercial/azure/__init__.py +0 -0
- regscale/integrations/commercial/azure/common.py +32 -0
- regscale/integrations/commercial/azure/intune.py +488 -0
- regscale/integrations/commercial/azure/scanner.py +49 -0
- regscale/integrations/commercial/burp.py +78 -0
- regscale/integrations/commercial/cpe.py +144 -0
- regscale/integrations/commercial/crowdstrike.py +1117 -0
- regscale/integrations/commercial/defender.py +1511 -0
- regscale/integrations/commercial/dependabot.py +210 -0
- regscale/integrations/commercial/durosuite/__init__.py +0 -0
- regscale/integrations/commercial/durosuite/api.py +1546 -0
- regscale/integrations/commercial/durosuite/process_devices.py +101 -0
- regscale/integrations/commercial/durosuite/scanner.py +637 -0
- regscale/integrations/commercial/durosuite/variables.py +21 -0
- regscale/integrations/commercial/ecr.py +90 -0
- regscale/integrations/commercial/gcp/__init__.py +237 -0
- regscale/integrations/commercial/gcp/auth.py +96 -0
- regscale/integrations/commercial/gcp/control_tests.py +238 -0
- regscale/integrations/commercial/gcp/variables.py +18 -0
- regscale/integrations/commercial/gitlab.py +332 -0
- regscale/integrations/commercial/grype.py +165 -0
- regscale/integrations/commercial/ibm.py +90 -0
- regscale/integrations/commercial/import_all/__init__.py +0 -0
- regscale/integrations/commercial/import_all/import_all_cmd.py +467 -0
- regscale/integrations/commercial/import_all/scan_file_fingerprints.json +27 -0
- regscale/integrations/commercial/jira.py +1046 -0
- regscale/integrations/commercial/mappings/__init__.py +0 -0
- regscale/integrations/commercial/mappings/csf_controls.json +713 -0
- regscale/integrations/commercial/mappings/nist_800_53_r5_controls.json +1516 -0
- regscale/integrations/commercial/nessus/__init__.py +0 -0
- regscale/integrations/commercial/nessus/nessus_utils.py +429 -0
- regscale/integrations/commercial/nessus/scanner.py +416 -0
- regscale/integrations/commercial/nexpose.py +90 -0
- regscale/integrations/commercial/okta.py +798 -0
- regscale/integrations/commercial/opentext/__init__.py +0 -0
- regscale/integrations/commercial/opentext/click.py +99 -0
- regscale/integrations/commercial/opentext/scanner.py +143 -0
- regscale/integrations/commercial/prisma.py +91 -0
- regscale/integrations/commercial/qualys.py +1462 -0
- regscale/integrations/commercial/salesforce.py +980 -0
- regscale/integrations/commercial/sap/__init__.py +0 -0
- regscale/integrations/commercial/sap/click.py +31 -0
- regscale/integrations/commercial/sap/sysdig/__init__.py +0 -0
- regscale/integrations/commercial/sap/sysdig/click.py +57 -0
- regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +190 -0
- regscale/integrations/commercial/sap/tenable/__init__.py +0 -0
- regscale/integrations/commercial/sap/tenable/click.py +49 -0
- regscale/integrations/commercial/sap/tenable/scanner.py +196 -0
- regscale/integrations/commercial/servicenow.py +1756 -0
- regscale/integrations/commercial/sicura/__init__.py +0 -0
- regscale/integrations/commercial/sicura/api.py +855 -0
- regscale/integrations/commercial/sicura/commands.py +73 -0
- regscale/integrations/commercial/sicura/scanner.py +481 -0
- regscale/integrations/commercial/sicura/variables.py +16 -0
- regscale/integrations/commercial/snyk.py +90 -0
- regscale/integrations/commercial/sonarcloud.py +260 -0
- regscale/integrations/commercial/sqlserver.py +369 -0
- regscale/integrations/commercial/stig_mapper_integration/__init__.py +0 -0
- regscale/integrations/commercial/stig_mapper_integration/click_commands.py +38 -0
- regscale/integrations/commercial/stig_mapper_integration/mapping_engine.py +353 -0
- regscale/integrations/commercial/stigv2/__init__.py +0 -0
- regscale/integrations/commercial/stigv2/ckl_parser.py +349 -0
- regscale/integrations/commercial/stigv2/click_commands.py +95 -0
- regscale/integrations/commercial/stigv2/stig_integration.py +202 -0
- regscale/integrations/commercial/synqly/__init__.py +0 -0
- regscale/integrations/commercial/synqly/assets.py +46 -0
- regscale/integrations/commercial/synqly/ticketing.py +132 -0
- regscale/integrations/commercial/synqly/vulnerabilities.py +223 -0
- regscale/integrations/commercial/synqly_jira.py +840 -0
- regscale/integrations/commercial/tenablev2/__init__.py +0 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +31 -0
- regscale/integrations/commercial/tenablev2/click.py +1584 -0
- regscale/integrations/commercial/tenablev2/scanner.py +504 -0
- regscale/integrations/commercial/tenablev2/stig_parsers.py +140 -0
- regscale/integrations/commercial/tenablev2/utils.py +78 -0
- regscale/integrations/commercial/tenablev2/variables.py +17 -0
- regscale/integrations/commercial/trivy.py +162 -0
- regscale/integrations/commercial/veracode.py +96 -0
- regscale/integrations/commercial/wizv2/WizDataMixin.py +97 -0
- regscale/integrations/commercial/wizv2/__init__.py +0 -0
- regscale/integrations/commercial/wizv2/click.py +429 -0
- regscale/integrations/commercial/wizv2/constants.py +1001 -0
- regscale/integrations/commercial/wizv2/issue.py +361 -0
- regscale/integrations/commercial/wizv2/models.py +112 -0
- regscale/integrations/commercial/wizv2/parsers.py +339 -0
- regscale/integrations/commercial/wizv2/sbom.py +115 -0
- regscale/integrations/commercial/wizv2/scanner.py +416 -0
- regscale/integrations/commercial/wizv2/utils.py +796 -0
- regscale/integrations/commercial/wizv2/variables.py +39 -0
- regscale/integrations/commercial/wizv2/wiz_auth.py +159 -0
- regscale/integrations/commercial/xray.py +91 -0
- regscale/integrations/integration/__init__.py +2 -0
- regscale/integrations/integration/integration.py +26 -0
- regscale/integrations/integration/inventory.py +17 -0
- regscale/integrations/integration/issue.py +100 -0
- regscale/integrations/integration_override.py +149 -0
- regscale/integrations/public/__init__.py +103 -0
- regscale/integrations/public/cisa.py +641 -0
- regscale/integrations/public/criticality_updater.py +70 -0
- regscale/integrations/public/emass.py +411 -0
- regscale/integrations/public/emass_slcm_import.py +697 -0
- regscale/integrations/public/fedramp/__init__.py +0 -0
- regscale/integrations/public/fedramp/appendix_parser.py +548 -0
- regscale/integrations/public/fedramp/click.py +479 -0
- regscale/integrations/public/fedramp/components.py +714 -0
- regscale/integrations/public/fedramp/docx_parser.py +259 -0
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +1124 -0
- regscale/integrations/public/fedramp/fedramp_common.py +3181 -0
- regscale/integrations/public/fedramp/fedramp_docx.py +388 -0
- regscale/integrations/public/fedramp/fedramp_five.py +2343 -0
- regscale/integrations/public/fedramp/fedramp_traversal.py +138 -0
- regscale/integrations/public/fedramp/import_fedramp_r4_ssp.py +279 -0
- regscale/integrations/public/fedramp/import_workbook.py +495 -0
- regscale/integrations/public/fedramp/inventory_items.py +244 -0
- regscale/integrations/public/fedramp/mappings/__init__.py +0 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +7388 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r5_params.json +8636 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +9605 -0
- regscale/integrations/public/fedramp/mappings/system_roles.py +34 -0
- regscale/integrations/public/fedramp/mappings/user.py +175 -0
- regscale/integrations/public/fedramp/mappings/values.py +141 -0
- regscale/integrations/public/fedramp/markdown_parser.py +150 -0
- regscale/integrations/public/fedramp/metadata.py +689 -0
- regscale/integrations/public/fedramp/models/__init__.py +59 -0
- regscale/integrations/public/fedramp/models/leveraged_auth_new.py +168 -0
- regscale/integrations/public/fedramp/models/poam_importer.py +522 -0
- regscale/integrations/public/fedramp/parts_mapper.py +107 -0
- regscale/integrations/public/fedramp/poam/__init__.py +0 -0
- regscale/integrations/public/fedramp/poam/scanner.py +851 -0
- regscale/integrations/public/fedramp/properties.py +201 -0
- regscale/integrations/public/fedramp/reporting.py +84 -0
- regscale/integrations/public/fedramp/resources.py +496 -0
- regscale/integrations/public/fedramp/rosetta.py +110 -0
- regscale/integrations/public/fedramp/ssp_logger.py +87 -0
- regscale/integrations/public/fedramp/system_characteristics.py +922 -0
- regscale/integrations/public/fedramp/system_control_implementations.py +582 -0
- regscale/integrations/public/fedramp/system_implementation.py +190 -0
- regscale/integrations/public/fedramp/xml_utils.py +87 -0
- regscale/integrations/public/nist_catalog.py +275 -0
- regscale/integrations/public/oscal.py +1946 -0
- regscale/integrations/public/otx.py +169 -0
- regscale/integrations/scanner_integration.py +2692 -0
- regscale/integrations/variables.py +25 -0
- regscale/models/__init__.py +7 -0
- regscale/models/app_models/__init__.py +5 -0
- regscale/models/app_models/catalog_compare.py +213 -0
- regscale/models/app_models/click.py +252 -0
- regscale/models/app_models/datetime_encoder.py +21 -0
- regscale/models/app_models/import_validater.py +321 -0
- regscale/models/app_models/mapping.py +260 -0
- regscale/models/app_models/pipeline.py +37 -0
- regscale/models/click_models.py +413 -0
- regscale/models/config.py +154 -0
- regscale/models/email_style.css +67 -0
- regscale/models/hierarchy.py +8 -0
- regscale/models/inspect_models.py +79 -0
- regscale/models/integration_models/__init__.py +0 -0
- regscale/models/integration_models/amazon_models/__init__.py +0 -0
- regscale/models/integration_models/amazon_models/inspector.py +262 -0
- regscale/models/integration_models/amazon_models/inspector_scan.py +206 -0
- regscale/models/integration_models/aqua.py +247 -0
- regscale/models/integration_models/azure_alerts.py +255 -0
- regscale/models/integration_models/base64.py +23 -0
- regscale/models/integration_models/burp.py +433 -0
- regscale/models/integration_models/burp_models.py +128 -0
- regscale/models/integration_models/cisa_kev_data.json +19333 -0
- regscale/models/integration_models/defender_data.py +93 -0
- regscale/models/integration_models/defenderimport.py +143 -0
- regscale/models/integration_models/drf.py +443 -0
- regscale/models/integration_models/ecr_models/__init__.py +0 -0
- regscale/models/integration_models/ecr_models/data.py +69 -0
- regscale/models/integration_models/ecr_models/ecr.py +239 -0
- regscale/models/integration_models/flat_file_importer.py +1079 -0
- regscale/models/integration_models/grype_import.py +247 -0
- regscale/models/integration_models/ibm.py +126 -0
- regscale/models/integration_models/implementation_results.py +85 -0
- regscale/models/integration_models/nexpose.py +140 -0
- regscale/models/integration_models/prisma.py +202 -0
- regscale/models/integration_models/qualys.py +720 -0
- regscale/models/integration_models/qualys_scanner.py +160 -0
- regscale/models/integration_models/sbom/__init__.py +0 -0
- regscale/models/integration_models/sbom/cyclone_dx.py +139 -0
- regscale/models/integration_models/send_reminders.py +620 -0
- regscale/models/integration_models/snyk.py +155 -0
- regscale/models/integration_models/synqly_models/__init__.py +0 -0
- regscale/models/integration_models/synqly_models/capabilities.json +1 -0
- regscale/models/integration_models/synqly_models/connector_types.py +22 -0
- regscale/models/integration_models/synqly_models/connectors/__init__.py +7 -0
- regscale/models/integration_models/synqly_models/connectors/assets.py +97 -0
- regscale/models/integration_models/synqly_models/connectors/ticketing.py +583 -0
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +169 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +331 -0
- regscale/models/integration_models/synqly_models/param.py +72 -0
- regscale/models/integration_models/synqly_models/synqly_model.py +733 -0
- regscale/models/integration_models/synqly_models/tenants.py +39 -0
- regscale/models/integration_models/tenable_models/__init__.py +0 -0
- regscale/models/integration_models/tenable_models/integration.py +187 -0
- regscale/models/integration_models/tenable_models/models.py +513 -0
- regscale/models/integration_models/trivy_import.py +231 -0
- regscale/models/integration_models/veracode.py +217 -0
- regscale/models/integration_models/xray.py +135 -0
- regscale/models/locking.py +100 -0
- regscale/models/platform.py +110 -0
- regscale/models/regscale_models/__init__.py +67 -0
- regscale/models/regscale_models/assessment.py +570 -0
- regscale/models/regscale_models/assessment_plan.py +52 -0
- regscale/models/regscale_models/asset.py +567 -0
- regscale/models/regscale_models/asset_mapping.py +190 -0
- regscale/models/regscale_models/case.py +42 -0
- regscale/models/regscale_models/catalog.py +261 -0
- regscale/models/regscale_models/cci.py +46 -0
- regscale/models/regscale_models/change.py +167 -0
- regscale/models/regscale_models/checklist.py +372 -0
- regscale/models/regscale_models/comment.py +49 -0
- regscale/models/regscale_models/compliance_settings.py +112 -0
- regscale/models/regscale_models/component.py +412 -0
- regscale/models/regscale_models/component_mapping.py +65 -0
- regscale/models/regscale_models/control.py +38 -0
- regscale/models/regscale_models/control_implementation.py +1128 -0
- regscale/models/regscale_models/control_objective.py +261 -0
- regscale/models/regscale_models/control_parameter.py +100 -0
- regscale/models/regscale_models/control_test.py +34 -0
- regscale/models/regscale_models/control_test_plan.py +75 -0
- regscale/models/regscale_models/control_test_result.py +52 -0
- regscale/models/regscale_models/custom_field.py +245 -0
- regscale/models/regscale_models/data.py +109 -0
- regscale/models/regscale_models/data_center.py +40 -0
- regscale/models/regscale_models/deviation.py +203 -0
- regscale/models/regscale_models/email.py +97 -0
- regscale/models/regscale_models/evidence.py +47 -0
- regscale/models/regscale_models/evidence_mapping.py +40 -0
- regscale/models/regscale_models/facility.py +59 -0
- regscale/models/regscale_models/file.py +382 -0
- regscale/models/regscale_models/filetag.py +37 -0
- regscale/models/regscale_models/form_field_value.py +94 -0
- regscale/models/regscale_models/group.py +169 -0
- regscale/models/regscale_models/implementation_objective.py +335 -0
- regscale/models/regscale_models/implementation_option.py +275 -0
- regscale/models/regscale_models/implementation_role.py +33 -0
- regscale/models/regscale_models/incident.py +177 -0
- regscale/models/regscale_models/interconnection.py +43 -0
- regscale/models/regscale_models/issue.py +1176 -0
- regscale/models/regscale_models/leveraged_authorization.py +125 -0
- regscale/models/regscale_models/line_of_inquiry.py +52 -0
- regscale/models/regscale_models/link.py +205 -0
- regscale/models/regscale_models/meta_data.py +64 -0
- regscale/models/regscale_models/mixins/__init__.py +0 -0
- regscale/models/regscale_models/mixins/parent_cache.py +124 -0
- regscale/models/regscale_models/module.py +224 -0
- regscale/models/regscale_models/modules.py +191 -0
- regscale/models/regscale_models/objective.py +14 -0
- regscale/models/regscale_models/parameter.py +87 -0
- regscale/models/regscale_models/ports_protocol.py +81 -0
- regscale/models/regscale_models/privacy.py +89 -0
- regscale/models/regscale_models/profile.py +50 -0
- regscale/models/regscale_models/profile_link.py +68 -0
- regscale/models/regscale_models/profile_mapping.py +124 -0
- regscale/models/regscale_models/project.py +63 -0
- regscale/models/regscale_models/property.py +278 -0
- regscale/models/regscale_models/question.py +85 -0
- regscale/models/regscale_models/questionnaire.py +87 -0
- regscale/models/regscale_models/questionnaire_instance.py +177 -0
- regscale/models/regscale_models/rbac.py +132 -0
- regscale/models/regscale_models/reference.py +86 -0
- regscale/models/regscale_models/regscale_model.py +1643 -0
- regscale/models/regscale_models/requirement.py +29 -0
- regscale/models/regscale_models/risk.py +274 -0
- regscale/models/regscale_models/sbom.py +54 -0
- regscale/models/regscale_models/scan_history.py +436 -0
- regscale/models/regscale_models/search.py +53 -0
- regscale/models/regscale_models/security_control.py +132 -0
- regscale/models/regscale_models/security_plan.py +204 -0
- regscale/models/regscale_models/software_inventory.py +159 -0
- regscale/models/regscale_models/stake_holder.py +64 -0
- regscale/models/regscale_models/stig.py +647 -0
- regscale/models/regscale_models/supply_chain.py +152 -0
- regscale/models/regscale_models/system_role.py +188 -0
- regscale/models/regscale_models/system_role_external_assignment.py +40 -0
- regscale/models/regscale_models/tag.py +37 -0
- regscale/models/regscale_models/tag_mapping.py +19 -0
- regscale/models/regscale_models/task.py +133 -0
- regscale/models/regscale_models/threat.py +196 -0
- regscale/models/regscale_models/user.py +175 -0
- regscale/models/regscale_models/user_group.py +55 -0
- regscale/models/regscale_models/vulnerability.py +242 -0
- regscale/models/regscale_models/vulnerability_mapping.py +162 -0
- regscale/models/regscale_models/workflow.py +55 -0
- regscale/models/regscale_models/workflow_action.py +34 -0
- regscale/models/regscale_models/workflow_instance.py +269 -0
- regscale/models/regscale_models/workflow_instance_step.py +114 -0
- regscale/models/regscale_models/workflow_template.py +58 -0
- regscale/models/regscale_models/workflow_template_step.py +45 -0
- regscale/regscale.py +815 -0
- regscale/utils/__init__.py +7 -0
- regscale/utils/b64conversion.py +14 -0
- regscale/utils/click_utils.py +118 -0
- regscale/utils/decorators.py +48 -0
- regscale/utils/dict_utils.py +59 -0
- regscale/utils/files.py +79 -0
- regscale/utils/fxns.py +30 -0
- regscale/utils/graphql_client.py +113 -0
- regscale/utils/lists.py +16 -0
- regscale/utils/numbers.py +12 -0
- regscale/utils/shell.py +148 -0
- regscale/utils/string.py +121 -0
- regscale/utils/synqly_utils.py +165 -0
- regscale/utils/threading/__init__.py +8 -0
- regscale/utils/threading/threadhandler.py +131 -0
- regscale/utils/threading/threadsafe_counter.py +47 -0
- regscale/utils/threading/threadsafe_dict.py +242 -0
- regscale/utils/threading/threadsafe_list.py +83 -0
- regscale/utils/version.py +104 -0
- regscale/validation/__init__.py +0 -0
- regscale/validation/address.py +37 -0
- regscale/validation/record.py +48 -0
- regscale/visualization/__init__.py +5 -0
- regscale/visualization/click.py +34 -0
- regscale_cli-6.16.0.0.dist-info/LICENSE +21 -0
- regscale_cli-6.16.0.0.dist-info/METADATA +659 -0
- regscale_cli-6.16.0.0.dist-info/RECORD +481 -0
- regscale_cli-6.16.0.0.dist-info/WHEEL +5 -0
- regscale_cli-6.16.0.0.dist-info/entry_points.txt +6 -0
- regscale_cli-6.16.0.0.dist-info/top_level.txt +2 -0
- tests/fixtures/__init__.py +2 -0
- tests/fixtures/api.py +87 -0
- tests/fixtures/models.py +91 -0
- tests/fixtures/test_fixture.py +144 -0
- tests/mocks/__init__.py +0 -0
- tests/mocks/objects.py +3 -0
- tests/mocks/response.py +32 -0
- tests/mocks/xml.py +13 -0
- tests/regscale/__init__.py +0 -0
- tests/regscale/core/__init__.py +0 -0
- tests/regscale/core/test_api.py +232 -0
- tests/regscale/core/test_app.py +406 -0
- tests/regscale/core/test_login.py +37 -0
- tests/regscale/core/test_logz.py +66 -0
- tests/regscale/core/test_sbom_generator.py +87 -0
- tests/regscale/core/test_validation_utils.py +163 -0
- tests/regscale/core/test_version.py +78 -0
- tests/regscale/models/__init__.py +0 -0
- tests/regscale/models/test_asset.py +71 -0
- tests/regscale/models/test_config.py +26 -0
- tests/regscale/models/test_control_implementation.py +27 -0
- tests/regscale/models/test_import.py +97 -0
- tests/regscale/models/test_issue.py +36 -0
- tests/regscale/models/test_mapping.py +52 -0
- tests/regscale/models/test_platform.py +31 -0
- tests/regscale/models/test_regscale_model.py +346 -0
- tests/regscale/models/test_report.py +32 -0
- tests/regscale/models/test_tenable_integrations.py +118 -0
- tests/regscale/models/test_user_model.py +121 -0
- tests/regscale/test_about.py +19 -0
- tests/regscale/test_authorization.py +65 -0
regscale/integrations/public/fedramp/fedramp_five.py
@@ -0,0 +1,2343 @@
"""fedramp v5 docx parser"""

import datetime
import logging
import os
import re
import sys
import zipfile
from dataclasses import dataclass
from tempfile import gettempdir
from typing import Any, Dict, List, Optional, Tuple, Union

from dateutil.relativedelta import relativedelta
from packaging.version import Version

from regscale.core.app.api import Api
from regscale.core.app.application import Application
from regscale.core.app.utils.app_utils import error_and_exit, get_current_datetime
from regscale.core.utils.date import datetime_str
from regscale.integrations.public.fedramp.appendix_parser import AppendixAParser
from regscale.integrations.public.fedramp.docx_parser import SSPDocParser
from regscale.integrations.public.fedramp.markdown_parser import MDDocParser
from regscale.integrations.public.fedramp.rosetta import RosettaStone
from regscale.models import (
    ControlImplementation,
    ControlImplementationStatus,
    ControlObjective,
    ControlParameter,
    File,
    ImplementationObjective,
    ImplementationObjectiveResponsibility,
    ImplementationOption,
    LeveragedAuthorization,
    Parameter,
    PortsProtocol,
    Profile,
    ProfileMapping,
    SecurityControl,
    SecurityPlan,
    StakeHolder,
    SystemRole,
    User,
)
from regscale.utils.version import RegscaleVersion

SERVICE_PROVIDER_CORPORATE = "Service Provider Corporate"
DEFAULT_STATUS = ControlImplementationStatus.NotImplemented
SYSTEM_DESCRIPTION = "System Description"
AUTHORIZATION_BOUNDARY = "Authorization Boundary"
NETWORK_ARCHITECTURE = "System and Network Architecture"
DATA_FLOW = "Data Flows"
ENVIRONMENT = "System Environment and Inventory"

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


IN_MEMORY_ROLES_PROCESSED = []
# precompile part pattern
PART_PATTERN = re.compile(r"(<p>Part\s[a-zA-Z]:</p>.*?)(?=<p>Part\s[a-zA-Z]:</p>|$)", re.DOTALL)

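# ---------------------------------------------------------------------------
# Illustrative aside (editor's note, not part of the packaged file): a minimal
# sketch of what PART_PATTERN captures. Each "<p>Part x:</p>" marker is
# returned together with the text that follows it, up to the next marker or
# the end of the string. The sample HTML below is hypothetical.
#
#   sample_html = "<p>Part a:</p>Policy text.<p>Part b:</p>Procedure text."
#   PART_PATTERN.findall(sample_html)
#   # -> ['<p>Part a:</p>Policy text.', '<p>Part b:</p>Procedure text.']
# ---------------------------------------------------------------------------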
def get_fedramp_compliance_setting() -> Optional[Any]:
    """
    Quick lookup for the FedRAMP Compliance Setting

    :return: The FedRAMP Compliance Setting
    :rtype: Optional[Any]
    """
    # We have to be generic here, as ComplianceSetting may not exist in the database
    fedramp_comp = None
    try:
        from regscale.models.regscale_models.compliance_settings import ComplianceSettings

        setting = ComplianceSettings.get_by_current_tenant()
        logger.debug("Using new ComplianceSettings API")
        fedramp_comp = next(
            comp for comp in setting if comp.title == "FedRAMP Compliance Setting"
        )  # if this raises a StopIteration, we have a problem, Houston
    except Exception as e:
        logger.debug(f"Error getting Compliance Setting: {e}")
    return fedramp_comp

@dataclass
class Person:
    """
    Represents a person.
    """

    name: str
    phone: str
    email: str
    title: str
    user_id: Optional[str] = None


@dataclass
class Organization:
    """
    Represents an organization.
    """

    name: str
    address: str
    point_of_contact: Person


@dataclass
class PreparedBy:
    """
    Represents the prepared by information.
    """

    name: str
    street: str
    building: str
    city_state_zip: str


@dataclass
class SSPDoc:
    """
    Represents an SSP document.
    """

    name: str
    fedramp_id: str
    service_model: str
    digital_identity_level: str
    fips_199_level: str
    date_fully_operational: str
    deployment_model: str
    authorization_path: str
    description: str
    expiration_date: Optional[str] = None
    date_submitted: Optional[str] = None
    approval_date: Optional[str] = None
    csp_name: Optional[str] = None
    csp_street: Optional[str] = None
    csp_building: Optional[str] = None
    csp_city_state_zip: Optional[str] = None
    three_pao_name: Optional[str] = None
    three_pao_street: Optional[str] = None
    three_pao_building: Optional[str] = None
    three_pao_city_state_zip: Optional[str] = None
    version: str = "1.0"


@dataclass
class LeveragedService:
    """
    Represents a leveraged service.
    """

    fedramp_csp_name: str
    cso_name: str
    auth_type_fedramp_id: str
    agreement_type: str
    impact_level: str
    data_types: str
    authorized_user_authentication: str


@dataclass
class LeveragedServices:
    """
    Represents a list of leveraged services.
    """

    leveraged_services: List[LeveragedService]


@dataclass
class PortsAndProtocolData:
    """
    Represents ports and protocol data.
    """

    service: str
    port: int
    start_port: int
    end_port: int
    protocol: str
    ref_number: str
    purpose: str
    used_by: str


@dataclass
class ParamData:
    """
    Represents parameter data.
    """

    control_id: str
    parameter: Optional[str]
    parameter_value: str

def process_company_info(list_of_dicts: List[Dict[str, str]]) -> Organization:
    """
    Processes the company information table.
    :param List[Dict[str, str]] list_of_dicts: The table to process.
    :return: An Organization object representing the company information.
    :rtype: Organization
    """
    company_info = merge_dicts(list_of_dicts, True)

    person = Person(
        name=company_info.get("Name"),
        phone=company_info.get("Phone Number"),
        email=company_info.get("Email Address"),
        title="System Owner",
    )

    return Organization(
        name=company_info.get("Company / Organization"),
        address=company_info.get("Address"),
        point_of_contact=person,
    )


def process_ssp_info(list_of_dicts: List[Dict[str, str]]) -> SSPDoc:
    """
    Processes the SSP information table.

    :param List[Dict[str, str]] list_of_dicts: The table to process.
    :return: An SSPDoc object representing the SSP information.
    :rtype: SSPDoc
    """
    ssp_info = merge_dicts(list_of_dicts, True)
    # print(ssp_info)

    today_dt = datetime.date.today()
    expiration_date = datetime.date(today_dt.year + 3, today_dt.month, today_dt.day).strftime("%Y-%m-%d %H:%M:%S")

    return SSPDoc(
        name=ssp_info.get("CSP Name:"),
        fedramp_id=ssp_info.get("FedRAMP Package ID:"),
        service_model=ssp_info.get("Service Model:"),
        digital_identity_level=ssp_info.get("Digital Identity Level (DIL) Determination (SSP Appendix E):"),
        fips_199_level=ssp_info.get("FIPS PUB 199 Level (SSP Appendix K):"),
        date_fully_operational=ssp_info.get("Fully Operational as of:"),
        deployment_model=ssp_info.get("Deployment Model:"),
        authorization_path=ssp_info.get("Authorization Path:"),
        description=ssp_info.get("General System Description:"),
        expiration_date=ssp_info.get("Expiration Date:", expiration_date),
        date_submitted=ssp_info.get("Date Submitted:", get_current_datetime()),
        approval_date=ssp_info.get("Approval Date:", get_current_datetime()),
    )


def build_leveraged_services(leveraged_data: List[Dict[str, str]]) -> List[LeveragedService]:
    """
    Processes the leveraged services table.

    :param List[Dict[str, str]] leveraged_data: The table to process.
    :return: A list of LeveragedService objects representing the leveraged services.
    :rtype: List[LeveragedService]
    """
    services = []
    for row in leveraged_data:
        service = LeveragedService(
            fedramp_csp_name=row.get("CSP/CSO Name (Name on FedRAMP Marketplace)"),
            cso_name=row.get(
                "CSO Service (Names of services and features - services from a single CSO can be all listed in one cell)"
            ),
            auth_type_fedramp_id=row.get("Authorization Type (JAB or Agency) and FedRAMP Package ID #"),
            agreement_type=row.get("Nature of Agreement"),
            impact_level=row.get("Impact Level (High, Moderate, Low, LI-SaaS)"),
            data_types=row.get("Data Types"),
            authorized_user_authentication=row.get("Authorized Users/Authentication"),
        )
        services.append(service)

    return services

def process_person_info(list_of_dicts: List[Dict[str, str]]) -> Person:
    """
    Processes the person information table.
    :param List[Dict[str, str]] list_of_dicts: The table to process.
    :return: A Person object representing the person information.
    :rtype: Person
    """
    person_data = merge_dicts(list_of_dicts, True)
    person = Person(
        name=person_data.get("Name"),
        phone=person_data.get("Phone Number"),
        email=person_data.get("Email Address"),
        title=person_data.get("Title"),
    )
    return person


def process_ports_and_protocols(list_of_dicts: List[Dict[str, str]]) -> List[PortsAndProtocolData]:
    """
    Processes the ports and protocols table.
    :param List[Dict[str, str]] list_of_dicts: The table to process.
    :return: A list of PortsAndProtocolData objects representing the ports and protocols information.
    :rtype: List[PortsAndProtocolData]
    """
    ports_an_protocols = []
    for row in list_of_dicts:
        try:
            port_col = "Port #"
            ports_an_protocols.append(
                PortsAndProtocolData(
                    service=row.get("Service Name"),
                    port=int(row.get(port_col)) if "-" not in row.get(port_col) else 0,
                    start_port=(
                        int(row.get(port_col).split("-")[0]) if "-" in row.get(port_col) else row.get(port_col, 0)
                    ),
                    end_port=int(row.get(port_col).split("-")[1]) if "-" in row.get(port_col) else row.get(port_col, 0),
                    protocol=row.get("Transport Protocol"),
                    ref_number=row.get("Reference #"),
                    purpose=row.get("Purpose"),
                    used_by=row.get("Used By"),
                )
            )
        except ValueError:
            logger.warning(f"Skipping bad data unable to processing row: {row}")

    return ports_an_protocols

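# ---------------------------------------------------------------------------
# Illustrative aside (editor's note, not part of the packaged file): how a
# "Port #" cell is mapped by process_ports_and_protocols above. A single value
# such as "443" becomes port=443 with start/end left as the raw cell value,
# while a range such as "8000-8080" yields port=0, start_port=8000 and
# end_port=8080. The sample rows below are hypothetical.
#
#   rows = [
#       {"Service Name": "HTTPS", "Port #": "443", "Transport Protocol": "TCP",
#        "Reference #": "1", "Purpose": "Web UI", "Used By": "All users"},
#       {"Service Name": "App API", "Port #": "8000-8080", "Transport Protocol": "TCP",
#        "Reference #": "2", "Purpose": "Internal API", "Used By": "Services"},
#   ]
#   parsed = process_ports_and_protocols(rows)
#   # parsed[0].port == 443; parsed[1].start_port == 8000 and parsed[1].end_port == 8080
# ---------------------------------------------------------------------------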
def merge_dicts(list_of_dicts: List[Dict], prioritize_first: bool = False) -> dict:
    """
    Merges a list of dictionaries into a single dictionary.
    :param List[Dict] list_of_dicts: The list of dictionaries to merge.
    :param bool prioritize_first: Whether to prioritize the first dictionary in the list.
    :return: A single dictionary containing the merged data.
    :rtype: dict
    """

    merged_dict = {}
    for d in list_of_dicts:
        if prioritize_first:
            merged_dict.update(d, **merged_dict)  # Merge with priority to earlier values
        else:
            merged_dict.update(d)  # Simple merge

    return merged_dict

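# ---------------------------------------------------------------------------
# Illustrative aside (editor's note, not part of the packaged file): the effect
# of prioritize_first in merge_dicts above. update(d, **merged_dict) re-applies
# the already-merged keys after each new dict, so the first occurrence of a key
# wins; the default merge lets later dicts overwrite earlier ones. The sample
# rows below are hypothetical.
#
#   rows = [{"Name": "Alice", "Phone Number": "555-0100"},
#           {"Name": "Bob", "Email Address": "bob@example.com"}]
#   merge_dicts(rows)        # {'Name': 'Bob', 'Phone Number': '555-0100', 'Email Address': 'bob@example.com'}
#   merge_dicts(rows, True)  # {'Name': 'Alice', 'Phone Number': '555-0100', 'Email Address': 'bob@example.com'}
# ---------------------------------------------------------------------------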
def identify_and_process_tables(tables: List[Dict[str, Any]]):
    """
    Identifies and processes tables based on their content keywords and processes them accordingly.
    :param List[Dict[str, Any]] tables: The tables to process.
    :return: A dictionary containing the processed data.
    :rtype: Dict[str, Any]
    """
    processed_data = {
        "ssp_doc": None,
        "org": None,
        "prepared_by": None,
        "stakeholders": [],
        "services": [],
        "ports_and_protocols": [],
    }
    # logger.info(tables)
    for item in tables:
        process_table_based_on_keys(item, processed_data)
        logger.debug(item.get("preceding_text"))
        logger.debug(item.get("table_data"))

    owner, isso = identify_owner_or_isso(processed_data.get("stakeholders", []))
    logger.debug(f"Owner: {owner}")
    if owner:
        processed_data["owner"] = owner
    if isso:
        processed_data["isso"] = isso

    return processed_data


def identify_owner_or_isso(people: List[Person]) -> Tuple[Person, Person]:
    """
    Identifies the ISSO and the Owner from a list of people.

    :param List[Person] people: A list of Person objects representing the stakeholders.
    :returns: A tuple containing the ISSO and the Owner.
    :rtype: Tuple[Person, Person]
    """
    logger.info(f"Found People: {len(people)}")
    existing_users = []
    try:
        existing_users = User.get_list()
    except Exception as e:
        logger.warning(f"Error getting Users: {e}")
    logger.debug(f"Found Users: {existing_users}")
    owner, isso = find_owner_and_isso(people)

    logger.debug(f"Found owner: {owner}")
    logger.debug(f"Found isso: {isso}")
    if owner or isso:
        logger.debug(f"Found existing Users: {len(existing_users)}")
        existing_users_dict = {u.email: u for u in existing_users if hasattr(u, "email")}
        if existing_users_dict and owner and isso:
            try:
                owner = existing_users_dict.get(owner.email)
                isso = existing_users_dict.get(isso.email)
            except Exception as e:
                logger.warning(f"Error getting Users: {e}")
    return owner, isso


def find_owner_and_isso(
    people: List[Person],
) -> Tuple[Optional[Person], Optional[Person]]:
    """
    Identifies the ISSO and the Owner from a list of people.

    :param List[Person] people: A list of Person objects representing the stakeholders.
    :returns: A tuple containing the ISSO and the Owner.
    :rtype: Tuple[Optional[Person], Optional[Person]]
    """
    owner = None
    isso = None
    try:
        for person in people:
            percent_match_owner = "System Owner".lower() in person.title.lower()
            percent_match_isso = "Information System Security Officer".lower() in person.title.lower()

            logger.debug(f"Owner match: {percent_match_owner}")
            logger.debug(f"Isso match: {percent_match_isso}")
            if percent_match_owner:
                owner = person
            if percent_match_isso:
                isso = person
    except Exception as e:
        logger.warning(f"Error finding Owner and ISSO: {e}")
    return owner, isso

def process_table_based_on_keys(table: any, processed_data: Dict[str, Any]):
    """
    Processes a table based on the keys present in the first row of the table.
    :param any table: The table to process.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    """
    try:
        key = table.get("preceding_text")
        merged_dict = merge_dicts(table.get("table_data"), True)
        table_data = table.get("table_data")
        fetch_ports(key, processed_data, table_data, merged_dict)
        fetch_stakeholders(merged_dict, table_data, processed_data, key)
        fetch_services(merged_dict, table_data, processed_data)
        fetch_ssp_info(merged_dict, table_data, processed_data, key)
        fetch_prep_data(table_data, processed_data, key)
    except Exception as e:
        logger.warning(f"Error Processing Table - {table.get('preceding_text', '') if table else ''}: {e}")


def fetch_prep_data(
    table_data: List[Dict[str, str]],
    processed_data: Dict[str, Any],
    key: str,
):
    """
    Fetches Prepared By and Prepared For information from the table.
    :param str key: The key to check for.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    :param List[Dict[str, str]] table_data: The table data to process.

    """
    if "Prepared by" in key:
        logger.info("Processing Prepared By")
        processed_data["prepared_by"] = process_prepared_by(table_data)
    if "Prepared for" in key:
        logger.info("Processing Prepared By")
        processed_data["prepared_for"] = process_prepared_by(table_data)


def fetch_ssp_info(
    merged_dict: Dict[str, str],
    table_data: List[Dict[str, str]],
    processed_data: Dict[str, Any],
    key: str,
):
    """
    Fetches SSP information from the table.
    :param str key: The key to check for.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    :param List[Dict[str, str]] table_data: The table data to process.
    :param Dict[str, str] merged_dict: The merged dictionary of the table data.

    """
    if "CSP Name:" in merged_dict:
        logger.info("Processing SSP Doc")
        processed_data["ssp_doc"] = process_ssp_info(table_data)
    if "Document Revision History" in key:
        logger.info("Processing Version")
        processed_data["version"] = get_max_version(entries=table_data)
        if processed_data["ssp_doc"]:
            processed_data["ssp_doc"].version = processed_data.get("version")
        logger.info(f"Processed Version: {processed_data['version']}")


def fetch_services(
    merged_dict: Dict[str, str],
    table_data: List[Dict[str, str]],
    processed_data: Dict[str, Any],
):
    """
    Fetches services data from the table.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    :param List[Dict[str, str]] table_data: The table data to process.
    :param Dict[str, str] merged_dict: The merged dictionary of the table data.

    """
    if "CSP/CSO Name (Name on FedRAMP Marketplace)" in merged_dict:
        logger.info("Processing Leveraged Services")
        processed_data["services"] = build_leveraged_services(table_data)


def fetch_stakeholders(
    merged_dict: Dict[str, str],
    table_data: List[Dict[str, str]],
    processed_data: Dict[str, Any],
    key: str,
):
    """
    Fetches stakeholders data from the table.
    :param str key: The key to check for.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    :param List[Dict[str, str]] table_data: The table data to process.
    :param Dict[str, str] merged_dict: The merged dictionary of the table data.

    """
    if "Name" in merged_dict and "Company / Organization" in table_data[0]:
        logger.info("Processing Organization and Stakeholders")
        process_organization_and_stakeholders(table_data, processed_data)
    if ("ISSO (or Equivalent) Point of Contact" in key) or ("Table 4.1" in key):
        logger.info("Processing Stakeholders")
        person = process_person_info(table_data)
        processed_data["stakeholders"].append(person)


def fetch_ports(
    key: str,
    processed_data: Dict[str, Any],
    table_data: List[Dict[str, str]],
    merged_dict: Dict[str, str],
):
    """
    Fetches ports and protocols data from the table.
    :param str key: The key to check for.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    :param List[Dict[str, str]] table_data: The table data to process.
    :param Dict[str, str] merged_dict: The merged dictionary of the table data.

    """
    if "Services, Ports, and Protocols" in key and "Port #" in merged_dict:
        logger.info("Processing Ports and Protocols")
        processed_data["ports_and_protocols"] = process_ports_and_protocols(table_data)


def process_prepared_by(table: List[Dict[str, str]]) -> PreparedBy:
    """
    Processes the prepared by information from the table.
    :param List[Dict[str, str]] table: The table to process.
    :return: A PreparedBy object representing the prepared by information.
    :rtype: PreparedBy
    """
    prepared_by = merge_dicts(table, True)
    return PreparedBy(
        name=prepared_by.get("Organization Name"),
        street=prepared_by.get("Street Address"),
        building=prepared_by.get("Suite/Room/Building"),
        city_state_zip=prepared_by.get("City, State, Zip"),
    )


def process_version(table: List[Dict[str, str]]) -> str:
    """
    Processes the version information from the table.
    :param List[Dict[str, str]] table: The table to process.
    :return: The version number.
    :rtype: str
    """
    # Assuming the version is stored under a key named "Version" in one of the table rows
    return get_max_version(table)


def process_organization_and_stakeholders(table: List[Dict[str, str]], processed_data: Dict[str, Any]):
    """
    Processes organization and stakeholders information from the table.
    :param List[Dict[str, str]] table: The table to process.
    :param Dict[str, Any] processed_data: The dictionary where processed data is stored.
    """
    processed_data["org"] = process_company_info(table)
    person = process_person_info(table)
    processed_data["stakeholders"].append(person)

def process_fedramp_docx_v5(
    file_name: str,
    base_fedramp_profile_id: int,
    save_data: bool,
    add_missing: bool,
    appendix_a_file_name: str,
) -> int:
    """
    Processes a FedRAMP document and loads it into RegScale.
    :param str file_name: The path to the FedRAMP document.
    :param int base_fedramp_profile_id: The name of the RegScale FedRAMP profile to use.
    :param bool save_data: Whether to save the data as a JSON file.
    :param bool add_missing: Whether to create missing controls from profile in the SSP.
    :param str appendix_a_file_name: The path to the Appendix A document.
    :return: The created SSP ID.
    :rtype: int
    """
    logger.info(f"Processing FedRAMP Document: {file_name}")
    logger.info(f"Appendix A File: {appendix_a_file_name}")
    ssp_parser = SSPDocParser(file_name)

    logger.info(f"Using the following values: save_data: {save_data} and add_missing: {add_missing}")

    tables = ssp_parser.parse()
    doc_text_dict = ssp_parser.text
    app = Application()
    config = app.config
    user_id = config.get("userId")

    processed_data = identify_and_process_tables(tables)
    parent_id = processing_data_from_ssp_doc(processed_data, user_id, doc_text_dict)
    if appendix_a_file_name:
        logger.info(f"Converting {appendix_a_file_name} to markdown format...")
        try:
            mdparser = MDDocParser(appendix_a_file_name, base_fedramp_profile_id)
        except Exception as e:
            logger.error(f"Error converting {appendix_a_file_name} to markdown format: {e}.")
            return parent_id

        logger.info("Successfully converted Appendix A file to markdown format.")

        # new markdown dictionary for control parts.
        mdparts_dict = mdparser.get_parts()

        logger.info(f"Processing Appendix A File: {appendix_a_file_name}")

        parser = AppendixAParser(filename=appendix_a_file_name)
        controls_implementation_dict = parser.fetch_controls_implementations()

        process_appendix_a(
            parent_id=parent_id,
            profile_id=base_fedramp_profile_id,
            add_missing=add_missing,
            controls_implementation_dict=controls_implementation_dict,
            mdparts_dict=mdparts_dict,
        )
    extract_and_upload_images(file_name, parent_id)
    return parent_id

658
|
+
def log_dictionary_items(dict_items: Dict[str, str]):
|
|
659
|
+
"""
|
|
660
|
+
Logs the items in a dictionary.
|
|
661
|
+
:param Dict[str, str] dict_items: The dictionary to log.
|
|
662
|
+
"""
|
|
663
|
+
for key, value in dict_items.items():
|
|
664
|
+
if value:
|
|
665
|
+
logger.info(f"{key}: {value}")
|
|
666
|
+
|
|
667
|
+
|
|
668
|
+
def handle_implemented(row_data: Dict, status: str) -> str:
|
|
669
|
+
"""
|
|
670
|
+
Handles the implemented status of a control.
|
|
671
|
+
:param Dict row_data: The data from the row.
|
|
672
|
+
:param str status: The current status of the control.
|
|
673
|
+
:return: The updated status of the control.
|
|
674
|
+
:rtype: str
|
|
675
|
+
"""
|
|
676
|
+
log_dictionary_items(row_data)
|
|
677
|
+
for key, value in row_data.items():
|
|
678
|
+
if key == "Implemented" and value:
|
|
679
|
+
status = ControlImplementationStatus.FullyImplemented.value
|
|
680
|
+
return status
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
def handle_service_provider_corporate(row_data: Dict, responsibility: str) -> str:
|
|
684
|
+
"""
|
|
685
|
+
Handles the service provider corporate responsibility of a control.
|
|
686
|
+
:param Dict row_data:
|
|
687
|
+
:param str responsibility:
|
|
688
|
+
:return: fetched responsibility
|
|
689
|
+
:rtype: str
|
|
690
|
+
"""
|
|
691
|
+
log_dictionary_items(row_data)
|
|
692
|
+
for key, value in row_data.items():
|
|
693
|
+
if value:
|
|
694
|
+
responsibility = key
|
|
695
|
+
return responsibility
|
|
696
|
+
|
|
697
|
+
|
|
698
|
+
def handle_parameter(row_data: Dict, parameters: Dict, control_id: int):
|
|
699
|
+
"""
|
|
700
|
+
Handles the parameters of a control.
|
|
701
|
+
:param Dict row_data: The data from the row.
|
|
702
|
+
:param Dict parameters: The parameters dictionary.
|
|
703
|
+
:param int control_id: The control ID.
|
|
704
|
+
"""
|
|
705
|
+
log_dictionary_items(row_data)
|
|
706
|
+
for key, value in row_data.items():
|
|
707
|
+
if value:
|
|
708
|
+
if parameters.get(control_id):
|
|
709
|
+
parameters[control_id].append(value)
|
|
710
|
+
else:
|
|
711
|
+
parameters[control_id] = [value]
|
|
712
|
+
|
|
713
|
+
|
|
714
|
+
def handle_row_data(
|
|
715
|
+
row: Dict,
|
|
716
|
+
control: ControlImplementation,
|
|
717
|
+
status: str,
|
|
718
|
+
responsibility: str,
|
|
719
|
+
parameters: Dict,
|
|
720
|
+
key: str,
|
|
721
|
+
alternative_key: str,
|
|
722
|
+
) -> Tuple[str, str, Dict]:
|
|
723
|
+
"""
|
|
724
|
+
Handles the data from a row.
|
|
725
|
+
:param Dict row: The row to process.
|
|
726
|
+
:param ControlImplementation control:
|
|
727
|
+
:param str status:
|
|
728
|
+
:param str responsibility:
|
|
729
|
+
:param Dict parameters: The parameters dictionary, keyed by control ID.
|
|
730
|
+
:param str key: The primary key used to look up the row data.
|
|
731
|
+
:param str alternative_key: The fallback key used when the primary key is missing.
|
|
732
|
+
:return: A tuple containing the updated status, responsibility, and parameters.
|
|
733
|
+
:rtype: Tuple[str, str, Dict]
|
|
734
|
+
"""
|
|
735
|
+
row_data = row.get(key, row.get(alternative_key))
|
|
736
|
+
logger.info(f"Row Data: {row_data}")
|
|
737
|
+
|
|
738
|
+
if "Implemented" in row_data:
|
|
739
|
+
status = handle_implemented(row_data, status)
|
|
740
|
+
elif SERVICE_PROVIDER_CORPORATE in row_data:
|
|
741
|
+
responsibility = handle_service_provider_corporate(row_data, responsibility)
|
|
742
|
+
elif "Parameter" in row_data:
|
|
743
|
+
handle_parameter(row_data, parameters, control.id)
|
|
744
|
+
|
|
745
|
+
return status, responsibility, parameters
|
|
746
|
+
|
|
747
|
+
|
|
748
|
+
def process_fetch_key_value(summary_data: Dict) -> Optional[str]:
|
|
749
|
+
"""
|
|
750
|
+
Extracts key information from the summary data.
|
|
751
|
+
:param Dict summary_data: The summary data from the row.
|
|
752
|
+
:return: The key of the first truthy item in the summary data, or None if there is none.
|
|
753
|
+
:rtype: Optional[str]
|
|
754
|
+
"""
|
|
755
|
+
for key, value in summary_data.items():
|
|
756
|
+
if value:
|
|
757
|
+
logger.info(f"{key}: {value}")
|
|
758
|
+
return key
|
|
759
|
+
return None
|
|
760
|
+
|
|
761
|
+
|
|
762
|
+
def process_parameter(summary_data: Dict, control_id: int, current_parameters: List[Dict]):
|
|
763
|
+
"""
|
|
764
|
+
Processes the parameters from the summary data.
|
|
765
|
+
:param Dict summary_data: The summary data from the row.
|
|
766
|
+
:param int control_id: The control ID.
|
|
767
|
+
:param List[Dict] current_parameters: The current parameters.
|
|
768
|
+
"""
|
|
769
|
+
for key, value in summary_data.items():
|
|
770
|
+
if value:
|
|
771
|
+
parameter_name = key.replace("Parameter ", "").strip()
|
|
772
|
+
param = {
|
|
773
|
+
"control_id": control_id,
|
|
774
|
+
"parameter_name": parameter_name if parameter_name else None,
|
|
775
|
+
"parameter_value": value,
|
|
776
|
+
}
|
|
777
|
+
if param not in current_parameters:
|
|
778
|
+
current_parameters.append(param)
|
|
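A small sketch of the parameter dictionaries this produces; the key and value are invented:

current = []
process_parameter({"Parameter AC-2 (a)": "90 days"}, 42, current)
# current == [{"control_id": 42, "parameter_name": "AC-2 (a)", "parameter_value": "90 days"}]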
779
|
+
|
|
780
|
+
|
|
781
|
+
def process_row_data(row: Dict, control: SecurityControl, control_dict: Dict) -> Tuple[str, str, List[Dict]]:
|
|
782
|
+
"""
|
|
783
|
+
Processes a single row of data, updating status, responsibility, and parameters based on control summary information.
|
|
784
|
+
:param Dict row: The row to process.
|
|
785
|
+
:param SecurityControl control: The control to process.
|
|
786
|
+
:param Dict control_dict: The dictionary containing the control data.
|
|
787
|
+
:return: A tuple containing the updated status, responsibility, and parameters.
|
|
788
|
+
:rtype: Tuple[str, str, List[Dict]]
|
|
789
|
+
"""
|
|
790
|
+
control_id_key = f"{control.controlId} Control Summary Information"
|
|
791
|
+
alternate = format_alternative_control_key(control.controlId) or control.controlId
|
|
792
|
+
alternative_control_id_key = f"{alternate} Control Summary Information"
|
|
793
|
+
|
|
794
|
+
summary_data = row.get(control_id_key, row.get(alternative_control_id_key))
|
|
795
|
+
if summary_data:
|
|
796
|
+
logger.info(f"Row Data: {summary_data}")
|
|
797
|
+
|
|
798
|
+
if "Implemented" in summary_data:
|
|
799
|
+
status = process_fetch_key_value(summary_data)
|
|
800
|
+
control_dict["status"] = (
|
|
801
|
+
ControlImplementationStatus.FullyImplemented.value if status == "Implemented" else status
|
|
802
|
+
)
|
|
803
|
+
|
|
804
|
+
if SERVICE_PROVIDER_CORPORATE in summary_data:
|
|
805
|
+
control_dict["responsibility"] = process_fetch_key_value(summary_data)
|
|
806
|
+
|
|
807
|
+
if "Parameter" in summary_data:
|
|
808
|
+
process_parameter(summary_data, control.id, control_dict.get("parameters", []))
|
|
809
|
+
|
|
810
|
+
return (
|
|
811
|
+
control_dict.get("status"),
|
|
812
|
+
control_dict.get("responsibility"),
|
|
813
|
+
control_dict.get("parameters"),
|
|
814
|
+
)
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
def process_fetch_key_if_value(summary_data: Dict) -> str:
|
|
818
|
+
"""
|
|
819
|
+
Extracts key information from the summary data.
|
|
820
|
+
:param Dict summary_data: The summary data from the row.
|
|
821
|
+
:return: The key of the first item whose value is True or "☒".
|
|
822
|
+
:rtype: str
|
|
823
|
+
"""
|
|
824
|
+
for key, value in summary_data.items():
|
|
825
|
+
if value is True or value == "☒":
|
|
826
|
+
logger.info(f"{key}: {value}")
|
|
827
|
+
return key
|
|
828
|
+
|
|
829
|
+
|
|
830
|
+
def _find_statement(control: any, alternative_control_id: str, row: Dict, control_dict: Dict) -> str:
|
|
831
|
+
"""
|
|
832
|
+
Find the statement for the control.
|
|
833
|
+
:param any control: The security control object.
|
|
834
|
+
:param str alternative_control_id: The alternative control ID.
|
|
835
|
+
:param Dict row: The row containing the implementation statement.
|
|
836
|
+
:param Dict control_dict: The control dictionary to update with the statement.
|
|
837
|
+
:return: An empty string; the statement is stored in control_dict.
|
|
838
|
+
:rtype: str
|
|
839
|
+
"""
|
|
840
|
+
key_statement = f"{control.controlId} What is the solution and how is it implemented?"
|
|
841
|
+
key_alt_statement = f"{alternative_control_id} What is the solution and how is it implemented?"
|
|
842
|
+
statement_dict = row.get(key_statement) or row.get(key_alt_statement)
|
|
843
|
+
|
|
844
|
+
if isinstance(statement_dict, dict):
|
|
845
|
+
control_dict["statement"] = " ".join(f"{key} {value}" for key, value in statement_dict.items() if value)
|
|
846
|
+
elif isinstance(statement_dict, str):
|
|
847
|
+
control_dict["statement"] = statement_dict
|
|
848
|
+
return ""
|
|
849
|
+
|
|
850
|
+
|
|
851
|
+
def fetch_profile_mappings(profile_id: int) -> List[ProfileMapping]:
|
|
852
|
+
"""
|
|
853
|
+
Fetches the profile mappings for a given profile.
|
|
854
|
+
:param int profile_id: The profile ID.
|
|
855
|
+
:return: A list of ProfileMapping objects.
|
|
856
|
+
:rtype: List[ProfileMapping]
|
|
857
|
+
"""
|
|
858
|
+
profile_mappings = []
|
|
859
|
+
try:
|
|
860
|
+
profile = Profile.get_object(object_id=profile_id)
|
|
861
|
+
if profile and getattr(profile, "name"):
|
|
862
|
+
logger.debug(f"Profile: {profile.name}")
|
|
863
|
+
profile_mappings = ProfileMapping.get_by_profile(profile_id=profile.id)
|
|
864
|
+
except AttributeError:
|
|
865
|
+
error_and_exit(f"Profile #{profile_id} not found, exiting ..")
|
|
866
|
+
logger.info(f"Found {len(profile_mappings)} controls in profile")
|
|
867
|
+
return profile_mappings
|
|
868
|
+
|
|
869
|
+
|
|
870
|
+
def load_appendix_a(
|
|
871
|
+
appendix_a_file_name: str,
|
|
872
|
+
parent_id: int,
|
|
873
|
+
profile_id: int,
|
|
874
|
+
add_missing: bool,
|
|
875
|
+
):
|
|
876
|
+
"""
|
|
877
|
+
Loads the Appendix A data.
|
|
878
|
+
:param str appendix_a_file_name: The path to the Appendix A file.
|
|
879
|
+
:param int parent_id: The parent ID.
|
|
880
|
+
:param int profile_id: The profile ID.
|
|
881
|
+
:param bool add_missing: Whether to add missing controls.
|
|
882
|
+
"""
|
|
883
|
+
logger.info(f"Processing Appendix A File: {appendix_a_file_name}")
|
|
884
|
+
parser = AppendixAParser(filename=appendix_a_file_name)
|
|
885
|
+
controls_implementation_dict = parser.fetch_controls_implementations()
|
|
886
|
+
|
|
887
|
+
process_appendix_a(
|
|
888
|
+
parent_id=parent_id,
|
|
889
|
+
profile_id=profile_id,
|
|
890
|
+
add_missing=add_missing,
|
|
891
|
+
controls_implementation_dict=controls_implementation_dict,
|
|
892
|
+
)
|
|
893
|
+
|
|
894
|
+
|
|
895
|
+
def process_appendix_a(
|
|
896
|
+
parent_id: int,
|
|
897
|
+
profile_id: int,
|
|
898
|
+
add_missing: bool = False,
|
|
899
|
+
controls_implementation_dict: Dict = None,
|
|
900
|
+
mdparts_dict: Dict = None,
|
|
901
|
+
):
|
|
902
|
+
"""
|
|
903
|
+
Processes the Appendix A data.
|
|
904
|
+
:param int parent_id: The parent ID.
|
|
905
|
+
:param int profile_id: The profile ID.
|
|
906
|
+
:param bool add_missing: Whether to add missing controls.
|
|
907
|
+
:param Dict controls_implementation_dict: The controls implementation dictionary.
|
|
908
|
+
:param Dict mdparts_dict: The control parts dictionary.
|
|
909
|
+
"""
|
|
910
|
+
profile_mappings = fetch_profile_mappings(profile_id=profile_id)
|
|
911
|
+
data_dict = controls_implementation_dict
|
|
912
|
+
existing_controls: list[ControlImplementation] = ControlImplementation.get_all_by_parent(
|
|
913
|
+
parent_id=parent_id, parent_module=SecurityPlan.get_module_slug()
|
|
914
|
+
)
|
|
915
|
+
for control in existing_controls:
|
|
916
|
+
if not control.parentId or control.parentId == 0:
|
|
917
|
+
control.parentId = parent_id
|
|
918
|
+
|
|
919
|
+
logger.info(f"Found {len(existing_controls)} existing controls")
|
|
920
|
+
logger.debug(f"{existing_controls=}")
|
|
921
|
+
existing_control_dict = {c.controlID: c for c in existing_controls if c and c.controlID}
|
|
922
|
+
|
|
923
|
+
param_mapper = RosettaStone()
|
|
924
|
+
param_mapper.load_fedramp_version_5_mapping()
|
|
925
|
+
param_mapper.lookup_l0_by_l1()
|
|
926
|
+
for mapping in profile_mappings:
|
|
927
|
+
control = SecurityControl.get_object(object_id=mapping.controlID)
|
|
928
|
+
|
|
929
|
+
if not control:
|
|
930
|
+
logger.debug(f"Control not found in mappings: {mapping.controlID}")
|
|
931
|
+
continue
|
|
932
|
+
alternate = control.controlId
|
|
933
|
+
try:
|
|
934
|
+
alternate = format_alternative_control_key(control.controlId)
|
|
935
|
+
except ValueError:
|
|
936
|
+
logger.debug(f"Error formatting alternative control key: {control.controlId}")
|
|
937
|
+
alternative_control_id = alternate
|
|
938
|
+
control_dict = data_dict.get(control.controlId)
|
|
939
|
+
if not control_dict:
|
|
940
|
+
control_dict = data_dict.get(alternative_control_id)
|
|
941
|
+
if not control_dict:
|
|
942
|
+
logger.debug(f"Control not found in parsed controls: {control.controlId}")
|
|
943
|
+
continue
|
|
944
|
+
|
|
945
|
+
process_control_implementations(
|
|
946
|
+
existing_control_dict,
|
|
947
|
+
control,
|
|
948
|
+
control_dict,
|
|
949
|
+
parent_id,
|
|
950
|
+
add_missing,
|
|
951
|
+
mdparts_dict,
|
|
952
|
+
)
|
|
953
|
+
|
|
954
|
+
|
|
955
|
+
def process_control_implementations(
|
|
956
|
+
existing_control_dict: Dict,
|
|
957
|
+
control: SecurityControl,
|
|
958
|
+
control_dict: Dict,
|
|
959
|
+
parent_id: int,
|
|
960
|
+
add_missing: bool = False,
|
|
961
|
+
mdparts_dict: Dict = None,
|
|
962
|
+
):
|
|
963
|
+
"""
|
|
964
|
+
Processes the control implementations.
|
|
965
|
+
:param Dict existing_control_dict: The existing control dictionary.
|
|
966
|
+
:param SecurityControl control: The security control object.
|
|
967
|
+
:param Dict control_dict: The control dictionary.
|
|
968
|
+
:param int parent_id: The parent ID.
|
|
969
|
+
:param bool add_missing: Whether to add missing controls.
|
|
970
|
+
:param Dict mdparts_dict: The control parts dictionary.
|
|
971
|
+
"""
|
|
972
|
+
supporting_roles, primary_role = get_primary_and_supporting_roles(
|
|
973
|
+
control_dict.get("responsibility").split(",") if control_dict.get("responsibility") else [],
|
|
974
|
+
parent_id,
|
|
975
|
+
)
|
|
976
|
+
|
|
977
|
+
if existing_control := existing_control_dict.get(control.id):
|
|
978
|
+
_update_existing_control(
|
|
979
|
+
existing_control,
|
|
980
|
+
control,
|
|
981
|
+
control_dict,
|
|
982
|
+
primary_role,
|
|
983
|
+
supporting_roles,
|
|
984
|
+
mdparts_dict,
|
|
985
|
+
parent_id,
|
|
986
|
+
)
|
|
987
|
+
else:
|
|
988
|
+
_create_control_implementation(
|
|
989
|
+
control, control_dict, primary_role, parent_id, add_missing, supporting_roles, mdparts_dict
|
|
990
|
+
)
|
|
991
|
+
|
|
992
|
+
|
|
993
|
+
def _create_control_implementation(
|
|
994
|
+
control: SecurityControl,
|
|
995
|
+
control_dict: Dict,
|
|
996
|
+
primary_role: Dict,
|
|
997
|
+
parent_id: int,
|
|
998
|
+
add_missing: bool,
|
|
999
|
+
supporting_roles: List[Dict],
|
|
1000
|
+
mdparts_dict: Dict,
|
|
1001
|
+
):
|
|
1002
|
+
"""
|
|
1003
|
+
Creates a new control implementation.
|
|
1004
|
+
:param SecurityControl control: The security control object.
|
|
1005
|
+
:param Dict control_dict: The parsed control dictionary.
|
|
1006
|
+
:param Dict primary_role: The primary role for the implementation.
|
|
1007
|
+
:param int parent_id: The parent ID.
|
|
1008
|
+
:param bool add_missing: Whether to add missing controls.
|
|
1009
|
+
:param List[Dict] supporting_roles: The supporting roles.
|
|
1010
|
+
:param Dict mdparts_dict: The control parts dictionary.
|
|
1011
|
+
|
|
1012
|
+
"""
|
|
1013
|
+
new_statement = mdparts_dict.get(control.controlId) if mdparts_dict else None
|
|
1014
|
+
implementation = create_implementations(
|
|
1015
|
+
control,
|
|
1016
|
+
parent_id,
|
|
1017
|
+
control_dict.get("status"),
|
|
1018
|
+
new_statement if new_statement else control_dict.get("statement"),
|
|
1019
|
+
control_dict.get("origination"),
|
|
1020
|
+
control_dict.get("parameters"),
|
|
1021
|
+
add_missing,
|
|
1022
|
+
role_id=primary_role.get("id") if primary_role else None,
|
|
1023
|
+
)
|
|
1024
|
+
if implementation:
|
|
1025
|
+
if parts := control_dict.get("parts"):
|
|
1026
|
+
handle_parts(
|
|
1027
|
+
parts=parts,
|
|
1028
|
+
status=map_implementation_status(control_dict.get("status")),
|
|
1029
|
+
control=control,
|
|
1030
|
+
control_implementation=implementation,
|
|
1031
|
+
mdparts_dict=mdparts_dict,
|
|
1032
|
+
origination=control_dict.get("origination"),
|
|
1033
|
+
)
|
|
1034
|
+
if params := control_dict.get("parameters"):
|
|
1035
|
+
handle_params(
|
|
1036
|
+
parameters=params,
|
|
1037
|
+
control=control,
|
|
1038
|
+
control_implementation=implementation,
|
|
1039
|
+
)
|
|
1040
|
+
add_roles_to_control_implementation(implementation, supporting_roles)
|
|
1041
|
+
|
|
1042
|
+
|
|
1043
|
+
def _update_existing_control(
|
|
1044
|
+
existing_control: ControlImplementation,
|
|
1045
|
+
control: SecurityControl,
|
|
1046
|
+
control_dict: Dict,
|
|
1047
|
+
primary_role: Dict,
|
|
1048
|
+
supporting_roles: List[Dict],
|
|
1049
|
+
mdparts_dict: Dict,
|
|
1050
|
+
parent_id: int,
|
|
1051
|
+
):
|
|
1052
|
+
"""
|
|
1053
|
+
Updates the existing control implementation.
|
|
1054
|
+
:param ControlImplementation existing_control: The existing control implementation.
|
|
1055
|
+
:param SecurityControl control: The control object.
|
|
1056
|
+
:param Dict control_dict: The parsed control dictionary.
|
|
1057
|
+
:param Dict primary_role: The primary role for the implementation.
|
|
1058
|
+
:param List[Dict] supporting_roles: The supporting roles.
|
|
1059
|
+
:param Dict mdparts_dict: The control parts dictionary.
|
|
1060
|
+
:param int parent_id: The parent ID.
|
|
1061
|
+
"""
|
|
1062
|
+
new_statement = mdparts_dict.get(control.controlId) if mdparts_dict else None
|
|
1063
|
+
update_existing_control(
|
|
1064
|
+
existing_control,
|
|
1065
|
+
control_dict.get("status"),
|
|
1066
|
+
new_statement if new_statement else control_dict.get("statement"),
|
|
1067
|
+
control_dict.get("responsibility"),
|
|
1068
|
+
primary_role if primary_role and isinstance(primary_role, dict) and primary_role.get("id") else None,
|
|
1069
|
+
parent_id,
|
|
1070
|
+
)
|
|
1071
|
+
if params := control_dict.get("parameters"):
|
|
1072
|
+
handle_params(
|
|
1073
|
+
params,
|
|
1074
|
+
control=control,
|
|
1075
|
+
control_implementation=existing_control,
|
|
1076
|
+
)
|
|
1077
|
+
if parts := control_dict.get("parts"):
|
|
1078
|
+
handle_parts(
|
|
1079
|
+
parts=parts,
|
|
1080
|
+
status=map_implementation_status(control_dict.get("status")),
|
|
1081
|
+
control=control,
|
|
1082
|
+
control_implementation=existing_control,
|
|
1083
|
+
mdparts_dict=mdparts_dict,
|
|
1084
|
+
origination=control_dict.get("origination"),
|
|
1085
|
+
)
|
|
1086
|
+
add_roles_to_control_implementation(existing_control, supporting_roles)
|
|
1087
|
+
|
|
1088
|
+
|
|
1089
|
+
def add_roles_to_control_implementation(implementation: ControlImplementation, roles: List[Dict]):
|
|
1090
|
+
"""
|
|
1091
|
+
Adds roles to a control implementation.
|
|
1092
|
+
:param ControlImplementation implementation: The control implementation.
|
|
1093
|
+
:param List[Dict] roles: The list of roles.
|
|
1094
|
+
"""
|
|
1095
|
+
if roles and len(roles) > 0 and implementation:
|
|
1096
|
+
for role in roles:
|
|
1097
|
+
if isinstance(role, dict) and role.get("id"):
|
|
1098
|
+
implementation.add_role(role.get("id"))
|
|
1099
|
+
|
|
1100
|
+
|
|
1101
|
+
def get_primary_and_supporting_roles(roles: List, parent_id: int) -> Tuple[List, Dict]:
|
|
1102
|
+
"""
|
|
1103
|
+
Gets the primary and supporting roles.
|
|
1104
|
+
:param List roles: The list of roles.
|
|
1105
|
+
:param int parent_id: The parent ID.
|
|
1106
|
+
:return: The supporting roles and the primary role.
|
|
1107
|
+
:rtype: Tuple[List, Dict]
|
|
1108
|
+
"""
|
|
1109
|
+
supporting_roles = []
|
|
1110
|
+
primary_role = None
|
|
1111
|
+
if roles and len(roles) >= 1:
|
|
1112
|
+
primary_role = get_or_create_system_role(roles[0], parent_id)
|
|
1113
|
+
for role in roles[1:]:
|
|
1114
|
+
if role:
|
|
1115
|
+
supporting_roles.append(get_or_create_system_role(role, parent_id))
|
|
1116
|
+
return supporting_roles, primary_role
|
|
1117
|
+
|
|
1118
|
+
|
|
1119
|
+
def get_or_create_system_role(role: str, ssp_id: int) -> Optional[Dict]:
|
|
1120
|
+
"""
|
|
1121
|
+
Gets an existing System Role or creates a new one.
|
|
1122
|
+
:param str role: The name of the role.
|
|
1123
|
+
:param int ssp_id: The security plan ID.
|
|
1124
|
+
:return: The existing or newly created role as a dictionary.
|
|
1125
|
+
:rtype: Optional[Dict]
|
|
1126
|
+
"""
|
|
1127
|
+
app = Application()
|
|
1128
|
+
try:
|
|
1129
|
+
role_name = role.strip().replace(",", "")
|
|
1130
|
+
if role_name == "<Roles>":
|
|
1131
|
+
return None
|
|
1132
|
+
existing_sys_roles = [
|
|
1133
|
+
r
|
|
1134
|
+
for r in SystemRole.get_all_by_parent(parent_id=ssp_id, parent_module=SecurityPlan.get_module_slug())
|
|
1135
|
+
if r is not None
|
|
1136
|
+
]
|
|
1137
|
+
existing_roles_dict = {r.roleName: r for r in existing_sys_roles}
|
|
1138
|
+
in_mem_roles_processed_dict = {r.roleName: r for r in IN_MEMORY_ROLES_PROCESSED if r is not None}
|
|
1139
|
+
existing_role = existing_roles_dict.get(role_name) or in_mem_roles_processed_dict.get(role_name)
|
|
1140
|
+
IN_MEMORY_ROLES_PROCESSED.append(existing_role)
|
|
1141
|
+
|
|
1142
|
+
if existing_role:
|
|
1143
|
+
logger.debug("Role: %s already exists in RegScale, skipping insert..", role_name.strip())
|
|
1144
|
+
return existing_role.model_dump()
|
|
1145
|
+
else:
|
|
1146
|
+
user_id = app.config.get("userId")
|
|
1147
|
+
if role_name:
|
|
1148
|
+
sys_role = SystemRole(
|
|
1149
|
+
roleName=role_name,
|
|
1150
|
+
roleType="Internal",
|
|
1151
|
+
accessLevel="Privileged",
|
|
1152
|
+
sensitivityLevel=ControlImplementationStatus.NA.value,
|
|
1153
|
+
assignedUserId=user_id,
|
|
1154
|
+
privilegeDescription=role_name,
|
|
1155
|
+
securityPlanId=ssp_id,
|
|
1156
|
+
functions=role_name,
|
|
1157
|
+
).create()
|
|
1158
|
+
if sys_role:
|
|
1159
|
+
IN_MEMORY_ROLES_PROCESSED.append(sys_role)
|
|
1160
|
+
return sys_role.model_dump()
|
|
1161
|
+
except Exception as e:
|
|
1162
|
+
logger.warning(f"Error creating role: {role} - {e}")
|
|
1163
|
+
return {}
|
|
1164
|
+
|
|
1165
|
+
|
|
1166
|
+
def create_implementations(
|
|
1167
|
+
control: SecurityControl,
|
|
1168
|
+
parent_id: int,
|
|
1169
|
+
status: str,
|
|
1170
|
+
statement: str,
|
|
1171
|
+
responsibility: str,
|
|
1172
|
+
parameters: List[Dict],
|
|
1173
|
+
add_missing: bool = False,
|
|
1174
|
+
role_id: int = None,
|
|
1175
|
+
) -> ControlImplementation:
|
|
1176
|
+
"""
|
|
1177
|
+
Creates the control implementations.
|
|
1178
|
+
:param SecurityControl control: The control object.
|
|
1179
|
+
:param int parent_id: The parent ID.
|
|
1180
|
+
:param str status: The status of the implementation.
|
|
1181
|
+
:param str statement: The statement of the implementation.
|
|
1182
|
+
:param str responsibility: The responsibility of the implementation.
|
|
1183
|
+
:param List[Dict] parameters: The parameters of the implementation.
|
|
1184
|
+
:param bool add_missing: Whether to add missing controls.
|
|
1185
|
+
:param int role_id: The role ID.
|
|
1186
|
+
:return: The created control implementation.
|
|
1187
|
+
:rtype: ControlImplementation
|
|
1188
|
+
"""
|
|
1189
|
+
if status and status.lower() == "implemented":
|
|
1190
|
+
status = ControlImplementationStatus.FullyImplemented.value
|
|
1191
|
+
if control and (status == DEFAULT_STATUS and add_missing) or (status != DEFAULT_STATUS):
|
|
1192
|
+
logger.debug(
|
|
1193
|
+
f"Creating Control: {control.controlId} - {control.id} - {status} - {statement} - {responsibility}"
|
|
1194
|
+
)
|
|
1195
|
+
logger.debug(f"params: {parameters}")
|
|
1196
|
+
justification, planned_date, steps_to_implement = create_control_implementation_defaults(status)
|
|
1197
|
+
|
|
1198
|
+
control_implementation = ControlImplementation(
|
|
1199
|
+
parentId=parent_id,
|
|
1200
|
+
parentModule="securityplans",
|
|
1201
|
+
controlID=control.id,
|
|
1202
|
+
status=map_implementation_status(status),
|
|
1203
|
+
responsibility=map_responsibility(responsibility),
|
|
1204
|
+
implementation=clean_statement(statement),
|
|
1205
|
+
systemRoleId=role_id,
|
|
1206
|
+
exclusionJustification=justification,
|
|
1207
|
+
stepsToImplement=steps_to_implement,
|
|
1208
|
+
plannedImplementationDate=planned_date,
|
|
1209
|
+
)
|
|
1210
|
+
return control_implementation.create()
|
|
1211
|
+
# handle_params(parameters, control, control_implementation)
|
|
1212
|
+
|
|
1213
|
+
|
|
1214
|
+
def create_control_implementation_defaults(status: str) -> Tuple[str, str, str]:
|
|
1215
|
+
"""
|
|
1216
|
+
Creates a tuple with default values for exclusion justification, planned implementation date, and steps to implement based on the given status.
:param str status: The implementation status used to choose the defaults.
|
|
1217
|
+
|
|
1218
|
+
:return: A tuple of (exclusion_justification, planned_implementation_date, steps_to_implement).
|
|
1219
|
+
:rtype: Tuple[str, str, str]
|
|
1220
|
+
"""
|
|
1221
|
+
exclusion_justification = None
|
|
1222
|
+
planned_implementation_date = None
|
|
1223
|
+
steps_to_implement = None
|
|
1224
|
+
if status == ControlImplementationStatus.NA.value:
|
|
1225
|
+
exclusion_justification = "This is an automated justification, please update"
|
|
1226
|
+
|
|
1227
|
+
if status == "Planned":
|
|
1228
|
+
current_date = datetime.datetime.now()
|
|
1229
|
+
planned_implementation_date = datetime_str(current_date + datetime.timedelta(days=30))
|
|
1230
|
+
steps_to_implement = "Automated steps to implement, please update"
|
|
1231
|
+
|
|
1232
|
+
return exclusion_justification, planned_implementation_date, steps_to_implement
|
|
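A short sketch of the defaults this yields, assuming the enum values defined elsewhere in this module:

create_control_implementation_defaults(ControlImplementationStatus.NA.value)
# -> ("This is an automated justification, please update", None, None)
justification, planned_date, steps = create_control_implementation_defaults("Planned")
# justification is None, planned_date is ~30 days from now, steps is a placeholder string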
1233
|
+
|
|
1234
|
+
|
|
1235
|
+
def clean_statement(statement: Union[str, List]) -> str:
|
|
1236
|
+
"""
|
|
1237
|
+
Cleans the statement.
|
|
1238
|
+
:param Union[str, List] statement: The statement to clean.
|
|
1239
|
+
:return: The cleaned statement.
|
|
1240
|
+
:rtype: str
|
|
1241
|
+
"""
|
|
1242
|
+
if isinstance(statement, list):
|
|
1243
|
+
return " ".join(statement)
|
|
1244
|
+
return statement or ""
|
|
1245
|
+
|
|
1246
|
+
|
|
1247
|
+
def find_matching_parts(part: str, other_ids: List[str]) -> List[str]:
|
|
1248
|
+
"""
|
|
1249
|
+
Find and return the otherId values that contain the specified part (e.g., "Part a"),
|
|
1250
|
+
by directly checking for the presence of a substring like '_obj.a'.
|
|
1251
|
+
:param str part: The part to look for.
|
|
1252
|
+
:param List[str] other_ids: The list of otherId values to search.
|
|
1253
|
+
:return: A list of otherId values that contain the specified part.
|
|
1254
|
+
:rtype: List[str]
|
|
1255
|
+
"""
|
|
1256
|
+
# Extract the letter part (e.g., "a") from the input string.
|
|
1257
|
+
part_letter = part[-1].lower() # Assuming the format "Part X" where X is the part letter.
|
|
1258
|
+
|
|
1259
|
+
# Construct the substring to look for in otherId values.
|
|
1260
|
+
part_pattern = f"_obj.{part_letter}"
|
|
1261
|
+
|
|
1262
|
+
# Collect and return all matching otherId values.
|
|
1263
|
+
matches = [
|
|
1264
|
+
other_id for other_id in other_ids if part_pattern in other_id.lower() or part_letter in other_id.lower()
|
|
1265
|
+
]
|
|
1266
|
+
|
|
1267
|
+
return matches
|
|
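A quick worked example with invented objective IDs; note that the letter fallback can over-match when the letter appears elsewhere in an ID:

find_matching_parts("Part a", ["ir-1_obj.a", "ir-1_obj.b"])
# -> ["ir-1_obj.a"]
find_matching_parts("Part b", ["ir-1_obj.a", "ir-1_obj.b"])
# -> ["ir-1_obj.a", "ir-1_obj.b"]  ("b" also appears in "obj")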
1268
|
+
|
|
1269
|
+
|
|
1270
|
+
def get_or_create_option(
|
|
1271
|
+
part_name: str,
|
|
1272
|
+
part_value: str,
|
|
1273
|
+
control: SecurityControl,
|
|
1274
|
+
objective: ControlObjective,
|
|
1275
|
+
existing_options: List[ImplementationOption],
|
|
1276
|
+
status: Optional[str],
|
|
1277
|
+
parent_id: int,
|
|
1278
|
+
) -> Optional[ImplementationOption]:
|
|
1279
|
+
"""
|
|
1280
|
+
Get or create an implementation option.
|
|
1281
|
+
:param str part_name: The name of the part.
|
|
1282
|
+
:param str part_value: The value of the part.
|
|
1283
|
+
:param SecurityControl control: The security control object.
|
|
1284
|
+
:param ControlObjective objective: The control objective object.
|
|
1285
|
+
:param List[ImplementationOption] existing_options: The existing options.
|
|
1286
|
+
:param Optional[str] status: The status of the implementation.
|
|
1287
|
+
:param int parent_id: The parent ID.
|
|
1288
|
+
:return: The implementation option.
|
|
1289
|
+
:rtype: Optional[ImplementationOption]
|
|
1290
|
+
"""
|
|
1291
|
+
option = None
|
|
1292
|
+
for o in existing_options:
|
|
1293
|
+
if o.name == part_name:
|
|
1294
|
+
option = o
|
|
1295
|
+
break
|
|
1296
|
+
if not option:
|
|
1297
|
+
try:
|
|
1298
|
+
option = ImplementationOption(
|
|
1299
|
+
name=part_name,
|
|
1300
|
+
description=part_value,
|
|
1301
|
+
objectiveId=objective.id,
|
|
1302
|
+
acceptability=status,
|
|
1303
|
+
securityControlId=objective.securityControlId,
|
|
1304
|
+
)
|
|
1305
|
+
options = ImplementationOption.get_all_by_parent(parent_id=objective.securityControlId, plan_id=parent_id)
|
|
1306
|
+
for o in options:
|
|
1307
|
+
if o.name == part_name:
|
|
1308
|
+
return o
|
|
1309
|
+
elif option.name == o.name:
|
|
1310
|
+
return o
|
|
1311
|
+
else:
|
|
1312
|
+
option.get_or_create()
|
|
1313
|
+
return option
|
|
1314
|
+
except Exception:
|
|
1315
|
+
logger.warning(f"Error creating option: {part_name}")
|
|
1316
|
+
return option
|
|
1317
|
+
|
|
1318
|
+
|
|
1319
|
+
def extract_parts(content: str) -> dict:
|
|
1320
|
+
"""
|
|
1321
|
+
Splits a string into parts based on markers like "Part a:", "Part b:", etc.
|
|
1322
|
+
If no markers are found, the entire content is treated as general content.
|
|
1323
|
+
:param str content: The content to split into parts.
|
|
1324
|
+
:return: A dictionary where keys are "Part a", "Part b", etc., and values are the corresponding content.
|
|
1325
|
+
:rtype: dict
|
|
1326
|
+
"""
|
|
1327
|
+
output = {}
|
|
1328
|
+
if not content:
|
|
1329
|
+
return output
|
|
1330
|
+
# Regex to find "Part a:", "Part b:", etc., regardless of surrounding HTML tags
|
|
1331
|
+
part_pattern = re.compile(r"(?:<[^>]*>)?Part ([a-z]):(?:</[^>]*>)?", re.IGNORECASE)
|
|
1332
|
+
|
|
1333
|
+
# Find all matches for "Part a:", "Part b:", etc.
|
|
1334
|
+
parts = part_pattern.split(content)
|
|
1335
|
+
|
|
1336
|
+
if len(parts) == 1: # No "Part a:", "Part b:" markers
|
|
1337
|
+
output["default"] = content.strip()
|
|
1338
|
+
else:
|
|
1339
|
+
# First chunk is general content (if any)
|
|
1340
|
+
general_content = parts[0].strip()
|
|
1341
|
+
if general_content:
|
|
1342
|
+
output["default"] = general_content
|
|
1343
|
+
|
|
1344
|
+
# Iterate through the matched parts and their corresponding content
|
|
1345
|
+
for i in range(1, len(parts), 2):
|
|
1346
|
+
part_letter = parts[i].lower() # Part letter (e.g., 'a', 'b')
|
|
1347
|
+
part_content = parts[i + 1].strip() # Corresponding HTML/content for the part
|
|
1348
|
+
output[f"Part {part_letter}"] = part_content
|
|
1349
|
+
|
|
1350
|
+
return output
|
|
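A usage sketch with an invented HTML fragment, showing how the "Part x:" markers drive the split:

html = "<p>Intro text.</p><p>Part a:</p><p>Do X.</p><p>Part b:</p><p>Do Y.</p>"
extract_parts(html)
# -> {"default": "<p>Intro text.</p>", "Part a": "<p>Do X.</p>", "Part b": "<p>Do Y.</p>"}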
1351
|
+
|
|
1352
|
+
|
|
1353
|
+
def handle_parts(
|
|
1354
|
+
parts: List[Dict],
|
|
1355
|
+
status: str,
|
|
1356
|
+
control: SecurityControl,
|
|
1357
|
+
control_implementation: ControlImplementation,
|
|
1358
|
+
mdparts_dict: Dict,
|
|
1359
|
+
origination: str = None,
|
|
1360
|
+
):
|
|
1361
|
+
"""
|
|
1362
|
+
Handle the parts for the given control and control implementation.
|
|
1363
|
+
:param List[Dict] parts: The parts to handle.
|
|
1364
|
+
:param str status: The status of the implementation.
|
|
1365
|
+
:param SecurityControl control: The security control object.
|
|
1366
|
+
:param ControlImplementation control_implementation: The control implementation object.
|
|
1367
|
+
:param Dict mdparts_dict: The control parts dictionary.
|
|
1368
|
+
:param str origination: The origination of the implementation.
|
|
1369
|
+
"""
|
|
1370
|
+
|
|
1371
|
+
# Compliance settings groups are too inconsistent to auto map, so we need to manually map them
|
|
1372
|
+
status_map = {
|
|
1373
|
+
ControlImplementationStatus.FullyImplemented.value: "Implemented",
|
|
1374
|
+
ControlImplementationStatus.PartiallyImplemented.value: ControlImplementationStatus.PartiallyImplemented.value,
|
|
1375
|
+
ControlImplementationStatus.NA.value: ControlImplementationStatus.NA.value,
|
|
1376
|
+
ControlImplementationStatus.NotImplemented.value: ControlImplementationStatus.NotImplemented.value,
|
|
1377
|
+
"Planned": "Planned",
|
|
1378
|
+
}
|
|
1379
|
+
|
|
1380
|
+
control_parts_string_from_mdict = mdparts_dict.get(control.controlId) if mdparts_dict else None
|
|
1381
|
+
parts_dict = extract_parts(content=control_parts_string_from_mdict)
|
|
1382
|
+
control_objectives = ControlObjective.get_by_control(control_id=control.id)
|
|
1383
|
+
imp_objectives: List[ImplementationObjective] = []
|
|
1384
|
+
for index, part in enumerate(parts):
|
|
1385
|
+
logger.debug(f"Part: {part.get('name')}")
|
|
1386
|
+
if part.get("value") == "":
|
|
1387
|
+
continue
|
|
1388
|
+
logger.debug(f"Control: {control.controlId} Part: {part.get('name')}")
|
|
1389
|
+
part_dict = extract_parts(control_parts_string_from_mdict)
|
|
1390
|
+
part_name = part.get("name", "")
|
|
1391
|
+
logger.debug(f"Part Name: {part_name}")
|
|
1392
|
+
part_letter = get_part_letter(part_name)
|
|
1393
|
+
logger.debug(f"Part Letter: {part_letter}")
|
|
1394
|
+
part_statement = part_dict.get(f"Part {part_letter}", parts_dict.get("default"))
|
|
1395
|
+
logger.debug(f"Control Id: {control.controlId}")
|
|
1396
|
+
multiple_control_objectives = len(control_objectives) > 1
|
|
1397
|
+
matching_objectives = (
|
|
1398
|
+
[o for o in control_objectives if o.name.replace("(", "").startswith(part_letter)]
|
|
1399
|
+
if multiple_control_objectives
|
|
1400
|
+
else control_objectives
|
|
1401
|
+
)
|
|
1402
|
+
logger.debug(f"Matching Objectives: {matching_objectives}")
|
|
1403
|
+
regscale_version = RegscaleVersion.get_platform_version()
|
|
1404
|
+
if len(regscale_version) >= 10 or Version(regscale_version) >= Version("6.13.0.0"):
|
|
1405
|
+
status = status_map.get(status, status)
|
|
1406
|
+
|
|
1407
|
+
# Status should never be None
|
|
1408
|
+
if status is None:
|
|
1409
|
+
error_and_exit("Status should never be None.")
|
|
1410
|
+
|
|
1411
|
+
handle_matching_objectives(
|
|
1412
|
+
matching_objectives=matching_objectives,
|
|
1413
|
+
part=part,
|
|
1414
|
+
control=control,
|
|
1415
|
+
control_implementation=control_implementation,
|
|
1416
|
+
status=status_map.get(status, status),
|
|
1417
|
+
imp_objectives=imp_objectives,
|
|
1418
|
+
origination=map_responsibility(origination),
|
|
1419
|
+
new_statement=part_statement or None,
|
|
1420
|
+
)
|
|
1421
|
+
ImplementationObjective.batch_create(items=imp_objectives)
|
|
1422
|
+
|
|
1423
|
+
|
|
1424
|
+
def handle_matching_objectives(
|
|
1425
|
+
matching_objectives: List[ControlObjective],
|
|
1426
|
+
part: Dict,
|
|
1427
|
+
control: SecurityControl,
|
|
1428
|
+
control_implementation: ControlImplementation,
|
|
1429
|
+
status: Optional[str],
|
|
1430
|
+
imp_objectives: List[ImplementationObjective],
|
|
1431
|
+
origination: Optional[str] = None,
|
|
1432
|
+
new_statement: Optional[str] = None,
|
|
1433
|
+
):
|
|
1434
|
+
"""
|
|
1435
|
+
Handle the matching objectives for the given part.
|
|
1436
|
+
:param List[ControlObjective] matching_objectives: The matching objectives.
|
|
1437
|
+
:param Dict part: The part to handle.
|
|
1438
|
+
:param SecurityControl control: The security control object.
|
|
1439
|
+
:param ControlImplementation control_implementation: The control implementation object.
|
|
1440
|
+
:param Optional[str] status: The status of the implementation.
|
|
1441
|
+
:param List[ImplementationObjective] imp_objectives: The list of implementation objectives.
|
|
1442
|
+
:param Optional[str] origination: The origination of the implementation.
|
|
1443
|
+
:param Optional[str] new_statement: The new statement for the implementation.
|
|
1444
|
+
"""
|
|
1445
|
+
statements_used = []
|
|
1446
|
+
for objective in matching_objectives:
|
|
1447
|
+
logger.info(f"Objective: {objective.id} - {objective.name} - {objective.securityControlId}")
|
|
1448
|
+
part_statement = f"{part.get('value', '')}" if not new_statement else new_statement
|
|
1449
|
+
statements_used.append(part_statement)
|
|
1450
|
+
|
|
1451
|
+
has_existing_obj = check_for_existing_objective(control_implementation, objective, status, part_statement)
|
|
1452
|
+
if has_existing_obj:
|
|
1453
|
+
continue
|
|
1454
|
+
duplicate = part_statement in statements_used
|
|
1455
|
+
handle_implementation_objectives(
|
|
1456
|
+
objective,
|
|
1457
|
+
part_statement,
|
|
1458
|
+
status,
|
|
1459
|
+
control_implementation,
|
|
1460
|
+
imp_objectives,
|
|
1461
|
+
control,
|
|
1462
|
+
duplicate,
|
|
1463
|
+
origination,
|
|
1464
|
+
)
|
|
1465
|
+
|
|
1466
|
+
|
|
1467
|
+
def check_for_existing_objective(
|
|
1468
|
+
control_implementation: ControlImplementation,
|
|
1469
|
+
objective: ControlObjective,
|
|
1470
|
+
status: Optional[str],
|
|
1471
|
+
part_statement: str,
|
|
1472
|
+
) -> bool:
|
|
1473
|
+
"""
|
|
1474
|
+
Check for existing implementation objectives.
|
|
1475
|
+
:param ControlImplementation control_implementation: The control implementation object.
|
|
1476
|
+
:param ControlObjective objective: The control objective object.
|
|
1477
|
+
:param Optional[str] status: The status of the implementation.
|
|
1478
|
+
:param str part_statement: The part statement.
|
|
1479
|
+
:return: True if an existing implementation objective is found, False otherwise.
|
|
1480
|
+
:rtype: bool
|
|
1481
|
+
"""
|
|
1482
|
+
status_map = {
|
|
1483
|
+
ControlImplementationStatus.FullyImplemented.value: "Implemented",
|
|
1484
|
+
ControlImplementationStatus.PartiallyImplemented.value: ControlImplementationStatus.PartiallyImplemented.value,
|
|
1485
|
+
ControlImplementationStatus.NA.value: ControlImplementationStatus.NA.value,
|
|
1486
|
+
ControlImplementationStatus.NotImplemented.value: ControlImplementationStatus.NotImplemented.value,
|
|
1487
|
+
ControlImplementationStatus.Planned.value: ControlImplementationStatus.Planned.value,
|
|
1488
|
+
}
|
|
1489
|
+
|
|
1490
|
+
existing_objectives: List[ImplementationObjective] = ImplementationObjective.get_by_control(
|
|
1491
|
+
implementation_id=control_implementation.id
|
|
1492
|
+
)
|
|
1493
|
+
for existing_obj in existing_objectives:
|
|
1494
|
+
if existing_obj.objectiveId == objective.id:
|
|
1495
|
+
if status:
|
|
1496
|
+
if isinstance(status, ControlImplementationStatus):
|
|
1497
|
+
status = status.value
|
|
1498
|
+
existing_obj.status = status_map.get(status, status)
|
|
1499
|
+
existing_obj.statement = part_statement
|
|
1500
|
+
existing_obj.parentObjectiveId = objective.id
|
|
1501
|
+
existing_obj.save()
|
|
1502
|
+
return True
|
|
1503
|
+
return False
|
|
1504
|
+
|
|
1505
|
+
|
|
1506
|
+
def map_responsibility(responsibility: str) -> str:
|
|
1507
|
+
"""
|
|
1508
|
+
Map the responsibility to the appropriate value.
|
|
1509
|
+
:param str responsibility: The responsibility to map.
|
|
1510
|
+
:return: The mapped responsibility.
|
|
1511
|
+
:rtype: str
|
|
1512
|
+
"""
|
|
1513
|
+
# This should be server code, sorry
|
|
1514
|
+
responsibility_map = {
|
|
1515
|
+
"Provider": "Service Provider Corporate",
|
|
1516
|
+
"Provider (System Specific)": "Service Provider System Specific",
|
|
1517
|
+
"Customer": "Provided by Customer (Customer System Specific)",
|
|
1518
|
+
"Hybrid": "Service Provider Hybrid (Corporate and System Specific)",
|
|
1519
|
+
"Customer Configured": "Configured by Customer (Customer System Specific)",
|
|
1520
|
+
"Shared": "Shared (Service Provider and Customer Responsibility)",
|
|
1521
|
+
"Inherited": "Inherited from pre-existing FedRAMP Authorization",
|
|
1522
|
+
}
|
|
1523
|
+
res = ""
|
|
1524
|
+
if responsibility == "Service Provider System Specific":
|
|
1525
|
+
res = ImplementationObjectiveResponsibility.PROVIDER_SYSTEM_SPECIFIC.value
|
|
1526
|
+
if responsibility == SERVICE_PROVIDER_CORPORATE:
|
|
1527
|
+
res = ImplementationObjectiveResponsibility.PROVIDER.value
|
|
1528
|
+
if responsibility == "Provided by Customer (Customer System Specific)":
|
|
1529
|
+
res = ImplementationObjectiveResponsibility.CUSTOMER.value
|
|
1530
|
+
if responsibility == "Configured by Customer (Customer System Specific)":
|
|
1531
|
+
res = ImplementationObjectiveResponsibility.CUSTOMER_CONFIGURED.value
|
|
1532
|
+
if responsibility == "Service Provider Hybrid (Corporate and System Specific)":
|
|
1533
|
+
res = ImplementationObjectiveResponsibility.HYBRID.value
|
|
1534
|
+
if responsibility == "Inherited from pre-existing FedRAMP Authorization":
|
|
1535
|
+
res = ImplementationObjectiveResponsibility.INHERITED.value
|
|
1536
|
+
if responsibility == ImplementationObjectiveResponsibility.NOT_APPLICABLE.value:
|
|
1537
|
+
res = ImplementationObjectiveResponsibility.NOT_APPLICABLE.value
|
|
1538
|
+
if responsibility == "Shared (Service Provider and Customer Responsibility)":
|
|
1539
|
+
res = ImplementationObjectiveResponsibility.SHARED.value
|
|
1540
|
+
regscale_version = RegscaleVersion.get_platform_version()
|
|
1541
|
+
if len(regscale_version) >= 10 or Version(regscale_version) >= Version("6.13.0.0"):
|
|
1542
|
+
return responsibility_map.get(res, res)
|
|
1543
|
+
return res
|
|
1544
|
+
|
|
1545
|
+
|
|
1546
|
+
def handle_implementation_objectives(
|
|
1547
|
+
objective: ControlObjective,
|
|
1548
|
+
part_statement: str,
|
|
1549
|
+
status: Optional[str],
|
|
1550
|
+
control_implementation: ControlImplementation,
|
|
1551
|
+
imp_objectives: List[ImplementationObjective],
|
|
1552
|
+
control: SecurityControl,
|
|
1553
|
+
duplicate: bool,
|
|
1554
|
+
origination: Optional[str] = None,
|
|
1555
|
+
):
|
|
1556
|
+
"""
|
|
1557
|
+
Handle the implementation objectives for the given objective, option, and control implementation.
|
|
1558
|
+
:param ControlObjective objective: The control objective.
|
|
1559
|
+
:param str part_statement: The statement text for this part.
|
|
1560
|
+
:param Optional[str] status: The status of the implementation.
|
|
1561
|
+
:param ControlImplementation control_implementation: The control implementation object.
|
|
1562
|
+
:param List[ImplementationObjective] imp_objectives: The list collecting implementation objectives to create.
|
|
1563
|
+
:param SecurityControl control: The security control object.
|
|
1564
|
+
:param bool duplicate: Whether the statement is a duplicate; a note is added if True.
|
|
1565
|
+
:param Optional[str] origination: The origination of the implementation.
|
|
1566
|
+
"""
|
|
1567
|
+
if isinstance(status, ControlImplementationStatus):
|
|
1568
|
+
status = status.value
|
|
1569
|
+
imp_obj = ImplementationObjective(
|
|
1570
|
+
securityControlId=control.id,
|
|
1571
|
+
implementationId=control_implementation.id,
|
|
1572
|
+
objectiveId=objective.id,
|
|
1573
|
+
optionId=None,
|
|
1574
|
+
status=status,
|
|
1575
|
+
statement=part_statement,
|
|
1576
|
+
notes="#replicated-data-part" if duplicate else "",
|
|
1577
|
+
responsibility=origination if origination else None,
|
|
1578
|
+
)
|
|
1579
|
+
if imp_obj not in imp_objectives:
|
|
1580
|
+
imp_objectives.append(imp_obj)
|
|
1581
|
+
|
|
1582
|
+
|
|
1583
|
+
def add_implementation_to_list(objective: ImplementationObjective, implementation_list: List[ImplementationObjective]):
|
|
1584
|
+
"""
|
|
1585
|
+
Add the implementation objective to the list.
|
|
1586
|
+
:param ImplementationObjective objective: The implementation objective to add.
|
|
1587
|
+
:param List[ImplementationObjective] implementation_list: The list of implementation objectives.
|
|
1588
|
+
"""
|
|
1589
|
+
if objective not in implementation_list:
|
|
1590
|
+
implementation_list.append(objective)
|
|
1591
|
+
|
|
1592
|
+
|
|
1593
|
+
def get_matching_objectives(control_objectives: List[ControlObjective], part_name: str) -> List[ControlObjective]:
|
|
1594
|
+
"""
|
|
1595
|
+
Find and return the control objectives that match the specified part name.
|
|
1596
|
+
:param List[ControlObjective] control_objectives: The list of control objectives to search.
|
|
1597
|
+
:param str part_name: The part name to match.
|
|
1598
|
+
:return: A list of control objectives that match the specified part name.
|
|
1599
|
+
:rtype: List[ControlObjective]
|
|
1600
|
+
"""
|
|
1601
|
+
matching_objectives = []
|
|
1602
|
+
try:
|
|
1603
|
+
matching_objectives = get_objectives_by_matching_property(
|
|
1604
|
+
control_objectives=control_objectives, property_name="name", part_letter=get_part_letter(part_name)
|
|
1605
|
+
)
|
|
1606
|
+
except Exception as e:
|
|
1607
|
+
logger.warning(f"Error finding matching objectives: {e}")
|
|
1608
|
+
return matching_objectives
|
|
1609
|
+
|
|
1610
|
+
|
|
1611
|
+
def get_part_letter(part: str) -> str:
|
|
1612
|
+
"""
|
|
1613
|
+
Get the part letter from the part name.
|
|
1614
|
+
:param str part: The part name.
|
|
1615
|
+
:return: The part letter.
|
|
1616
|
+
:rtype: str
|
|
1617
|
+
"""
|
|
1618
|
+
return part.lower().replace("part", "").strip() # Assuming the format "Part X" where X is the part letter.
|
|
1619
|
+
|
|
1620
|
+
|
|
1621
|
+
def get_objectives_by_matching_property(
|
|
1622
|
+
control_objectives: List[ControlObjective], property_name: str, part_letter: str
|
|
1623
|
+
) -> List[ControlObjective]:
|
|
1624
|
+
"""
|
|
1625
|
+
Find and return the control objectives that match the specified property name.
|
|
1626
|
+
:param List[ControlObjective] control_objectives: The list of control objectives to search.
|
|
1627
|
+
:param str property_name: The property name to match.
|
|
1628
|
+
:param str part_letter: The part letter to match.
|
|
1629
|
+
:return: A list of control objectives that match the specified property name.
|
|
1630
|
+
:rtype: List[ControlObjective]
|
|
1631
|
+
"""
|
|
1632
|
+
matching_objectives = []
|
|
1633
|
+
try:
|
|
1634
|
+
matching_objectives = [
|
|
1635
|
+
o for o in control_objectives if part_letter.lower() in getattr(o, property_name).lower()
|
|
1636
|
+
]
|
|
1637
|
+
except Exception as e:
|
|
1638
|
+
logger.warning(f"Error finding matching objectives: {e}")
|
|
1639
|
+
return matching_objectives
|
|
1640
|
+
|
|
1641
|
+
|
|
1642
|
+
def handle_params(
|
|
1643
|
+
parameters: List[Dict],
|
|
1644
|
+
control: SecurityControl,
|
|
1645
|
+
control_implementation: ControlImplementation,
|
|
1646
|
+
):
|
|
1647
|
+
"""
|
|
1648
|
+
Handle the parameters for the given control and control implementation.
|
|
1649
|
+
:param List[Dict] parameters: The parameters to handle.
|
|
1650
|
+
:param SecurityControl control: The security control object.
|
|
1651
|
+
:param ControlImplementation control_implementation: The control implementation object.
|
|
1652
|
+
|
|
1653
|
+
"""
|
|
1654
|
+
# Log the initial handling of parameters for the given control.
|
|
1655
|
+
logger.info(f"Handling Parameters for Control: {control.id} - {len(parameters)}")
|
|
1656
|
+
param_mapper = RosettaStone()
|
|
1657
|
+
if not param_mapper.map:
|
|
1658
|
+
param_mapper.load_fedramp_version_5_mapping()
|
|
1659
|
+
param_mapper.lookup_l0_by_l1()
|
|
1660
|
+
mappings = param_mapper.map
|
|
1661
|
+
base_control_params = ControlParameter.get_by_control(control_id=control.id)
|
|
1662
|
+
base_control_params_dict = {param.otherId: param for param in base_control_params}
|
|
1663
|
+
for param in parameters:
|
|
1664
|
+
gen_param_name = f"Parameter {param.get('name').replace(' ', '')}"
|
|
1665
|
+
if gen_param_name not in mappings:
|
|
1666
|
+
logger.debug(f"Parameter: {gen_param_name} not found in mappings")
|
|
1667
|
+
logger.info(gen_param_name)
|
|
1668
|
+
control_param_name = mappings.get(gen_param_name)
|
|
1669
|
+
base_control_param = base_control_params_dict.get(control_param_name)
|
|
1670
|
+
|
|
1671
|
+
if base_control_param:
|
|
1672
|
+
existing_params = Parameter.get_by_parent_id(parent_id=control_implementation.id)
|
|
1673
|
+
existing_param_names_dict = {param.name: param for param in existing_params}
|
|
1674
|
+
existing_parameter = existing_param_names_dict.get(control_param_name)
|
|
1675
|
+
existing_param_by_external_name_dict = {param.externalPropertyName: param for param in existing_params}
|
|
1676
|
+
if not existing_parameter:
|
|
1677
|
+
existing_parameter = existing_param_by_external_name_dict.get(control_param_name)
|
|
1678
|
+
try:
|
|
1679
|
+
if not existing_params or not existing_parameter:
|
|
1680
|
+
Parameter(
|
|
1681
|
+
controlImplementationId=control_implementation.id,
|
|
1682
|
+
name=param.get("name").strip(),
|
|
1683
|
+
value=param.get("value"),
|
|
1684
|
+
externalPropertyName=base_control_param.otherId,
|
|
1685
|
+
parentParameterId=base_control_param.id,
|
|
1686
|
+
).create()
|
|
1687
|
+
else:
|
|
1688
|
+
existing_param = existing_parameter
|
|
1689
|
+
if existing_param.name == control_param_name:
|
|
1690
|
+
existing_param.value = param.get("value")
|
|
1691
|
+
existing_param.parentParameterId = base_control_param.id
|
|
1692
|
+
existing_param.save()
|
|
1693
|
+
except Exception as e:
|
|
1694
|
+
logger.warning(f"warning handling parameter: {e}")
|
|
1695
|
+
else:
|
|
1696
|
+
logger.warning(f"Param: {gen_param_name} not found: {control_param_name}")
|
|
1697
|
+
|
|
1698
|
+
|
|
1699
|
+
def build_params(base_control_params: List[ControlParameter], parameters: List[Dict]) -> List[Dict]:
|
|
1700
|
+
"""
|
|
1701
|
+
Builds the parameters for the control implementation.
|
|
1702
|
+
:param List[ControlParameter] base_control_params: The base control parameters.
|
|
1703
|
+
:param List[Dict] parameters: The parameters to build.
|
|
1704
|
+
:return: The built parameters.
|
|
1705
|
+
:rtype: List[Dict]
|
|
1706
|
+
"""
|
|
1707
|
+
new_params = []
|
|
1708
|
+
if len(base_control_params) >= len(parameters):
|
|
1709
|
+
for index, base_param in enumerate(base_control_params):
|
|
1710
|
+
if len(parameters) >= index + 1:
|
|
1711
|
+
new_param_dict = {}
|
|
1712
|
+
new_param_dict["name"] = base_param.parameterId
|
|
1713
|
+
new_param_dict["value"] = parameters[index].get("value") if parameters[index] else base_param.default
|
|
1714
|
+
new_params.append(new_param_dict)
|
|
1715
|
+
return new_params
|
|
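A minimal sketch of the pairing logic, using SimpleNamespace stand-ins in place of real ControlParameter objects:

from types import SimpleNamespace
base = [SimpleNamespace(parameterId="ac-1_prm_1", default="90 days"),
        SimpleNamespace(parameterId="ac-1_prm_2", default="annually")]
build_params(base, [{"value": "60 days"}])
# -> [{"name": "ac-1_prm_1", "value": "60 days"}]  (base parameters without a supplied value are skipped)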
1716
|
+
|
|
1717
|
+
|
|
1718
|
+
def map_implementation_status(status: str) -> str:
|
|
1719
|
+
"""
|
|
1720
|
+
Maps the implementation status to the appropriate value.
|
|
1721
|
+
:param str status: The status to map.
|
|
1722
|
+
:return: The mapped status.
|
|
1723
|
+
:rtype: str
|
|
1724
|
+
"""
|
|
1725
|
+
|
|
1726
|
+
if status and status.lower() == "implemented":
|
|
1727
|
+
return ControlImplementationStatus.Implemented.value
|
|
1728
|
+
elif status and status.lower() == "fully implemented":
|
|
1729
|
+
return ControlImplementationStatus.Implemented.value
|
|
1730
|
+
elif status and status.lower() == "partially implemented":
|
|
1731
|
+
return ControlImplementationStatus.PartiallyImplemented.value
|
|
1732
|
+
elif status and status.lower() == "planned":
|
|
1733
|
+
return ControlImplementationStatus.Planned.value
|
|
1734
|
+
elif status and status.lower() == "not applicable":
|
|
1735
|
+
return ControlImplementationStatus.NA.value
|
|
1736
|
+
elif status and status.lower() == "alternative implementation":
|
|
1737
|
+
return ControlImplementationStatus.FullyImplemented.value
|
|
1738
|
+
else:
|
|
1739
|
+
return ControlImplementationStatus.NotImplemented.value
|
|
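A few hedged examples of the mapping behavior; the exact strings depend on the ControlImplementationStatus enum:

map_implementation_status("Partially Implemented") == ControlImplementationStatus.PartiallyImplemented.value  # True
map_implementation_status(None)   # falls through to ControlImplementationStatus.NotImplemented.value
map_implementation_status("Alternative Implementation")  # mapped to FullyImplemented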
1740
|
+
|
|
1741
|
+
|
|
1742
|
+
def update_existing_control(
|
|
1743
|
+
control: ControlImplementation, status: str, statement: str, responsibility: str, primary_role: Dict, parent_id: int
|
|
1744
|
+
):
|
|
1745
|
+
"""
|
|
1746
|
+
Updates an existing control with new information.
|
|
1747
|
+
:param ControlImplementation control: The control implementation object.
|
|
1748
|
+
:param str status: The status of the implementation.
|
|
1749
|
+
:param str statement: The statement of the implementation.
|
|
1750
|
+
:param str responsibility: The responsibility of the implementation.
|
|
1751
|
+
:param Dict primary_role: The primary role of the implementation.
|
|
1752
|
+
:param int parent_id: The parent ID.
|
|
1753
|
+
"""
|
|
1754
|
+
state_text = clean_statement(statement)
|
|
1755
|
+
justify = (
|
|
1756
|
+
state_text or "Unknown" if map_implementation_status(status) == ControlImplementationStatus.NA.value else None
|
|
1757
|
+
)
|
|
1758
|
+
control.parentId = parent_id
|
|
1759
|
+
control.status = map_implementation_status(status)
|
|
1760
|
+
control.exclusionJustification = justify
|
|
1761
|
+
_, planned_date, steps_to_implement = create_control_implementation_defaults(status)
|
|
1762
|
+
if not control.plannedImplementationDate and planned_date:
|
|
1763
|
+
control.plannedImplementationDate = planned_date
|
|
1764
|
+
if not control.stepsToImplement and steps_to_implement:
|
|
1765
|
+
control.stepsToImplement = steps_to_implement
|
|
1766
|
+
control.implementation = state_text
|
|
1767
|
+
control.responsibility = responsibility
|
|
1768
|
+
control.systemRoleId = primary_role.get("id") if primary_role and isinstance(primary_role, dict) else None
|
|
1769
|
+
# Clean statement
|
|
1770
|
+
# So, exclusion
|
|
1771
|
+
# justification is required for "N/A"... "Planned" requires Planned Implementation Date and Steps to Implement....
|
|
1772
|
+
# the only other validation is: Needs "Implementation", "Cloud Implementation", or "Customer Responsibility" if "Implemented" or "Partially Implemented".
|
|
1773
|
+
|
|
1774
|
+
# Convert the model to a dict and back to a model to workaround these odd 400 errors.
|
|
1775
|
+
try:
|
|
1776
|
+
ControlImplementation(**control.dict()).save()
|
|
1777
|
+
except Exception as e:
|
|
1778
|
+
logger.warning(f"Error updating control: {control.id} - {e}")
|
|
1779
|
+
|
|
1780
|
+
|
|
1781
|
+
def format_alternative_control_key(control_id: str) -> str:
|
|
1782
|
+
"""
|
|
1783
|
+
Formats the key for the alternative control information.
|
|
1784
|
+
:param str control_id: The control ID to format.
|
|
1785
|
+
:return: The formatted control ID.
|
|
1786
|
+
:rtype: str
|
|
1787
|
+
"""
|
|
1788
|
+
# Unpack the control_family and the rest (assumes there's at least one '-')
|
|
1789
|
+
control_family, *rest = control_id.split("-")
|
|
1790
|
+
rest_joined = "-".join(rest) # Join the rest back in case there are multiple '-'
|
|
1791
|
+
|
|
1792
|
+
# Check for '(' and split if needed, also handling the case without '(' more cleanly
|
|
1793
|
+
if "(" in rest_joined:
|
|
1794
|
+
control_num, control_ending = rest_joined.split("(", 1) # Split once
|
|
1795
|
+
control_ending = control_ending.rstrip(")") # Remove trailing ')' if present
|
|
1796
|
+
alternative_control_id = f"{control_family}-{format_int(int(control_num))}({control_ending})"
|
|
1797
|
+
else:
|
|
1798
|
+
control_num = rest_joined
|
|
1799
|
+
alternative_control_id = f"{control_family}-{format_int(int(control_num))}"
|
|
1800
|
+
|
|
1801
|
+
return alternative_control_id
|
|
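Worked examples of the reformatting, with format_int (below) zero-padding single-digit control numbers:

format_alternative_control_key("AC-2(1)")   # -> "AC-02(1)"
format_alternative_control_key("AC-17")     # -> "AC-17"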
1802
|
+
|
|
1803
|
+
|
|
1804
|
+
def format_int(n: int) -> str:
|
|
1805
|
+
"""
|
|
1806
|
+
Formats an integer to a string with a leading zero if it's a single digit.
|
|
1807
|
+
:param int n: The integer to format.
|
|
1808
|
+
:return: The formatted integer as a string.
|
|
1809
|
+
:rtype: str
|
|
1810
|
+
"""
|
|
1811
|
+
# Check if the integer is between 0 and 9 (inclusive)
|
|
1812
|
+
if 0 <= n <= 9:
|
|
1813
|
+
# Prepend a "0" if it's a single digit
|
|
1814
|
+
return f"0{n}"
|
|
1815
|
+
else:
|
|
1816
|
+
# Just convert to string if it's not a single digit
|
|
1817
|
+
return str(n)
|
|
1818
|
+
|
|
1819
|
+
|
|
1820
|
+
def build_data_dict(tables: List) -> Dict:
|
|
1821
|
+
"""
|
|
1822
|
+
Builds a dictionary from a list of tables.
|
|
1823
|
+
|
|
1824
|
+
:param List tables: A list of tables.
|
|
1825
|
+
:return: A dictionary containing the tables.
|
|
1826
|
+
:rtype: Dict
|
|
1827
|
+
"""
|
|
1828
|
+
table_dict = {}
|
|
1829
|
+
for table in tables:
|
|
1830
|
+
k_parts = list(table.keys())[0].split()
|
|
1831
|
+
if k_parts:
|
|
1832
|
+
key_control = k_parts[0]
|
|
1833
|
+
if key_control in table_dict:
|
|
1834
|
+
table_dict[key_control].append(table)
|
|
1835
|
+
else:
|
|
1836
|
+
table_dict[key_control] = [table]
|
|
1837
|
+
return table_dict
|
|
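A sketch of the grouping, with placeholder table payloads:

tables = [
    {"AC-1 Control Summary Information": {}},
    {"AC-1 What is the solution and how is it implemented?": {}},
    {"AC-2 Control Summary Information": {}},
]
build_data_dict(tables)
# -> {"AC-1": [<first two tables>], "AC-2": [<third table>]}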
1838
|
+
|
|
1839
|
+
|
|
1840
|
+
def processing_data_from_ssp_doc(processed_data: Dict, user_id: str, doc_text_dict: Dict) -> int:
|
|
1841
|
+
"""
|
|
1842
|
+
Finalizes the processing of data by creating necessary records in the system.
|
|
1843
|
+
:param Dict[str, Any] processed_data: The processed data.
|
|
1844
|
+
:param str user_id: The ID of the user performing the operation.
|
|
1845
|
+
:param Dict[str, str] doc_text_dict: The dictionary containing the text from the document.
|
|
1846
|
+
:return: The ID of the parent object.
|
|
1847
|
+
:rtype: int
|
|
1848
|
+
"""
|
|
1849
|
+
processed_data["doc_text_dict"] = doc_text_dict
|
|
1850
|
+
# Process SSP Document if present
|
|
1851
|
+
if not processed_data.get("ssp_doc"):
|
|
1852
|
+
logger.warning("No SSP Document found")
|
|
1853
|
+
sys.exit(1)
|
|
1854
|
+
ssp = process_ssp_doc(
|
|
1855
|
+
processed_data.get("ssp_doc"),
|
|
1856
|
+
processed_data,
|
|
1857
|
+
user_id,
|
|
1858
|
+
)
|
|
1859
|
+
parent_id = ssp.id
|
|
1860
|
+
logger.info(f"Parent ID: {parent_id}")
|
|
1861
|
+
parent_module = "securityplans"
|
|
1862
|
+
approval_date = ssp.approvalDate
|
|
1863
|
+
|
|
1864
|
+
# Create stakeholders
|
|
1865
|
+
if processed_data.get("stakeholders"):
|
|
1866
|
+
create_stakeholders(processed_data.get("stakeholders"), parent_id, parent_module)
|
|
1867
|
+
# Process services if present
|
|
1868
|
+
if processed_data.get("services"):
|
|
1869
|
+
create_leveraged_authorizations(
|
|
1870
|
+
processed_data["services"], user_id, parent_id, approval_date
|
|
1871
|
+
) # Assuming parent_id is the ssp_id for simplicity
|
|
1872
|
+
|
|
1873
|
+
# Process ports and protocols if present
|
|
1874
|
+
if processed_data.get("ports_and_protocols"):
|
|
1875
|
+
create_ports_and_protocols(
|
|
1876
|
+
processed_data["ports_and_protocols"], parent_id
|
|
1877
|
+
) # Assuming parent_id is the ssp_id for simplicity
|
|
1878
|
+
return parent_id
|
|
1879
|
+
|
|
1880
|
+
|
|
1881
|
+
def create_stakeholders(stakeholders: List[Person], parent_id: int, parent_module: str) -> None:
|
|
1882
|
+
"""
|
|
1883
|
+
Creates stakeholders in RegScale.
|
|
1884
|
+
:param List[Person] stakeholders: A list of Person objects representing the stakeholders.
|
|
1885
|
+
:param int parent_id: The ID of the parent object.
|
|
1886
|
+
:param str parent_module: The parent module.
|
|
1887
|
+
|
|
1888
|
+
"""
|
|
1889
|
+
logger.info(f"Creating Stakeholders: {parent_id} - {parent_module}")
|
|
1890
|
+
existing_stakeholders: List[StakeHolder] = StakeHolder.get_all_by_parent(
|
|
1891
|
+
parent_id=parent_id, parent_module=parent_module
|
|
1892
|
+
)
|
|
1893
|
+
for person in stakeholders:
|
|
1894
|
+
existing_stakeholder = next(
|
|
1895
|
+
(s for s in existing_stakeholders if s.name == person.name and s.email == person.email),
|
|
1896
|
+
None,
|
|
1897
|
+
)
|
|
1898
|
+
if existing_stakeholder:
|
|
1899
|
+
logger.debug(existing_stakeholder.model_dump())
|
|
1900
|
+
existing_stakeholder.name = person.name
|
|
1901
|
+
existing_stakeholder.email = person.email
|
|
1902
|
+
existing_stakeholder.phone = person.phone
|
|
1903
|
+
existing_stakeholder.title = person.title
|
|
1904
|
+
existing_stakeholder.save()
|
|
1905
|
+
else:
|
|
1906
|
+
StakeHolder(
|
|
1907
|
+
name=person.name,
|
|
1908
|
+
email=person.email,
|
|
1909
|
+
phone=person.phone,
|
|
1910
|
+
title=person.title,
|
|
1911
|
+
parentId=parent_id,
|
|
1912
|
+
parentModule=parent_module,
|
|
1913
|
+
).create()
|
|
1914
|
+
|
|
1915
|
+
|
|
1916
|
+
def process_cloud_info(ssp_doc: SSPDoc) -> Dict:
    """
    Processes the cloud information from the SSP document.
    :param SSPDoc ssp_doc: The SSP document object.
    :return: A dictionary containing the cloud deployment model information.
    :rtype: Dict
    """
    return {
        "saas": "SaaS" in ssp_doc.service_model,
        "paas": "PaaS" in ssp_doc.service_model,
        "iaas": "IaaS" in ssp_doc.service_model,
        "other_service_model": not any(service in ssp_doc.service_model for service in ["SaaS", "PaaS", "IaaS"]),
        "deploy_gov": "gov" in ssp_doc.deployment_model.lower() or "government" in ssp_doc.deployment_model.lower(),
        "deploy_hybrid": "hybrid" in ssp_doc.deployment_model.lower(),
        "deploy_private": "private" in ssp_doc.deployment_model.lower(),
        "deploy_public": "public" in ssp_doc.deployment_model.lower(),
        "deploy_other": not any(
            deploy in ssp_doc.deployment_model.lower()
            for deploy in ["gov", "government", "hybrid", "private", "public"]
        ),
    }
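
# Quick, self-contained check of the flag mapping above; SimpleNamespace is only an
# illustrative stand-in for the real SSPDoc object, and the strings are example data.
from types import SimpleNamespace

_doc = SimpleNamespace(service_model="SaaS, PaaS", deployment_model="Government Community Cloud")
_flags = process_cloud_info(_doc)
# _flags["saas"] and _flags["paas"] are True, _flags["iaas"] is False,
# _flags["deploy_gov"] is True, and the two "other" flags are False because at least
# one known service-model and deployment-model keyword matched.
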
def process_ssp_doc(
    ssp_doc: SSPDoc,
    data: Dict,
    user_id: str,
) -> SecurityPlan:
    """
    Processes the SSP document.
    :param SSPDoc ssp_doc: The SSP document object.
    :param Dict[str, Any] data: The processed data.
    :param str user_id: The ID of the user performing the operation.
    :return: The security plan object.
    :rtype: SecurityPlan
    """
    if ssp_doc:
        cloud_info = process_cloud_info(ssp_doc)
        plans = SecurityPlan.get_list()
        plan_count = len(plans)
        logger.info(f"Found SSP Count of: {plan_count}")
        ssp = None
        for plan in plans:
            if plan.systemName == ssp_doc.name:
                ssp = SecurityPlan.get_object(object_id=plan.id)
                logger.info(f"Found SSP: {plan.systemName}")
                break
        if not ssp:
            ssp = create_ssp(ssp_doc, cloud_info, user_id, data)
        else:
            ssp = save_security_plan_info(ssp, cloud_info, ssp_doc, user_id, data)
        return ssp


def get_expiration_date(dt_format: Optional[str] = "%Y-%m-%d %H:%M:%S") -> str:
    """
    Return the expiration date, which is 3 years from today

    :param Optional[str] dt_format: desired format for datetime string, defaults to "%Y-%m-%d %H:%M:%S"
    :return: Expiration date as a string, 3 years from today
    :rtype: str
    """
    expiration_date = datetime.datetime.now() + relativedelta(years=3)
    return expiration_date.strftime(dt_format)
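
# Self-contained illustration of the same date arithmetic as get_expiration_date
# (dateutil is already a dependency of the function above); the exact value depends
# on today's date, so the comment shows only the output shape.
import datetime
from dateutil.relativedelta import relativedelta

_example_expiration = (datetime.datetime.now() + relativedelta(years=3)).strftime("%Y-%m-%d %H:%M:%S")
# get_expiration_date() returns a string in this default format, e.g. "2027-05-01 09:30:00";
# get_expiration_date("%Y-%m-%d") would drop the time portion.
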
def create_ssp(ssp_doc: SSPDoc, cloud_info: Dict, user_id: str, data: Dict) -> SecurityPlan:
    """
    Creates a security plan in RegScale.
    :param SSPDoc ssp_doc: The SSP document object.
    :param Dict cloud_info: A dictionary containing cloud deployment model information.
    :param str user_id: The ID of the user creating the security plan.
    :param Dict[str, Any] data: The processed data.
    :return: The security plan object.
    :rtype: SecurityPlan
    """
    compliance_setting = get_fedramp_compliance_setting()
    doc_text_dict = data.get("doc_text_dict")

    systemdescription = " ".join(doc_text_dict[SYSTEM_DESCRIPTION]) if SYSTEM_DESCRIPTION in doc_text_dict else None
    authboundarydescription = (
        " ".join(doc_text_dict[AUTHORIZATION_BOUNDARY]) if AUTHORIZATION_BOUNDARY in doc_text_dict else None
    )
    networkarchdescription = (
        " ".join(doc_text_dict[NETWORK_ARCHITECTURE]) if NETWORK_ARCHITECTURE in doc_text_dict else None
    )
    systemenvironment = " ".join(doc_text_dict[ENVIRONMENT]) if ENVIRONMENT in doc_text_dict else None
    dataflows = " ".join(doc_text_dict[DATA_FLOW]) if DATA_FLOW in doc_text_dict else None
    owner, isso = data.get("owner"), data.get("isso")
    prepared_by: PreparedBy = data.get("prepared_by")
    prepared_for: PreparedBy = data.get("prepared_for")
    compliance_setting_id = compliance_setting.id if compliance_setting else 2
    ssp = SecurityPlan(
        systemName=ssp_doc.name,
        fedrampId=ssp_doc.fedramp_id,
        systemOwnerId=owner.id if owner else user_id,
        planInformationSystemSecurityOfficerId=isso.id if isso else user_id,
        status="Operational",
        description=systemdescription,
        authorizationBoundary=authboundarydescription,
        networkArchitecture=networkarchdescription,
        environment=systemenvironment,
        dataFlow=dataflows,
        tenantsId=1,
        overallCategorization=ssp_doc.fips_199_level,
        bModelSaaS=cloud_info.get("saas", False),
        bModelPaaS=cloud_info.get("paas", False),
        bModelIaaS=cloud_info.get("iaas", False),
        bModelOther=cloud_info.get("other_service_model", False),
        bDeployGov=cloud_info.get("deploy_gov", False),
        bDeployHybrid=cloud_info.get("deploy_hybrid", False),
        bDeployPrivate=cloud_info.get("deploy_private", False),
        bDeployPublic=cloud_info.get("deploy_public", False),
        bDeployOther=cloud_info.get("deploy_other", False),
        deployOtherRemarks=ssp_doc.deployment_model,
        dateSubmitted=ssp_doc.date_submitted,
        approvalDate=ssp_doc.approval_date,
        expirationDate=get_expiration_date(),
        fedrampAuthorizationLevel=ssp_doc.fips_199_level,
        defaultAssessmentDays=365,
        version=data.get("version", "1.0"),
        executiveSummary="\n".join(doc_text_dict.get("Introduction", [])),
        purpose="\n".join(doc_text_dict.get("Purpose", [])),
        complianceSettingsId=compliance_setting_id,
    )
    if prepared_by:
        ssp.cspOrgName = prepared_by.name
        ssp.cspAddress = prepared_by.street
        ssp.cspOffice = prepared_by.building
        ssp.cspCityState = prepared_by.city_state_zip
    if prepared_for:
        ssp.prepOrgName = prepared_for.name
        ssp.prepAddress = prepared_for.street
        ssp.prepOffice = prepared_for.building
        ssp.prepCityState = prepared_for.city_state_zip
    return ssp.create()
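
# Minimal illustration of how the narrative sections above are assembled: each
# doc_text_dict value appears to be a list of text chunks, joined only when the
# section heading was found in the parsed document. The key string below is example
# data, not the real SYSTEM_DESCRIPTION constant.
_doc_text_dict = {"System Description": ["Paragraph one.", "Paragraph two."]}
_system_description = (
    " ".join(_doc_text_dict["System Description"]) if "System Description" in _doc_text_dict else None
)  # "Paragraph one. Paragraph two."; a missing heading yields None instead.
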
def save_security_plan_info(
    ssp: SecurityPlan, cloud_info: Dict, ssp_doc: SSPDoc, user_id: str, data: Dict
) -> SecurityPlan:
    """
    Saves the security plan information to the database.
    :param SecurityPlan ssp: The security plan object.
    :param Dict cloud_info: A dictionary containing cloud deployment model information.
    :param SSPDoc ssp_doc: The SSP document object.
    :param str user_id: The ID of the user performing the operation.
    :param Dict[str, Any] data: The processed data.
    :return: The updated security plan object.
    :rtype: SecurityPlan
    """
    prepared_by: PreparedBy = data.get("prepared_by")
    prepared_for: PreparedBy = data.get("prepared_for")
    doc_text_dict: Dict = data.get("doc_text_dict")
    owner, isso = data.get("owner"), data.get("isso")

    logger.info(f"Updating SSP: {ssp.systemName}")
    ssp.fedrampId = ssp_doc.fedramp_id
    ssp.systemName = ssp_doc.name
    ssp.status = "Operational"
    ssp.description = ssp_doc.description
    ssp.authorizationBoundary = ssp_doc.authorization_path
    ssp.systemOwnerId = owner.id if owner else user_id
    ssp.planInformationSystemSecurityOfficerId = isso.id if isso else user_id
    ssp.overallCategorization = ssp_doc.fips_199_level
    ssp.bModelSaaS = cloud_info.get("saas", False)
    ssp.bModelPaaS = cloud_info.get("paas", False)
    ssp.bModelIaaS = cloud_info.get("iaas", False)
    ssp.bModelOther = cloud_info.get("other_service_model", False)
    ssp.bDeployGov = cloud_info.get("deploy_gov", False)
    ssp.bDeployHybrid = cloud_info.get("deploy_hybrid", False)
    ssp.bDeployPrivate = cloud_info.get("deploy_private", False)
    ssp.bDeployPublic = cloud_info.get("deploy_public", False)
    ssp.bDeployOther = cloud_info.get("deploy_other", False)
    ssp.deployOtherRemarks = ssp_doc.deployment_model
    ssp.dateSubmitted = ssp_doc.date_submitted
    ssp.approvalDate = ssp_doc.approval_date
    ssp.expirationDate = get_expiration_date()  # ssp_doc.expiration_date
    ssp.fedrampAuthorizationLevel = ssp_doc.fips_199_level
    ssp.version = data.get("version", "1.0")
    if prepared_by:
        ssp.cspOrgName = prepared_by.name
        ssp.cspAddress = prepared_by.street
        ssp.cspOffice = prepared_by.building
        ssp.cspCityState = prepared_by.city_state_zip
    if prepared_for:
        ssp.prepOrgName = prepared_for.name
        ssp.prepAddress = prepared_for.street
        ssp.prepOffice = prepared_for.building
        ssp.prepCityState = prepared_for.city_state_zip

    ssp.executiveSummary = "\n".join(doc_text_dict.get("Introduction", []))
    ssp.purpose = "\n".join(doc_text_dict.get("Purpose", []))
    ssp.save()
    return ssp


def create_leveraged_authorizations(services: List[LeveragedService], user_id: str, ssp_id: int, approval_date: str):
    """
    Creates leveraged authorization records for each service.

    :param List[LeveragedService] services: A list of services to be created.
    :param str user_id: The ID of the user creating the services.
    :param int ssp_id: The ID of the security plan these services are associated with.
    :param str approval_date: The date of approval.

    """
    existing_authorizations: List[LeveragedAuthorization] = LeveragedAuthorization.get_all_by_parent(parent_id=ssp_id)
    logger.info(f"Found {len(existing_authorizations)} existing LeveragedAuthorizations")
    for service in services:
        existing_service = next(
            (a for a in existing_authorizations if a.fedrampId == service.auth_type_fedramp_id),
            None,
        )

        if existing_service:
            logger.debug(existing_service.model_dump())
            existing_service.title = service.fedramp_csp_name
            existing_service.fedrampId = service.auth_type_fedramp_id
            existing_service.ownerId = user_id
            existing_service.securityPlanId = ssp_id
            existing_service.dateAuthorized = approval_date or get_current_datetime()
            existing_service.description = service.cso_name
            existing_service.dataTypes = service.data_types or "unknown"
            existing_service.authorizedUserTypes = service.authorized_user_authentication or "unknown"
            existing_service.impactLevel = service.impact_level
            existing_service.natureOfAgreement = service.agreement_type or "unknown"
            existing_service.tenantsId = 1
            existing_service.save()
        else:
            LeveragedAuthorization(
                title=service.fedramp_csp_name,
                fedrampId=service.auth_type_fedramp_id or "unknown",
                ownerId=user_id,
                securityPlanId=ssp_id,
                dateAuthorized=approval_date or get_current_datetime(),
                servicesUsed="unknown",
                description=service.cso_name,
                dataTypes=service.data_types or "unknown",
                authorizationType="SSO",
                authorizedUserTypes=service.authorized_user_authentication or "unknown",
                authenticationType="unknown",
                impactLevel=service.impact_level or "Low",
                natureOfAgreement=service.agreement_type or "unknown",
                tenantsId=1,
            ).create()
        logger.debug(f"LeveragedAuthorization: {service.fedramp_csp_name}")


def create_ports_and_protocols(ports_and_protocols: List[PortsAndProtocolData], ssp_id: int):
    """
    Creates port and protocol records for each entry.

    :param List[PortsAndProtocolData] ports_and_protocols: A list of ports and protocols to be created.
    :param int ssp_id: The ID of the security plan these ports and protocols are associated with.

    """
    existing_ports: List[PortsProtocol] = PortsProtocol.get_all_by_parent(
        parent_id=ssp_id, parent_module="securityplans"
    )
    logger.info(f"Found {len(existing_ports)} existing Ports & Protocols")
    created_count = 0
    for port in ports_and_protocols:
        port_to_create = PortsProtocol(
            service=port.service,
            startPort=port.start_port,
            endPort=port.end_port,
            protocol=port.protocol,
            purpose=port.purpose,
            usedBy=port.used_by,
            parentId=ssp_id,
            parentModule="securityplans",
        )
        existing = False
        for existing_port in existing_ports:
            if (
                existing_port.startPort == port_to_create.startPort
                and existing_port.endPort == port_to_create.endPort
                and existing_port.protocol == port_to_create.protocol
                and existing_port.service == port_to_create.service
                and existing_port.purpose == port_to_create.purpose
                and existing_port.usedBy == port_to_create.usedBy
                and existing_port.parentId == port_to_create.parentId
                and existing_port.parentModule == port_to_create.parentModule
            ):
                existing = True
                break

        if not existing:
            port_to_create.create()
            created_count += 1
    logger.info(f"Created {created_count} Port & Protocols")
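
# An equivalent, self-contained way to express the duplicate check above: compare
# tuple "identity keys" instead of a field-by-field comparison chain. Purely a
# sketch with generic objects; the real PortsProtocol model is not required here.
def _port_key(p):
    """Fields that together identify a port/protocol record."""
    return (p.startPort, p.endPort, p.protocol, p.service, p.purpose, p.usedBy, p.parentId, p.parentModule)

# existing_keys = {_port_key(p) for p in existing_ports}
# is_duplicate = _port_key(port_to_create) in existing_keys
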
def extract_and_upload_images(file_name: str, parent_id: int) -> None:
    """
    Extracts embedded images from a document and uploads them to RegScale with improved filenames.

    :param str file_name: The path to the document file.
    :param int parent_id: The parent ID in RegScale to associate the images with.

    """
    logger.debug(f"Processing embedded images in {file_name} for parent ID {parent_id}...")
    existing_files = fetch_existing_files(parent_id)
    extracted_files_path = extract_embedded_files(file_name)
    upload_files(extracted_files_path, existing_files, parent_id)


def fetch_existing_files(parent_id: int) -> list:
    """
    Fetches existing files for a given parent ID from RegScale.

    :param int parent_id: The parent ID whose files to fetch.
    :return: A list of existing files.
    :rtype: list
    """
    return File.get_files_for_parent_from_regscale(parent_id=parent_id, parent_module="securityplans")


def extract_embedded_files(file_name: str) -> str:
    """
    Extracts embedded files from a document and returns the path where they are stored.

    :param str file_name: The path to the document file.
    :return: The path where embedded files are extracted to.
    :rtype: str
    """
    file_dump_path = os.path.join(gettempdir(), "imagedump")
    with zipfile.ZipFile(file_name, mode="r") as archive:
        for file in archive.filelist:
            logger.debug(f"Extracting file: {file.filename}")
            if file.filename.startswith("word/media/") and file.file_size > 200000:  # 200KB filter
                archive.extract(file, path=file_dump_path)
    return file_dump_path
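
# Stand-alone sketch of the same .docx inspection using only the standard library:
# a .docx is a ZIP archive, and embedded images live under word/media/. The path in
# the usage comment is a hypothetical example.
import zipfile

def _list_large_media(docx_path: str, min_bytes: int = 200_000) -> list:
    """Return the embedded media entry names larger than min_bytes."""
    with zipfile.ZipFile(docx_path, mode="r") as archive:
        return [
            f.filename
            for f in archive.filelist
            if f.filename.startswith("word/media/") and f.file_size > min_bytes
        ]

# _list_large_media("example_ssp.docx") -> e.g. ["word/media/image3.png", ...]
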
def upload_files(extracted_files_path: str, existing_files: list, parent_id: int) -> None:
    """
    Uploads files from a specified path to RegScale, avoiding duplicates.

    :param str extracted_files_path: The path where files are stored.
    :param list existing_files: A list of files already existing in RegScale to avoid duplicates.
    :param int parent_id: The parent ID in RegScale to associate the uploaded files with.

    """
    media_path = os.path.join(extracted_files_path, "word", "media")
    if not os.path.exists(media_path):
        os.makedirs(media_path)

    for filename in os.listdir(media_path):
        full_file_path = os.path.join(media_path, filename)
        if os.path.isfile(full_file_path):
            if not file_already_exists(filename, existing_files):
                logger.info(f"Uploading embedded image to RegScale: {filename}")
                upload_file_to_regscale(full_file_path, parent_id)


def file_already_exists(filename: str, existing_files: list) -> bool:
    """
    Checks if a file already exists in RegScale.

    :param str filename: The name of the file to check.
    :param list existing_files: A list of files already existing in RegScale.
    :return: True if the file exists, False otherwise.
    :rtype: bool
    """
    return any(f.trustedDisplayName == filename for f in existing_files)
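
# Self-contained example of the display-name check above; SimpleNamespace stands in
# for the RegScale File records (an illustrative assumption, not the real model).
from types import SimpleNamespace

_existing = [SimpleNamespace(trustedDisplayName="image1.png")]
_already_there = file_already_exists("image1.png", _existing)  # True
_new_file = file_already_exists("diagram.png", _existing)      # False
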
def upload_file_to_regscale(
    file_path: str,
    parent_id: int,
) -> None:
    """
    Uploads a single file to RegScale.

    :param str file_path: The full path to the file to upload.
    :param int parent_id: The parent ID in RegScale to associate the file with.
    """
    api = Api()
    File.upload_file_to_regscale(
        file_name=file_path,
        parent_id=parent_id,
        parent_module=SecurityPlan.get_module_slug(),
        api=api,
    )


def safe_get_first_key(dictionary: dict) -> Optional[str]:
    """Safely get the first key of a dictionary.
    :param dict dictionary: The dictionary to get the first key from.
    :return: The first key of the dictionary, or None if the dictionary is empty.
    :rtype: Optional[str]
    """
    try:
        return next(iter(dictionary))
    except StopIteration:
        return None
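
# Behavior sketch for the helper above (illustrative values): dict iteration order is
# insertion order in Python 3.7+, so the "first key" is the first key inserted, and an
# empty dict yields None.
_example_first = safe_get_first_key({"Version": "1.2", "Date": "2024-01-01"})  # "Version"
_example_empty = safe_get_first_key({})  # None
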
def parse_version(version_str: str) -> float:
    """Parse version string to a float, safely.
    :param str version_str: The version string to parse.
    :return: The version number as a float, or 0 if the version string is not a valid number.
    :rtype: float
    """
    try:
        if not version_str:
            return 0
        return float(version_str)
    except ValueError:
        return 0
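
# Illustrative checks of the tolerant parsing above: anything float() cannot handle,
# including empty strings, falls back to 0.
_v1 = parse_version("1.2")    # 1.2
_v2 = parse_version("")       # 0
_v3 = parse_version("draft")  # 0
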
def get_max_version(entries: List[Dict]) -> Optional[str]:
    """Find the maximum version from a list of entries.
    :param List[Dict] entries: The list of entries to find the maximum version from.
    :return: The maximum version from the entries, or None if no valid versions are found.
    :rtype: Optional[str]
    """
    max_version = None
    for entry in entries:
        version_str = entry.get("Version", "")
        version_num = parse_version(version_str)
        if version_num is not None:
            max_version = max(max_version, version_str, key=parse_version)
    logger.debug(f"Version: {max_version}")
    return max_version
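
# Example of picking the highest numeric "Version" across revision-history rows
# (illustrative data): entries whose version does not parse are treated as 0 by
# parse_version, so they never win over a real version number.
_history = [{"Version": "1.0"}, {"Version": "2.1"}, {"Version": "draft"}]
_latest = get_max_version(_history)  # "2.1"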