regscale-cli 6.16.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli has been flagged as possibly problematic. See the registry's advisory page for details.
- regscale/__init__.py +1 -0
- regscale/airflow/__init__.py +9 -0
- regscale/airflow/azure/__init__.py +9 -0
- regscale/airflow/azure/cli.py +89 -0
- regscale/airflow/azure/upload_dags.py +116 -0
- regscale/airflow/click_dags.py +127 -0
- regscale/airflow/click_mixins.py +82 -0
- regscale/airflow/config.py +25 -0
- regscale/airflow/factories/__init__.py +0 -0
- regscale/airflow/factories/connections.py +58 -0
- regscale/airflow/factories/workflows.py +78 -0
- regscale/airflow/hierarchy.py +88 -0
- regscale/airflow/operators/__init__.py +0 -0
- regscale/airflow/operators/click.py +36 -0
- regscale/airflow/sensors/__init__.py +0 -0
- regscale/airflow/sensors/sql.py +107 -0
- regscale/airflow/sessions/__init__.py +0 -0
- regscale/airflow/sessions/sql/__init__.py +3 -0
- regscale/airflow/sessions/sql/queries.py +64 -0
- regscale/airflow/sessions/sql/sql_server_queries.py +248 -0
- regscale/airflow/tasks/__init__.py +0 -0
- regscale/airflow/tasks/branches.py +22 -0
- regscale/airflow/tasks/cli.py +116 -0
- regscale/airflow/tasks/click.py +73 -0
- regscale/airflow/tasks/debugging.py +9 -0
- regscale/airflow/tasks/groups.py +116 -0
- regscale/airflow/tasks/init.py +60 -0
- regscale/airflow/tasks/states.py +47 -0
- regscale/airflow/tasks/workflows.py +36 -0
- regscale/ansible/__init__.py +9 -0
- regscale/core/__init__.py +0 -0
- regscale/core/app/__init__.py +3 -0
- regscale/core/app/api.py +571 -0
- regscale/core/app/application.py +665 -0
- regscale/core/app/internal/__init__.py +136 -0
- regscale/core/app/internal/admin_actions.py +230 -0
- regscale/core/app/internal/assessments_editor.py +873 -0
- regscale/core/app/internal/catalog.py +316 -0
- regscale/core/app/internal/comparison.py +459 -0
- regscale/core/app/internal/control_editor.py +571 -0
- regscale/core/app/internal/encrypt.py +79 -0
- regscale/core/app/internal/evidence.py +1240 -0
- regscale/core/app/internal/file_uploads.py +151 -0
- regscale/core/app/internal/healthcheck.py +66 -0
- regscale/core/app/internal/login.py +305 -0
- regscale/core/app/internal/migrations.py +240 -0
- regscale/core/app/internal/model_editor.py +1701 -0
- regscale/core/app/internal/poam_editor.py +632 -0
- regscale/core/app/internal/workflow.py +105 -0
- regscale/core/app/logz.py +74 -0
- regscale/core/app/utils/XMLIR.py +258 -0
- regscale/core/app/utils/__init__.py +0 -0
- regscale/core/app/utils/api_handler.py +358 -0
- regscale/core/app/utils/app_utils.py +1110 -0
- regscale/core/app/utils/catalog_utils/__init__.py +0 -0
- regscale/core/app/utils/catalog_utils/common.py +91 -0
- regscale/core/app/utils/catalog_utils/compare_catalog.py +193 -0
- regscale/core/app/utils/catalog_utils/diagnostic_catalog.py +97 -0
- regscale/core/app/utils/catalog_utils/download_catalog.py +103 -0
- regscale/core/app/utils/catalog_utils/update_catalog.py +718 -0
- regscale/core/app/utils/catalog_utils/update_catalog_v2.py +1378 -0
- regscale/core/app/utils/catalog_utils/update_catalog_v3.py +1272 -0
- regscale/core/app/utils/catalog_utils/update_plans.py +334 -0
- regscale/core/app/utils/file_utils.py +238 -0
- regscale/core/app/utils/parser_utils.py +81 -0
- regscale/core/app/utils/pickle_file_handler.py +57 -0
- regscale/core/app/utils/regscale_utils.py +319 -0
- regscale/core/app/utils/report_utils.py +119 -0
- regscale/core/app/utils/variables.py +226 -0
- regscale/core/decorators.py +31 -0
- regscale/core/lazy_group.py +65 -0
- regscale/core/login.py +63 -0
- regscale/core/server/__init__.py +0 -0
- regscale/core/server/flask_api.py +473 -0
- regscale/core/server/helpers.py +373 -0
- regscale/core/server/rest.py +64 -0
- regscale/core/server/static/css/bootstrap.css +6030 -0
- regscale/core/server/static/css/bootstrap.min.css +6 -0
- regscale/core/server/static/css/main.css +176 -0
- regscale/core/server/static/images/regscale-cli.svg +49 -0
- regscale/core/server/static/images/regscale.svg +38 -0
- regscale/core/server/templates/base.html +74 -0
- regscale/core/server/templates/index.html +43 -0
- regscale/core/server/templates/login.html +28 -0
- regscale/core/server/templates/make_base64.html +22 -0
- regscale/core/server/templates/upload_STIG.html +109 -0
- regscale/core/server/templates/upload_STIG_result.html +26 -0
- regscale/core/server/templates/upload_ssp.html +144 -0
- regscale/core/server/templates/upload_ssp_result.html +128 -0
- regscale/core/static/__init__.py +0 -0
- regscale/core/static/regex.py +14 -0
- regscale/core/utils/__init__.py +117 -0
- regscale/core/utils/click_utils.py +13 -0
- regscale/core/utils/date.py +238 -0
- regscale/core/utils/graphql.py +254 -0
- regscale/core/utils/urls.py +23 -0
- regscale/dev/__init__.py +6 -0
- regscale/dev/analysis.py +454 -0
- regscale/dev/cli.py +235 -0
- regscale/dev/code_gen.py +492 -0
- regscale/dev/dirs.py +69 -0
- regscale/dev/docs.py +384 -0
- regscale/dev/monitoring.py +26 -0
- regscale/dev/profiling.py +216 -0
- regscale/exceptions/__init__.py +4 -0
- regscale/exceptions/license_exception.py +7 -0
- regscale/exceptions/validation_exception.py +9 -0
- regscale/integrations/__init__.py +1 -0
- regscale/integrations/commercial/__init__.py +486 -0
- regscale/integrations/commercial/ad.py +433 -0
- regscale/integrations/commercial/amazon/__init__.py +0 -0
- regscale/integrations/commercial/amazon/common.py +106 -0
- regscale/integrations/commercial/aqua/__init__.py +0 -0
- regscale/integrations/commercial/aqua/aqua.py +91 -0
- regscale/integrations/commercial/aws/__init__.py +6 -0
- regscale/integrations/commercial/aws/cli.py +322 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +110 -0
- regscale/integrations/commercial/aws/inventory/base.py +64 -0
- regscale/integrations/commercial/aws/inventory/resources/__init__.py +19 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +234 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +113 -0
- regscale/integrations/commercial/aws/inventory/resources/database.py +101 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +237 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +253 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +240 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +91 -0
- regscale/integrations/commercial/aws/scanner.py +823 -0
- regscale/integrations/commercial/azure/__init__.py +0 -0
- regscale/integrations/commercial/azure/common.py +32 -0
- regscale/integrations/commercial/azure/intune.py +488 -0
- regscale/integrations/commercial/azure/scanner.py +49 -0
- regscale/integrations/commercial/burp.py +78 -0
- regscale/integrations/commercial/cpe.py +144 -0
- regscale/integrations/commercial/crowdstrike.py +1117 -0
- regscale/integrations/commercial/defender.py +1511 -0
- regscale/integrations/commercial/dependabot.py +210 -0
- regscale/integrations/commercial/durosuite/__init__.py +0 -0
- regscale/integrations/commercial/durosuite/api.py +1546 -0
- regscale/integrations/commercial/durosuite/process_devices.py +101 -0
- regscale/integrations/commercial/durosuite/scanner.py +637 -0
- regscale/integrations/commercial/durosuite/variables.py +21 -0
- regscale/integrations/commercial/ecr.py +90 -0
- regscale/integrations/commercial/gcp/__init__.py +237 -0
- regscale/integrations/commercial/gcp/auth.py +96 -0
- regscale/integrations/commercial/gcp/control_tests.py +238 -0
- regscale/integrations/commercial/gcp/variables.py +18 -0
- regscale/integrations/commercial/gitlab.py +332 -0
- regscale/integrations/commercial/grype.py +165 -0
- regscale/integrations/commercial/ibm.py +90 -0
- regscale/integrations/commercial/import_all/__init__.py +0 -0
- regscale/integrations/commercial/import_all/import_all_cmd.py +467 -0
- regscale/integrations/commercial/import_all/scan_file_fingerprints.json +27 -0
- regscale/integrations/commercial/jira.py +1046 -0
- regscale/integrations/commercial/mappings/__init__.py +0 -0
- regscale/integrations/commercial/mappings/csf_controls.json +713 -0
- regscale/integrations/commercial/mappings/nist_800_53_r5_controls.json +1516 -0
- regscale/integrations/commercial/nessus/__init__.py +0 -0
- regscale/integrations/commercial/nessus/nessus_utils.py +429 -0
- regscale/integrations/commercial/nessus/scanner.py +416 -0
- regscale/integrations/commercial/nexpose.py +90 -0
- regscale/integrations/commercial/okta.py +798 -0
- regscale/integrations/commercial/opentext/__init__.py +0 -0
- regscale/integrations/commercial/opentext/click.py +99 -0
- regscale/integrations/commercial/opentext/scanner.py +143 -0
- regscale/integrations/commercial/prisma.py +91 -0
- regscale/integrations/commercial/qualys.py +1462 -0
- regscale/integrations/commercial/salesforce.py +980 -0
- regscale/integrations/commercial/sap/__init__.py +0 -0
- regscale/integrations/commercial/sap/click.py +31 -0
- regscale/integrations/commercial/sap/sysdig/__init__.py +0 -0
- regscale/integrations/commercial/sap/sysdig/click.py +57 -0
- regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +190 -0
- regscale/integrations/commercial/sap/tenable/__init__.py +0 -0
- regscale/integrations/commercial/sap/tenable/click.py +49 -0
- regscale/integrations/commercial/sap/tenable/scanner.py +196 -0
- regscale/integrations/commercial/servicenow.py +1756 -0
- regscale/integrations/commercial/sicura/__init__.py +0 -0
- regscale/integrations/commercial/sicura/api.py +855 -0
- regscale/integrations/commercial/sicura/commands.py +73 -0
- regscale/integrations/commercial/sicura/scanner.py +481 -0
- regscale/integrations/commercial/sicura/variables.py +16 -0
- regscale/integrations/commercial/snyk.py +90 -0
- regscale/integrations/commercial/sonarcloud.py +260 -0
- regscale/integrations/commercial/sqlserver.py +369 -0
- regscale/integrations/commercial/stig_mapper_integration/__init__.py +0 -0
- regscale/integrations/commercial/stig_mapper_integration/click_commands.py +38 -0
- regscale/integrations/commercial/stig_mapper_integration/mapping_engine.py +353 -0
- regscale/integrations/commercial/stigv2/__init__.py +0 -0
- regscale/integrations/commercial/stigv2/ckl_parser.py +349 -0
- regscale/integrations/commercial/stigv2/click_commands.py +95 -0
- regscale/integrations/commercial/stigv2/stig_integration.py +202 -0
- regscale/integrations/commercial/synqly/__init__.py +0 -0
- regscale/integrations/commercial/synqly/assets.py +46 -0
- regscale/integrations/commercial/synqly/ticketing.py +132 -0
- regscale/integrations/commercial/synqly/vulnerabilities.py +223 -0
- regscale/integrations/commercial/synqly_jira.py +840 -0
- regscale/integrations/commercial/tenablev2/__init__.py +0 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +31 -0
- regscale/integrations/commercial/tenablev2/click.py +1584 -0
- regscale/integrations/commercial/tenablev2/scanner.py +504 -0
- regscale/integrations/commercial/tenablev2/stig_parsers.py +140 -0
- regscale/integrations/commercial/tenablev2/utils.py +78 -0
- regscale/integrations/commercial/tenablev2/variables.py +17 -0
- regscale/integrations/commercial/trivy.py +162 -0
- regscale/integrations/commercial/veracode.py +96 -0
- regscale/integrations/commercial/wizv2/WizDataMixin.py +97 -0
- regscale/integrations/commercial/wizv2/__init__.py +0 -0
- regscale/integrations/commercial/wizv2/click.py +429 -0
- regscale/integrations/commercial/wizv2/constants.py +1001 -0
- regscale/integrations/commercial/wizv2/issue.py +361 -0
- regscale/integrations/commercial/wizv2/models.py +112 -0
- regscale/integrations/commercial/wizv2/parsers.py +339 -0
- regscale/integrations/commercial/wizv2/sbom.py +115 -0
- regscale/integrations/commercial/wizv2/scanner.py +416 -0
- regscale/integrations/commercial/wizv2/utils.py +796 -0
- regscale/integrations/commercial/wizv2/variables.py +39 -0
- regscale/integrations/commercial/wizv2/wiz_auth.py +159 -0
- regscale/integrations/commercial/xray.py +91 -0
- regscale/integrations/integration/__init__.py +2 -0
- regscale/integrations/integration/integration.py +26 -0
- regscale/integrations/integration/inventory.py +17 -0
- regscale/integrations/integration/issue.py +100 -0
- regscale/integrations/integration_override.py +149 -0
- regscale/integrations/public/__init__.py +103 -0
- regscale/integrations/public/cisa.py +641 -0
- regscale/integrations/public/criticality_updater.py +70 -0
- regscale/integrations/public/emass.py +411 -0
- regscale/integrations/public/emass_slcm_import.py +697 -0
- regscale/integrations/public/fedramp/__init__.py +0 -0
- regscale/integrations/public/fedramp/appendix_parser.py +548 -0
- regscale/integrations/public/fedramp/click.py +479 -0
- regscale/integrations/public/fedramp/components.py +714 -0
- regscale/integrations/public/fedramp/docx_parser.py +259 -0
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +1124 -0
- regscale/integrations/public/fedramp/fedramp_common.py +3181 -0
- regscale/integrations/public/fedramp/fedramp_docx.py +388 -0
- regscale/integrations/public/fedramp/fedramp_five.py +2343 -0
- regscale/integrations/public/fedramp/fedramp_traversal.py +138 -0
- regscale/integrations/public/fedramp/import_fedramp_r4_ssp.py +279 -0
- regscale/integrations/public/fedramp/import_workbook.py +495 -0
- regscale/integrations/public/fedramp/inventory_items.py +244 -0
- regscale/integrations/public/fedramp/mappings/__init__.py +0 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r4_parts.json +7388 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r5_params.json +8636 -0
- regscale/integrations/public/fedramp/mappings/fedramp_r5_parts.json +9605 -0
- regscale/integrations/public/fedramp/mappings/system_roles.py +34 -0
- regscale/integrations/public/fedramp/mappings/user.py +175 -0
- regscale/integrations/public/fedramp/mappings/values.py +141 -0
- regscale/integrations/public/fedramp/markdown_parser.py +150 -0
- regscale/integrations/public/fedramp/metadata.py +689 -0
- regscale/integrations/public/fedramp/models/__init__.py +59 -0
- regscale/integrations/public/fedramp/models/leveraged_auth_new.py +168 -0
- regscale/integrations/public/fedramp/models/poam_importer.py +522 -0
- regscale/integrations/public/fedramp/parts_mapper.py +107 -0
- regscale/integrations/public/fedramp/poam/__init__.py +0 -0
- regscale/integrations/public/fedramp/poam/scanner.py +851 -0
- regscale/integrations/public/fedramp/properties.py +201 -0
- regscale/integrations/public/fedramp/reporting.py +84 -0
- regscale/integrations/public/fedramp/resources.py +496 -0
- regscale/integrations/public/fedramp/rosetta.py +110 -0
- regscale/integrations/public/fedramp/ssp_logger.py +87 -0
- regscale/integrations/public/fedramp/system_characteristics.py +922 -0
- regscale/integrations/public/fedramp/system_control_implementations.py +582 -0
- regscale/integrations/public/fedramp/system_implementation.py +190 -0
- regscale/integrations/public/fedramp/xml_utils.py +87 -0
- regscale/integrations/public/nist_catalog.py +275 -0
- regscale/integrations/public/oscal.py +1946 -0
- regscale/integrations/public/otx.py +169 -0
- regscale/integrations/scanner_integration.py +2692 -0
- regscale/integrations/variables.py +25 -0
- regscale/models/__init__.py +7 -0
- regscale/models/app_models/__init__.py +5 -0
- regscale/models/app_models/catalog_compare.py +213 -0
- regscale/models/app_models/click.py +252 -0
- regscale/models/app_models/datetime_encoder.py +21 -0
- regscale/models/app_models/import_validater.py +321 -0
- regscale/models/app_models/mapping.py +260 -0
- regscale/models/app_models/pipeline.py +37 -0
- regscale/models/click_models.py +413 -0
- regscale/models/config.py +154 -0
- regscale/models/email_style.css +67 -0
- regscale/models/hierarchy.py +8 -0
- regscale/models/inspect_models.py +79 -0
- regscale/models/integration_models/__init__.py +0 -0
- regscale/models/integration_models/amazon_models/__init__.py +0 -0
- regscale/models/integration_models/amazon_models/inspector.py +262 -0
- regscale/models/integration_models/amazon_models/inspector_scan.py +206 -0
- regscale/models/integration_models/aqua.py +247 -0
- regscale/models/integration_models/azure_alerts.py +255 -0
- regscale/models/integration_models/base64.py +23 -0
- regscale/models/integration_models/burp.py +433 -0
- regscale/models/integration_models/burp_models.py +128 -0
- regscale/models/integration_models/cisa_kev_data.json +19333 -0
- regscale/models/integration_models/defender_data.py +93 -0
- regscale/models/integration_models/defenderimport.py +143 -0
- regscale/models/integration_models/drf.py +443 -0
- regscale/models/integration_models/ecr_models/__init__.py +0 -0
- regscale/models/integration_models/ecr_models/data.py +69 -0
- regscale/models/integration_models/ecr_models/ecr.py +239 -0
- regscale/models/integration_models/flat_file_importer.py +1079 -0
- regscale/models/integration_models/grype_import.py +247 -0
- regscale/models/integration_models/ibm.py +126 -0
- regscale/models/integration_models/implementation_results.py +85 -0
- regscale/models/integration_models/nexpose.py +140 -0
- regscale/models/integration_models/prisma.py +202 -0
- regscale/models/integration_models/qualys.py +720 -0
- regscale/models/integration_models/qualys_scanner.py +160 -0
- regscale/models/integration_models/sbom/__init__.py +0 -0
- regscale/models/integration_models/sbom/cyclone_dx.py +139 -0
- regscale/models/integration_models/send_reminders.py +620 -0
- regscale/models/integration_models/snyk.py +155 -0
- regscale/models/integration_models/synqly_models/__init__.py +0 -0
- regscale/models/integration_models/synqly_models/capabilities.json +1 -0
- regscale/models/integration_models/synqly_models/connector_types.py +22 -0
- regscale/models/integration_models/synqly_models/connectors/__init__.py +7 -0
- regscale/models/integration_models/synqly_models/connectors/assets.py +97 -0
- regscale/models/integration_models/synqly_models/connectors/ticketing.py +583 -0
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +169 -0
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +331 -0
- regscale/models/integration_models/synqly_models/param.py +72 -0
- regscale/models/integration_models/synqly_models/synqly_model.py +733 -0
- regscale/models/integration_models/synqly_models/tenants.py +39 -0
- regscale/models/integration_models/tenable_models/__init__.py +0 -0
- regscale/models/integration_models/tenable_models/integration.py +187 -0
- regscale/models/integration_models/tenable_models/models.py +513 -0
- regscale/models/integration_models/trivy_import.py +231 -0
- regscale/models/integration_models/veracode.py +217 -0
- regscale/models/integration_models/xray.py +135 -0
- regscale/models/locking.py +100 -0
- regscale/models/platform.py +110 -0
- regscale/models/regscale_models/__init__.py +67 -0
- regscale/models/regscale_models/assessment.py +570 -0
- regscale/models/regscale_models/assessment_plan.py +52 -0
- regscale/models/regscale_models/asset.py +567 -0
- regscale/models/regscale_models/asset_mapping.py +190 -0
- regscale/models/regscale_models/case.py +42 -0
- regscale/models/regscale_models/catalog.py +261 -0
- regscale/models/regscale_models/cci.py +46 -0
- regscale/models/regscale_models/change.py +167 -0
- regscale/models/regscale_models/checklist.py +372 -0
- regscale/models/regscale_models/comment.py +49 -0
- regscale/models/regscale_models/compliance_settings.py +112 -0
- regscale/models/regscale_models/component.py +412 -0
- regscale/models/regscale_models/component_mapping.py +65 -0
- regscale/models/regscale_models/control.py +38 -0
- regscale/models/regscale_models/control_implementation.py +1128 -0
- regscale/models/regscale_models/control_objective.py +261 -0
- regscale/models/regscale_models/control_parameter.py +100 -0
- regscale/models/regscale_models/control_test.py +34 -0
- regscale/models/regscale_models/control_test_plan.py +75 -0
- regscale/models/regscale_models/control_test_result.py +52 -0
- regscale/models/regscale_models/custom_field.py +245 -0
- regscale/models/regscale_models/data.py +109 -0
- regscale/models/regscale_models/data_center.py +40 -0
- regscale/models/regscale_models/deviation.py +203 -0
- regscale/models/regscale_models/email.py +97 -0
- regscale/models/regscale_models/evidence.py +47 -0
- regscale/models/regscale_models/evidence_mapping.py +40 -0
- regscale/models/regscale_models/facility.py +59 -0
- regscale/models/regscale_models/file.py +382 -0
- regscale/models/regscale_models/filetag.py +37 -0
- regscale/models/regscale_models/form_field_value.py +94 -0
- regscale/models/regscale_models/group.py +169 -0
- regscale/models/regscale_models/implementation_objective.py +335 -0
- regscale/models/regscale_models/implementation_option.py +275 -0
- regscale/models/regscale_models/implementation_role.py +33 -0
- regscale/models/regscale_models/incident.py +177 -0
- regscale/models/regscale_models/interconnection.py +43 -0
- regscale/models/regscale_models/issue.py +1176 -0
- regscale/models/regscale_models/leveraged_authorization.py +125 -0
- regscale/models/regscale_models/line_of_inquiry.py +52 -0
- regscale/models/regscale_models/link.py +205 -0
- regscale/models/regscale_models/meta_data.py +64 -0
- regscale/models/regscale_models/mixins/__init__.py +0 -0
- regscale/models/regscale_models/mixins/parent_cache.py +124 -0
- regscale/models/regscale_models/module.py +224 -0
- regscale/models/regscale_models/modules.py +191 -0
- regscale/models/regscale_models/objective.py +14 -0
- regscale/models/regscale_models/parameter.py +87 -0
- regscale/models/regscale_models/ports_protocol.py +81 -0
- regscale/models/regscale_models/privacy.py +89 -0
- regscale/models/regscale_models/profile.py +50 -0
- regscale/models/regscale_models/profile_link.py +68 -0
- regscale/models/regscale_models/profile_mapping.py +124 -0
- regscale/models/regscale_models/project.py +63 -0
- regscale/models/regscale_models/property.py +278 -0
- regscale/models/regscale_models/question.py +85 -0
- regscale/models/regscale_models/questionnaire.py +87 -0
- regscale/models/regscale_models/questionnaire_instance.py +177 -0
- regscale/models/regscale_models/rbac.py +132 -0
- regscale/models/regscale_models/reference.py +86 -0
- regscale/models/regscale_models/regscale_model.py +1643 -0
- regscale/models/regscale_models/requirement.py +29 -0
- regscale/models/regscale_models/risk.py +274 -0
- regscale/models/regscale_models/sbom.py +54 -0
- regscale/models/regscale_models/scan_history.py +436 -0
- regscale/models/regscale_models/search.py +53 -0
- regscale/models/regscale_models/security_control.py +132 -0
- regscale/models/regscale_models/security_plan.py +204 -0
- regscale/models/regscale_models/software_inventory.py +159 -0
- regscale/models/regscale_models/stake_holder.py +64 -0
- regscale/models/regscale_models/stig.py +647 -0
- regscale/models/regscale_models/supply_chain.py +152 -0
- regscale/models/regscale_models/system_role.py +188 -0
- regscale/models/regscale_models/system_role_external_assignment.py +40 -0
- regscale/models/regscale_models/tag.py +37 -0
- regscale/models/regscale_models/tag_mapping.py +19 -0
- regscale/models/regscale_models/task.py +133 -0
- regscale/models/regscale_models/threat.py +196 -0
- regscale/models/regscale_models/user.py +175 -0
- regscale/models/regscale_models/user_group.py +55 -0
- regscale/models/regscale_models/vulnerability.py +242 -0
- regscale/models/regscale_models/vulnerability_mapping.py +162 -0
- regscale/models/regscale_models/workflow.py +55 -0
- regscale/models/regscale_models/workflow_action.py +34 -0
- regscale/models/regscale_models/workflow_instance.py +269 -0
- regscale/models/regscale_models/workflow_instance_step.py +114 -0
- regscale/models/regscale_models/workflow_template.py +58 -0
- regscale/models/regscale_models/workflow_template_step.py +45 -0
- regscale/regscale.py +815 -0
- regscale/utils/__init__.py +7 -0
- regscale/utils/b64conversion.py +14 -0
- regscale/utils/click_utils.py +118 -0
- regscale/utils/decorators.py +48 -0
- regscale/utils/dict_utils.py +59 -0
- regscale/utils/files.py +79 -0
- regscale/utils/fxns.py +30 -0
- regscale/utils/graphql_client.py +113 -0
- regscale/utils/lists.py +16 -0
- regscale/utils/numbers.py +12 -0
- regscale/utils/shell.py +148 -0
- regscale/utils/string.py +121 -0
- regscale/utils/synqly_utils.py +165 -0
- regscale/utils/threading/__init__.py +8 -0
- regscale/utils/threading/threadhandler.py +131 -0
- regscale/utils/threading/threadsafe_counter.py +47 -0
- regscale/utils/threading/threadsafe_dict.py +242 -0
- regscale/utils/threading/threadsafe_list.py +83 -0
- regscale/utils/version.py +104 -0
- regscale/validation/__init__.py +0 -0
- regscale/validation/address.py +37 -0
- regscale/validation/record.py +48 -0
- regscale/visualization/__init__.py +5 -0
- regscale/visualization/click.py +34 -0
- regscale_cli-6.16.0.0.dist-info/LICENSE +21 -0
- regscale_cli-6.16.0.0.dist-info/METADATA +659 -0
- regscale_cli-6.16.0.0.dist-info/RECORD +481 -0
- regscale_cli-6.16.0.0.dist-info/WHEEL +5 -0
- regscale_cli-6.16.0.0.dist-info/entry_points.txt +6 -0
- regscale_cli-6.16.0.0.dist-info/top_level.txt +2 -0
- tests/fixtures/__init__.py +2 -0
- tests/fixtures/api.py +87 -0
- tests/fixtures/models.py +91 -0
- tests/fixtures/test_fixture.py +144 -0
- tests/mocks/__init__.py +0 -0
- tests/mocks/objects.py +3 -0
- tests/mocks/response.py +32 -0
- tests/mocks/xml.py +13 -0
- tests/regscale/__init__.py +0 -0
- tests/regscale/core/__init__.py +0 -0
- tests/regscale/core/test_api.py +232 -0
- tests/regscale/core/test_app.py +406 -0
- tests/regscale/core/test_login.py +37 -0
- tests/regscale/core/test_logz.py +66 -0
- tests/regscale/core/test_sbom_generator.py +87 -0
- tests/regscale/core/test_validation_utils.py +163 -0
- tests/regscale/core/test_version.py +78 -0
- tests/regscale/models/__init__.py +0 -0
- tests/regscale/models/test_asset.py +71 -0
- tests/regscale/models/test_config.py +26 -0
- tests/regscale/models/test_control_implementation.py +27 -0
- tests/regscale/models/test_import.py +97 -0
- tests/regscale/models/test_issue.py +36 -0
- tests/regscale/models/test_mapping.py +52 -0
- tests/regscale/models/test_platform.py +31 -0
- tests/regscale/models/test_regscale_model.py +346 -0
- tests/regscale/models/test_report.py +32 -0
- tests/regscale/models/test_tenable_integrations.py +118 -0
- tests/regscale/models/test_user_model.py +121 -0
- tests/regscale/test_about.py +19 -0
- tests/regscale/test_authorization.py +65 -0
|
@@ -0,0 +1,1079 @@
|
|
|
1
|
+
"""Container Scan Abstract"""
|
|
2
|
+
|
|
3
|
+
import ast
|
|
4
|
+
import csv
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import re
|
|
8
|
+
import shutil
|
|
9
|
+
from abc import ABC, abstractmethod
|
|
10
|
+
from collections import namedtuple
|
|
11
|
+
from datetime import datetime, timedelta
|
|
12
|
+
from os import PathLike
|
|
13
|
+
from typing import Any, Callable, Generator, Iterator, List, Optional, Sequence, TextIO, Tuple, Union, TYPE_CHECKING
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
|
|
17
|
+
|
|
18
|
+
import click
|
|
19
|
+
import requests
|
|
20
|
+
import xmltodict
|
|
21
|
+
from openpyxl.reader.excel import load_workbook
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
from regscale.core.app.api import Api
|
|
25
|
+
from regscale.core.app.application import Application
|
|
26
|
+
from regscale.core.app.utils.app_utils import (
|
|
27
|
+
check_file_path,
|
|
28
|
+
create_progress_object,
|
|
29
|
+
creation_date,
|
|
30
|
+
error_and_exit,
|
|
31
|
+
get_current_datetime,
|
|
32
|
+
)
|
|
33
|
+
from regscale.core.app.utils.parser_utils import safe_datetime_str
|
|
34
|
+
from regscale.core.app.utils.report_utils import ReportGenerator
|
|
35
|
+
from regscale.integrations.scanner_integration import ScannerIntegration
|
|
36
|
+
from regscale.models import IssueStatus, Metadata, regscale_models
|
|
37
|
+
from regscale.models.app_models.mapping import Mapping
|
|
38
|
+
from regscale.models.regscale_models import Asset, File, Issue, Vulnerability
|
|
39
|
+
|
|
40
|
+
logger = logging.getLogger(__name__)
|
|
41
|
+
|
|
42
|
+
DT_FORMAT = "%Y-%m-%d"
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class FlatFileIntegration(ScannerIntegration):
    """Scanner integration that relays pre-built assets and findings supplied via kwargs
    (e.g. by FlatFileImporter) into the ScannerIntegration pipeline."""

    title = "Flat File Integration"
    # Required fields from ScannerIntegration
    asset_identifier_field = "name"
    type = ScannerIntegration.type.CONTROL_TEST

    def __init__(
        self,
        plan_id: int,
        asset_identifier_field: str = "name",
        finding_severity_map: Optional[dict] = None,
        **kwargs: Any,
    ):
        """
        Initialize the FlatFileIntegration

        :param int plan_id: The plan id
        :param str asset_identifier_field: The asset identifier field to use, defaults to "name"
        :param Optional[dict] finding_severity_map: Mapping of scanner severity labels to
            RegScale issue severities; when omitted, a Critical/High/Medium/Low default is used
        :param dict kwargs: Additional keyword arguments forwarded to ScannerIntegration
        """
        self.asset_identifier_field = asset_identifier_field
        # Fall back to the standard four-level severity mapping when none is supplied
        # (note: "Medium" deliberately maps to RegScale's "Moderate").
        self.finding_severity_map = finding_severity_map or {
            "Critical": regscale_models.IssueSeverity.Critical,
            "High": regscale_models.IssueSeverity.High,
            "Medium": regscale_models.IssueSeverity.Moderate,
            "Low": regscale_models.IssueSeverity.Low,
        }
        super().__init__(plan_id=plan_id, **kwargs)

    def set_asset_identifier_field(self, asset_identifier_field: str) -> None:
        """
        Set the asset identifier field

        :param str asset_identifier_field: The asset identifier field to set
        :rtype: None
        """
        self.asset_identifier_field = asset_identifier_field

    def fetch_assets(self, *args: Any, **kwargs: Any) -> Iterator["IntegrationAsset"]:
        """
        Fetches assets from FlatFileImporter

        :param Any args: Additional positional arguments (unused)
        :param Any kwargs: Keyword arguments; ``integration_assets`` should be an
            iterable of IntegrationAsset objects
        :yields: Iterator[IntegrationAsset]
        """
        # Tolerate a missing or None "integration_assets" kwarg by yielding nothing
        # instead of raising TypeError when iterating None.
        yield from kwargs.get("integration_assets") or []

    def fetch_findings(self, *args: Any, **kwargs: Any) -> Iterator["IntegrationFinding"]:
        """
        Fetches findings from the integration

        :param Any args: Additional positional arguments (unused)
        :param Any kwargs: Keyword arguments; ``integration_findings`` should be an
            iterable of IntegrationFinding objects
        :yields: Iterator[IntegrationFinding]
        """
        # Lazy %-style args: the message is only built if DEBUG logging is enabled.
        logger.debug("Asset identifier field: %s", self.asset_identifier_field)
        # Tolerate a missing or None "integration_findings" kwarg by yielding nothing
        # instead of raising TypeError when iterating None.
        yield from kwargs.get("integration_findings") or []
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class FlatFileImporter(ABC):
|
|
113
|
+
"""
|
|
114
|
+
Abstract class for container scan integration
|
|
115
|
+
|
|
116
|
+
:param dict **kwargs: Keyword arguments
|
|
117
|
+
"""
|
|
118
|
+
|
|
119
|
+
name: str
|
|
120
|
+
mapping: "Mapping"
|
|
121
|
+
|
|
122
|
+
def __init__(self, **kwargs):
    """
    Parse a scanner export file and synchronize its contents with RegScale.

    This constructor performs the whole import: it normalizes keyword
    arguments, reads the source file into memory, builds assets and
    vulnerabilities via the caller-supplied factory functions
    (``asset_func`` / ``vuln_func``), pushes both through a
    FlatFileIntegration, and finally archives/uploads the processed file.

    :param dict **kwargs: Keyword arguments; recognized names are listed in
        self.field_names (file_path, parent_id, asset_func, vuln_func, ...)
    """
    # Use the caller's severity map when given; otherwise map the common
    # scanner severity labels onto RegScale issue severities.
    if finding_severity_map := kwargs.pop("finding_severity_map", None):
        self.finding_severity_map = finding_severity_map
    else:
        self.finding_severity_map = {
            "Critical": regscale_models.IssueSeverity.Critical,
            "High": regscale_models.IssueSeverity.High,
            "Medium": regscale_models.IssueSeverity.Moderate,
            "Low": regscale_models.IssueSeverity.Low,
        }
    # Derive parent linkage from the SSP id when no explicit parent given.
    if "parent_id" not in kwargs and "regscale_ssp_id" in kwargs:
        kwargs["parent_id"] = kwargs["regscale_ssp_id"]
        kwargs["parent_module"] = "securityplans"
        kwargs["plan_id"] = kwargs["regscale_ssp_id"]
    if "app" not in kwargs:
        kwargs["app"] = Application()
    # empty generator placeholders; replaced with real generators by
    # process_assets() / create_vulns() below
    self.integration_assets: Generator["IntegrationAsset", None, None] = (x for x in [])
    self.integration_findings: Generator["IntegrationAsset", None, None] = (x for x in [])
    # Names accepted by the Attributes namedtuple built below; any kwarg
    # not in this list will raise a TypeError from the namedtuple call.
    self.field_names = [
        "logger",
        "headers",
        "file_type",
        "app",
        "file_path",
        "name",
        "parent_id",
        "parent_module",
        "scan_date",
        "asset_func",
        "vuln_func",
        "issue_func",
        "extra_headers_allowed",
        "mapping",
        "ignore_validation",
        "header_line_number",
        "regscale_ssp_id",
        "plan_id",
        "disable_mapping",
        "mappings_path",
        "upload_file",
        "file_name",
    ]
    self.asset_identifier_field = kwargs.pop("asset_identifier_field", "name")
    # Optional explicit counts override the lengths computed from the data.
    asset_count = kwargs.pop("asset_count") if "asset_count" in kwargs else 0
    vuln_count = kwargs.pop("vuln_count") if "vuln_count" in kwargs else 0
    # Bundle all remaining kwargs into an immutable attribute record;
    # unspecified fields default to None.
    _attributes = namedtuple(
        "Attributes",
        self.field_names,
        defaults=[None] * len(self.field_names),
    )
    self.attributes = _attributes(**kwargs)

    self.file_type = kwargs.get("file_type", ".csv")
    self.extra_headers_allowed = kwargs.get("extra_headers_allowed", False)
    self.scan_date = safe_datetime_str(kwargs.get("scan_date"))
    self.attributes.logger.info("Processing %s...", self.attributes.file_path)
    self.formatted_headers = None
    self.config = self.attributes.app.config
    # Parse the file eagerly; header/file_data feed the factories below.
    self.header, self.file_data = self.file_to_list_of_dicts()
    self.data = {
        "assets": [],
        "issues": [],
        "scans": [],
        "vulns": [],
    }
    self.create_epoch = str(int(creation_date(self.attributes.file_path)))
    flat_int = FlatFileIntegration(
        plan_id=self.attributes.plan_id or self.attributes.regscale_ssp_id or self.attributes.parent_id,
        asset_identifier_field=self.asset_identifier_field,
        finding_severity_map=self.finding_severity_map,
    )
    flat_int.asset_identifier_field = self.asset_identifier_field
    logger.info(f"Asset Identifier Field: {flat_int.asset_identifier_field}")
    flat_int.title = self.attributes.name
    self.create_assets(kwargs["asset_func"])  # type: ignore # Pass in the function to create an asset
    self.create_vulns(kwargs["vuln_func"])  # type: ignore # Pass in the function to create a vuln
    # Prefer explicit counts; fall back to the collected list lengths.
    if asset_count:
        flat_int.num_assets_to_process = asset_count
    elif isinstance(self.data["assets"], list) and not asset_count:
        flat_int.num_assets_to_process = len(self.data["assets"])
    if vuln_count:
        flat_int.num_findings_to_process = vuln_count
    elif isinstance(self.data["vulns"], list):
        flat_int.num_findings_to_process = len(self.data["vulns"])
    flat_int.sync_assets(
        plan_id=self.attributes.parent_id,
        integration_assets=self.integration_assets,
        title=self.attributes.name,
        asset_count=flat_int.num_assets_to_process,
    )
    flat_int.sync_findings(
        plan_id=self.attributes.parent_id,
        integration_findings=self.integration_findings,
        title=self.attributes.name,
        finding_count=flat_int.num_findings_to_process,
        enable_finding_date_update=True,
        scan_date=self.scan_date,
    )
    # Archive (and optionally upload) the processed file.
    self.clean_up()
def parse_finding(self, vuln: Vulnerability) -> Optional["IntegrationFinding"]:
    """
    Parses a vulnerability object into an IntegrationFinding object.

    Findings without a resolvable asset identifier (no DNS name and no IP
    address) are dropped. CVE strings that fail validation are recovered
    from the plugin name when possible; otherwise the raw value is moved
    into the plugin_name field and the cve field is cleared.

    :param Vulnerability vuln: A vulnerability object
    :return: The parsed IntegrationFinding or None if parsing fails
    :rtype: Optional[IntegrationFinding]
    """
    from regscale.integrations.scanner_integration import IntegrationFinding

    try:
        # Prefer DNS name over IP address as the asset identifier.
        asset_id = vuln.dns or vuln.ipAddress
        if not asset_id:
            return None

        severity = self.finding_severity_map.get(vuln.severity.capitalize(), regscale_models.IssueSeverity.Low)
        status = self.map_status_to_issue_status(vuln.status)
        cve: Optional[str] = vuln.cve if vuln.cve else ""
        extract_vuln: Any = self.extract_ghsa_strings(vuln.plugInName or "")
        plugin_name = vuln.plugInName if vuln.plugInName else vuln.title
        # Try to recover a usable CVE value from the plugin name when the
        # supplied cve field does not validate.
        if not self.assert_valid_cve(cve):
            if isinstance(extract_vuln, list):
                cve = ", ".join(extract_vuln)
            if isinstance(extract_vuln, str):
                # Coalfire requires vulnerabilities to be stuffed into this field, regardless if they start
                # with CVE or not.
                cve = extract_vuln
        # Still invalid: keep the raw value as the plugin name instead.
        if not self.assert_valid_cve(cve):
            plugin_name = cve
            cve = ""
        # The "solution" extra-data entry may be a list, a dict, or a string.
        remediation_description = ""
        if remediation := vuln.extra_data.get("solution"):
            if isinstance(remediation, list):
                remediation_description = ", ".join(remediation)
            elif isinstance(remediation, dict):
                remediation_description = "\n".join([f"{k}: {v}" for k, v in remediation.items()])
            else:
                remediation_description = remediation
        return IntegrationFinding(
            control_labels=[],  # Add an empty list for control_labels
            category=f"{self.name} Vulnerability",  # Add a default category
            title=vuln.title,
            description=vuln.description,
            severity=severity,
            status=status,
            asset_identifier=asset_id,
            external_id=str(vuln.plugInId),
            rule_id=str(vuln.plugInId),
            first_seen=vuln.firstSeen,
            last_seen=vuln.lastSeen,
            remediation=remediation_description,
            cvss_score=vuln.vprScore,
            cve=cve,
            cvss_v3_base_score=vuln.cvsSv3BaseScore,
            source_rule_id=str(vuln.plugInId),
            vulnerability_type="Vulnerability Scan",
            baseline=f"{self.name} Host",
            results=vuln.title,
            plugin_name=plugin_name,
            date_created=vuln.firstSeen,
            date_last_updated=vuln.lastSeen,
        )
    except (KeyError, TypeError, ValueError) as e:
        # NOTE(review): this message says "Wiz finding" but the class is a
        # generic flat-file importer — the text looks copy-pasted; confirm.
        self.attributes.logger.error("Error parsing Wiz finding: %s", str(e), exc_info=True)
        return None
def parse_asset(self, asset: Asset) -> "IntegrationAsset":
    """
    Converts Asset -> IntegrationAsset.

    The identifier is taken from the attribute named by
    self.asset_identifier_field when the Asset has it, otherwise the
    asset's name is used. Numeric hardware fields (disk, cpu, ram) are
    zeroed and the asset is marked as not public facing.

    :param Asset asset: The asset to parse
    :return: The parsed IntegrationAsset
    :rtype: IntegrationAsset
    """
    from regscale.integrations.scanner_integration import IntegrationAsset

    return IntegrationAsset(
        name=asset.name,
        external_id=asset.otherTrackingNumber,
        other_tracking_number=asset.otherTrackingNumber,
        # Fall back to the asset name when the configured identifier
        # attribute does not exist on this Asset.
        identifier=(
            getattr(asset, self.asset_identifier_field)
            if hasattr(asset, self.asset_identifier_field)
            else asset.name
        ),
        asset_type=asset.assetType,
        asset_owner_id=asset.assetOwnerId,
        parent_id=self.attributes.parent_id,
        parent_module=regscale_models.SecurityPlan.get_module_slug(),
        asset_category=asset.assetCategory,
        date_last_updated=asset.dateLastUpdated,
        status=asset.status,
        ip_address=asset.ipAddress if asset.ipAddress else "Unknown",
        software_vendor=asset.softwareVendor,
        software_version=asset.softwareVersion,
        software_name=asset.softwareName,
        location=asset.location,
        notes=asset.notes,
        model=asset.model,
        serial_number=asset.serialNumber,
        is_public_facing=False,
        azure_identifier=asset.azureIdentifier,
        mac_address=asset.macAddress,
        fqdn=asset.fqdn,
        disk_storage=0,
        cpu=0,
        ram=0,
        operating_system=asset.operatingSystem,
        os_version=asset.osVersion,
        end_of_life_date=asset.endOfLifeDate,
        vlan_id=asset.vlanId,
        uri=asset.uri,
        aws_identifier=asset.awsIdentifier,
        google_identifier=asset.googleIdentifier,
        other_cloud_identifier=asset.otherCloudIdentifier,
        patch_level=asset.patchLevel,
        cpe=asset.cpe,
        component_names=[],
        source_data=None,
        url=None,
        ports_and_protocols=[],
        software_inventory=asset.extra_data.get("software_inventory", []),
    )
@staticmethod
def create_asset_type(asset_type: str) -> str:
    """
    Normalize an asset type to Title Case and register it as RegScale
    metadata when it is not already known.

    :param str asset_type: The asset type to normalize/register
    :return: Asset type in title case
    :rtype: str
    """
    normalized = asset_type.title().replace("_", " ")
    existing = Metadata.get_metadata_by_module_field(module="assets", field="Asset Type")
    # Register the value only when no metadata entry already carries it.
    known_values = {meta.value for meta in existing}
    if normalized not in known_values:
        Metadata(field="Asset Type", module="assets", value=normalized).create()
    return normalized
def file_to_list_of_dicts(self) -> tuple[Optional[Sequence[str]], list[Any]]:
    """
    Converts the configured source file (csv/xlsx/json/xml) into parsed data.

    Dispatches on the file extension; header is only populated for
    csv/xlsx sources. Lines before header_line_number are skipped.

    :raises AssertionError: If the file is JSON but invalid, or the file type is unsupported
    :return: Tuple of header and data from the file
    :rtype: tuple[Optional[Sequence[str]], list[Any]]
    """
    header = []
    data = []
    start_line_number = 0 if not self.attributes.header_line_number else self.attributes.header_line_number
    # added encoding errors="replace" param to replace encoding characters it can on error, ignoring or not including could throw errors or result in missing data
    with open(self.attributes.file_path, encoding="utf-8-sig", errors="replace") as file:
        # Skip lines until the start line is reached
        for _ in range(start_line_number):
            next(file)
        if file.name.endswith(".csv"):
            data, header = self.convert_csv_to_dict(file)
        elif file.name.endswith(".xlsx"):
            data, header = self.convert_xlsx_to_dict(file, start_line_number)
        elif file.name.endswith(".json"):
            try:
                # Filter possible null values
                file_data = json.load(file)
                # A top-level dict is returned as-is; a list is filtered
                # of falsy entries (nulls, empty objects).
                if isinstance(file_data, dict):
                    data = file_data
                if isinstance(file_data, list):
                    data = [dat for dat in file_data if dat]
            except json.JSONDecodeError:
                raise AssertionError("Invalid JSON file")
        elif file.name.endswith(".xml"):
            data = self.convert_xml_to_dict(file)
        else:
            raise AssertionError("Unsupported file type")
    return header, data
def handle_extra_headers(self, header: list) -> None:
    """
    Validate the parsed file header against the expected headers.

    Extra columns that are not part of the expected header set are
    reported with a warning and ignored; if any expected column is
    missing, the file is rejected as not being a valid export.

    :param list header: The headers from the csv file
    :raises AssertionError: If the headers in the csv file do not contain the required headers
    :rtype: None
    """
    expected = self.attributes.headers
    extra_headers = [column for column in header if column not in expected]

    # BUG FIX: the previous check compared the intersection of the file
    # header with the expected headers back against the expected headers,
    # which was vacuously true and could never raise. Verify instead that
    # every required header actually appears in the file header.
    if not all(item in header for item in expected):
        raise AssertionError(
            "The headers in the csv file do not contain the required headers "
            + f"headers, is this a valid {self.attributes.name} {self.file_type} file?"
        )

    if extra_headers:
        self.attributes.logger.warning(
            "The following extra columns were found and will be ignored: %s",
            ", ".join(extra_headers),
        )
def convert_csv_to_dict(self, file: TextIO) -> tuple:
    """
    Converts a csv file to a list of dictionaries.

    The file is rejected if empty. header_line_number lines are skipped
    before the DictReader is attached, and falsy (empty) header cells are
    dropped from the returned header.

    :param TextIO file: The csv file to convert
    :return: Tuple of data and header from the csv file
    :rtype: tuple
    """
    logger.info("flatfileimporter: Converting csv to dict")
    # if file is empty, error and exit
    if not file.read(1):
        error_and_exit("File is empty")
    # Rewind file and skip lines until the start line is reached
    file.seek(0)
    for _ in range(getattr(self.attributes, "header_line_number", 0) or 0):
        next(file)
    reader = csv.DictReader(file)

    # Drop empty header cells (e.g. trailing commas in the header row).
    header = [head for head in list(reader.fieldnames) if head]

    if self.extra_headers_allowed:
        self.handle_extra_headers(header=header)

    data = list(reader)
    return data, header
def convert_xlsx_to_dict(self, file: TextIO, start_line_number: int = 0) -> tuple:
    """
    Converts a xlsx file to a list of dictionaries.

    Only the active (first) sheet is read. The row at start_line_number is
    treated as the header; all following rows become dicts keyed by it.
    Cell values that look like Python list literals are parsed back into
    lists.

    :param TextIO file: The xlsx file to convert (only its name/path is used)
    :param int start_line_number: The line number to start reading from
    :return: Tuple of data and header from xlsx file
    :rtype: tuple
    """
    logger.info("flatfileimporter: Converting xlsx to dict")
    # Load the workbook
    workbook = load_workbook(filename=file.name)

    # Select the first sheet
    sheet = workbook.active

    # Get the data from the sheet
    data = list(sheet.values)

    # Get the header from the first row
    header = list(data[start_line_number])

    # Get the rest of the data
    data = data[start_line_number + 1 :]

    # Convert the data to a dictionary
    data_dict = [dict(zip(header, row)) for row in data]

    # Loop through the data and convert any string lists to lists
    for dat in data_dict:
        for key, val in dat.items():
            if isinstance(val, str) and val.startswith("["):
                try:
                    dat[key] = ast.literal_eval(dat[key])
                except SyntaxError as rex:
                    # Object is probably not a list, so just leave it as a string
                    self.attributes.app.logger.debug("SyntaxError: %s", rex)
    return data_dict, header
def count_vuln_by_severity(self, severity: str, asset_id: int) -> int:
    """
    Count the vulnerabilities of a given severity attached to a given asset.

    :param str severity: The severity to count
    :param int asset_id: The asset id matched against each vulnerability's parentId
    :return: The number of matching vulnerabilities
    :rtype: int
    """
    # Sum over a generator instead of materializing a filtered list.
    return sum(
        1
        for candidate in self.data["vulns"]
        if candidate.parentId == asset_id and candidate.severity == severity
    )
def create_assets(self, func: Callable) -> None:
    """
    Create assets in RegScale from the parsed file.

    Thin wrapper kept for the public API; the real work (including
    building self.integration_assets) happens in process_assets.

    :param Callable func: Function to create asset
    :return: None
    :rtype: None
    """
    self.process_assets(func=func)
def process_assets(self, func: Callable) -> None:
    """
    Run the asset factory over the parsed file data and expose the
    results as an iterator of IntegrationAssets in self.integration_assets.

    :param Callable func: Factory producing asset objects from file records
    :rtype: None
    """
    # The passed function creates asset objects. Convert to IntegrationAsset here
    if isinstance(self.file_data, list):
        for dat in self.file_data:
            self.process_asset_data(dat, func)
    elif isinstance(self.file_data, dict):
        # A single top-level JSON object is handed to the factory whole.
        self.data["assets"] = func(self.file_data)
    # If the factory already produced an iterator (e.g. a generator of
    # IntegrationAssets), pass it through untouched.
    if isinstance(self.data["assets"], Iterator):
        self.integration_assets = self.data["assets"]
        return None
    # Otherwise lazily convert the collected Asset objects.
    self.integration_assets = (self.parse_asset(asset) for asset in self.data["assets"])
def process_asset_data(self, dat: Any, func: Callable) -> None:
    """
    Run the caller-supplied asset factory on one file record and collect
    its output into self.data["assets"].

    Legacy Asset objects and list items are de-duplicated; an
    IntegrationAsset is appended unconditionally. Falsy results are dropped.

    :param Any dat: One parsed record from the source file
    :param Callable func: The factory that turns the record into asset object(s)
    :rtype: None
    """

    produced = func(dat)
    if not produced:
        return
    collected = self.data["assets"]
    if isinstance(produced, Asset) and produced not in collected:
        collected.append(produced)
    elif isinstance(produced, IntegrationAsset):
        collected.append(produced)
    elif isinstance(produced, list):
        # Preserve order while skipping duplicates.
        for candidate in produced:
            if candidate not in collected:
                collected.append(candidate)
@staticmethod
|
|
548
|
+
def check_status_codes(response_list: list) -> None:
|
|
549
|
+
"""
|
|
550
|
+
Check if any of the responses are not 200
|
|
551
|
+
|
|
552
|
+
:param list response_list: List of responses
|
|
553
|
+
:raises AssertionError: If any of the responses are not 200
|
|
554
|
+
:rtype: None
|
|
555
|
+
"""
|
|
556
|
+
for response in response_list:
|
|
557
|
+
if isinstance(response, requests.Response) and response.status_code != 200:
|
|
558
|
+
raise AssertionError(
|
|
559
|
+
f"Unable to {response.request.method} asset to RegScale.\n"
|
|
560
|
+
f"Code: {response.status_code}\nReason: {response.reason}"
|
|
561
|
+
f"\nPayload: {response.text}"
|
|
562
|
+
)
|
|
563
|
+
|
|
564
|
+
def update_due_dt(self, iss: Issue, kev_due_date: Optional[str], scanner: str, severity: str) -> Issue:
    """
    Populate the issue's due date from severity-based configuration,
    honoring a future KEV due date when one is supplied.

    :param Issue iss: RegScale Issue object to update
    :param Optional[str] kev_due_date: The KEV due date ("%Y-%m-%d %H:%M:%S")
    :param str scanner: The scanner key used to look up day offsets in config
    :param str severity: The severity of the issue
    :return: RegScale Issue object
    :rtype: Issue
    """
    fmt = "%Y-%m-%d %H:%M:%S"
    days = 30
    # Normalize scanner vocabulary onto the config's severity keys.
    aliases = {"medium": "moderate", "important": "high"}
    severity = aliases.get(severity, severity)
    if severity not in ("low", "moderate", "high", "critical"):
        # An odd severity should be treated as low.
        severity = "low"
    try:
        days = self.attributes.app.config["issues"][scanner][severity]
    except KeyError:
        # Missing config entry: keep the 30-day default and tell the user.
        self.attributes.logger.error(
            "Unable to find severity '%s'\n defaulting to %i days\nPlease add %s to the init.yaml configuration",
            severity,
            days,
            severity,
        )
    if kev_due_date and (datetime.strptime(kev_due_date, fmt) > datetime.now()):
        # A KEV deadline in the future wins over the computed due date.
        iss.dueDate = kev_due_date
    else:
        iss.dueDate = datetime.strftime(datetime.now() + timedelta(days=days), fmt)
    return iss
def _check_issue(self, issue: Issue) -> None:
    """
    Record an issue in self.data["issues"], skipping falsy values and
    duplicates.

    :param Issue issue: The issue to check to prevent duplicates
    :rtype: None
    """
    issues = self.data["issues"]
    if issue and issue not in issues:
        issues.append(issue)
def _check_issues(self, issues: List[Issue]) -> None:
    """
    Record each issue via _check_issue, which filters out duplicates.

    :param List[Issue] issues: The issues to check to prevent duplicates
    :rtype: None
    """
    for candidate in issues:
        self._check_issue(candidate)
def check_and_close_issues(self, existing_issues: list[Issue]) -> None:
    """
    Close RegScale issues whose CVE no longer appears in the export file.

    Existing issues are grouped by CVE; any group whose CVE is absent from
    the freshly parsed issues is closed (already-closed issues are left
    alone) and a report of the closures is written.

    :param list[Issue] existing_issues: List of existing issues in RegScale
    :rtype: None
    """
    existing_cves = self.group_issues_by_cve_id(existing_issues)
    # CVEs still being reported by this scan.
    parsed_cves = {issue.cve for issue in self.data["issues"] if issue.cve}
    closed_issues = []
    with create_progress_object() as close_issue_progress:
        closing_issues = close_issue_progress.add_task(
            "Comparing parsed issue(s) and existing issue(s)...",
            total=len(existing_cves),
        )
        for cve, issues in existing_cves.items():
            if cve not in parsed_cves:
                for issue in issues:
                    if issue.status == "Closed":
                        continue
                    self.attributes.logger.debug("Closing issue #%s", issue.id)
                    issue.status = "Closed"
                    # NOTE(review): self.scan_date is produced by
                    # safe_datetime_str() in __init__; if that returns a str
                    # this strftime call would fail — confirm its return type.
                    issue.dateCompleted = self.scan_date.strftime("%Y-%m-%d %H:%M:%S")
                    if issue.save():
                        self.attributes.logger.debug("Issue #%s closed", issue.id)
                        closed_issues.append(issue)
            close_issue_progress.advance(closing_issues, advance=1)
    self.log_and_save_closed_issues(closed_issues)
@staticmethod
def group_issues_by_cve_id(existing_issues: list[Issue]) -> dict[str, list[Issue]]:
    """
    Group existing RegScale issues by their CVE id.

    Issues without a CVE are omitted from the result.

    :param list[Issue] existing_issues: List of existing issues in RegScale
    :returns: A dictionary of cveId and list of issues with the same cveId
    :rtype: dict[str, list[Issue]]
    """
    from collections import defaultdict

    # Grouping with a defaultdict lets duplicate CVEs share one bucket,
    # so all duplicates can later be closed together.
    grouped = defaultdict(list)
    for existing in existing_issues:
        if existing.cve:
            grouped[existing.cve].append(existing)
    return grouped
def log_and_save_closed_issues(self, closed_issues: list[Issue]) -> None:
    """
    Log how many issues were closed and write a closure report, if any.

    :param list[Issue] closed_issues: List of closed issues to log and save
    :rtype: None
    """
    # Nothing to report when no issues were closed.
    if not closed_issues:
        return
    self.attributes.logger.info("Closed %i issue(s) in RegScale.", len(closed_issues))
    ReportGenerator(
        objects=closed_issues,
        to_file=True,
        report_name=f"{self.attributes.name}_closed_issues",
        regscale_id=self.attributes.parent_id,
        regscale_module=self.attributes.parent_module,
    )
def _check_vuln(self, vuln_to_check: Union[Vulnerability, "IntegrationFinding"]) -> None:
    """
    Record a vulnerability/finding in self.data["vulns"], skipping duplicates.

    IntegrationFindings only need a duplicate check; legacy Vulnerability
    objects must additionally be truthy and carry an ``id`` attribute.

    :param Union[Vulnerability, IntegrationFinding] vuln_to_check: The vulnerability to check to prevent duplicates
    :rtype: None
    """
    from regscale.integrations.scanner_integration import IntegrationFinding

    vulns = self.data["vulns"]
    if isinstance(vuln_to_check, IntegrationFinding):
        if vuln_to_check not in vulns:
            vulns.append(vuln_to_check)
    elif vuln_to_check and vuln_to_check not in vulns and hasattr(vuln_to_check, "id"):
        vulns.append(vuln_to_check)
def create_vulns(self, func: Callable) -> None:
    """
    Create vulns in RegScale from the parsed file data.

    The factory may return a single Vulnerability/IntegrationFinding, a
    list of them, or an iterator; an iterator short-circuits the loop and
    is used directly as the finding stream.

    :param Callable func: Function to create vuln (called as func(record, index=i))
    :rtype: None
    """
    from regscale.integrations.scanner_integration import IntegrationFinding

    with create_progress_object() as vuln_progress:
        vuln_task = vuln_progress.add_task("Processing vulnerabilities...", total=len(self.file_data))
        for ix, dat in enumerate(self.file_data):
            vuln = func(dat, index=ix)
            if not vuln:
                vuln_progress.advance(vuln_task, advance=1)
                continue
            if isinstance(vuln, Vulnerability) or isinstance(vuln, IntegrationFinding):
                self._check_vuln(vuln)
            if isinstance(vuln, list):
                for v in vuln:
                    self._check_vuln(v)
            if isinstance(vuln, Iterator):
                # A generator result replaces the per-record collection
                # entirely; mark the task complete and stop processing.
                self.integration_findings = vuln
                self.data["vulns"] = vuln
                vuln_progress.update(vuln_task, completed=len(self.file_data))
                return None
            vuln_progress.advance(vuln_task, advance=1)
    # Lazily convert the collected vulnerabilities into IntegrationFindings.
    self.integration_findings = (self.parse_finding(vuln) for vuln in self.data["vulns"])
def clean_up(self, file_path=None) -> None:
    """
    Archive the processed export file and optionally upload it to RegScale.

    The file is renamed with a timestamp suffix, moved into a sibling
    "processed" directory (when a parent record exists), and uploaded to
    the parent record when upload_file is enabled.

    :param file_path: Optional override for the file to archive; defaults
        to self.attributes.file_path
    :rtype: None
    """
    if not file_path:
        file_path = self.attributes.file_path
    file_path = Path(file_path)
    processed_dir = file_path.parent / "processed"
    # Timestamp the archived copy so repeated imports do not collide.
    file_name = (f"{file_path.stem}_" + f"{get_current_datetime('%Y%m%d-%I%M%S%p')}").replace(" ", "_")
    new_name = (file_path.parent / file_name).with_suffix(file_path.suffix)
    new_file_path = file_path.rename(new_name)
    if self.attributes.parent_id:
        check_file_path(str(processed_dir.absolute()))
        try:
            self.attributes.logger.info(
                "Renaming %s to %s, ...",
                file_path.name,
                new_file_path.name,
            )
            shutil.move(new_file_path, processed_dir)
            self.attributes.logger.info("File moved to %s", processed_dir)
        except shutil.Error:
            # Destination already has a file with this name; leave it.
            self.attributes.logger.debug(
                "File %s already exists in %s",
                new_file_path.name,
                processed_dir,
            )
    if self.attributes.upload_file and self.attributes.parent_id and self.attributes.parent_module:
        api = Api()
        self.attributes.logger.info(
            "Uploading %s to RegScale %s #%i...",
            new_file_path,
            self.attributes.parent_module,
            self.attributes.parent_id,
        )
        if File.upload_file_to_regscale(
            file_name=str(processed_dir / new_file_path.name),
            parent_id=self.attributes.parent_id,
            parent_module=self.attributes.parent_module,
            api=api,
        ):
            self.attributes.logger.info("File uploaded to RegScale succesfully.")
        else:
            self.attributes.logger.error("File upload to RegScale failed.")
@abstractmethod
def create_asset(self):
    """Create an asset from a parsed file record; implemented by each scanner-specific importer."""
@abstractmethod
def create_vuln(self):
    """Create a Vulnerability from a parsed file record; implemented by each scanner-specific importer."""
@staticmethod
def import_files(
    import_type: Callable,
    import_name: str,
    file_types: Union[str, list[str]],
    folder_path: PathLike[str],
    regscale_ssp_id: int,
    scan_date: datetime,
    mappings_path: Union[PathLike[str], Path],
    disable_mapping: bool,
    s3_bucket: str,
    s3_prefix: str,
    aws_profile: str,
    upload_file: Optional[bool] = True,
    **kwargs,
) -> None:
    """
    Import every matching file in a folder (optionally fetched from S3).

    Files are globbed by extension and each one is handed to the
    scanner-specific importer; validation failures on a single file are
    logged and do not stop the remaining files.

    :param Callable import_type: Function to import files
    :param str import_name: The name of the import type
    :param Union[str, list[str]] file_types: The file types to glob and import, e.g. ".csv" or [".csv", ".xlsx"]
    :param PathLike[str] folder_path: The folder path to import from
    :param int regscale_ssp_id: The RegScale SSP ID
    :param datetime scan_date: The date of the scan
    :param Union[PathLike[str], Path] mappings_path: The path to the mappings file
    :param bool disable_mapping: Whether to disable custom mappings
    :param str s3_bucket: The S3 bucket to download the files from
    :param str s3_prefix: The S3 prefix to download the files from
    :param str aws_profile: The AWS profile to use for S3 access
    :param Optional[bool] upload_file: Whether to upload the file to RegScale after processing, defaults to True
    :rtype: None
    """
    from regscale.core.app.utils.file_utils import download_from_s3
    from regscale.validation.record import validate_regscale_object
    from regscale.core.app.application import Application
    from regscale.exceptions import ValidationException

    if s3_bucket:
        download_from_s3(s3_bucket, s3_prefix, folder_path, aws_profile)
    app = Application()
    # Bail out early when the target SSP does not exist.
    if not validate_regscale_object(regscale_ssp_id, "securityplans"):
        app.logger.warning("SSP #%i is not a valid RegScale Security Plan.", regscale_ssp_id)
        return
    # Fall back to "now" when no (valid) scan date was given.
    if not scan_date or not FlatFileImporter.check_date_format(scan_date):
        scan_date = datetime.now()
    if isinstance(file_types, str):
        file_types = [file_types]
    files = []
    for file_type in file_types:
        # Accept extensions with or without a leading dot.
        files.extend(
            list(Path(folder_path).glob(f"*{file_type if file_type.startswith('.') else '.' + file_type}"))
        )
    if len(files) == 0:
        app.logger.warning(f"No {import_name} ({'/'.join(file_types)}) files found in the specified folder.")
        return
    for file in files:
        try:
            import_type(
                name=import_name,
                file_path=str(file),
                regscale_ssp_id=regscale_ssp_id,
                scan_date=scan_date,
                mappings_path=mappings_path,
                disable_mapping=disable_mapping,
                upload_file=upload_file,
                file_type=file.suffix,
                **kwargs,
            )
        except ValidationException as e:
            # One bad file should not abort the rest of the batch.
            app.logger.error(f"Validation error: {e}")
            continue
@classmethod
def common_scanner_options(cls, message: str, prompt: str, import_name: str) -> Callable[[Callable], click.option]:
    """
    Build a decorator that attaches the common scanner-integration CLI options.

    :param str message: Help text displayed for the --folder_path option
    :param str prompt: Prompt text displayed for the --folder_path option
    :param str import_name: The name of the import function (used to build the default mappings directory)
    :return: A decorator that applies the common click options to a command
    :rtype: Callable[[Callable], click.option]
    """
    import os
    from regscale.models.app_models.click import NotRequiredIf

    mapping_dir = os.path.join("./", "mappings", import_name)

    # Each entry is (option name strings, keyword arguments) for one click.option call.
    # Order matters: options are applied to the command in this exact sequence.
    option_specs = [
        (
            ("--s3-bucket",),
            {
                "help": "S3 bucket to download scan files from",
                "type": str,
                "cls": NotRequiredIf,
                "not_required_if": ["folder_path"],
            },
        ),
        (
            ("--s3-prefix",),
            {
                "help": "Prefix (folder path) within the S3 bucket",
                "type": str,
                "default": "",
                "cls": NotRequiredIf,
                "not_required_if": ["folder_path"],
            },
        ),
        (
            ("--aws-profile",),
            {
                "help": "AWS profile to use for S3 access",
                "type": str,
                "default": "regscale",
                "cls": NotRequiredIf,
                "not_required_if": ["folder_path"],
            },
        ),
        (
            ("--folder_path", "-f"),
            {
                "help": message,
                "prompt": prompt,
                "type": click.Path(exists=True, dir_okay=True, resolve_path=True),
                "cls": NotRequiredIf,
                "not_required_if": ["s3_bucket", "s3_prefix"],
            },
        ),
        (
            ("--regscale_ssp_id", "-id"),
            {
                "type": click.INT,
                "help": "The ID number from RegScale of the System Security Plan.",
                "prompt": "Enter RegScale System Security Plan ID",
                "required": True,
            },
        ),
        (
            ("--scan_date", "-sd"),
            {
                "type": click.DateTime(formats=[DT_FORMAT]),
                "help": "The scan date of the file.",
                "required": False,
            },
        ),
        (
            ("--mappings_path", "-m"),
            {
                "type": click.Path(dir_okay=True, resolve_path=True),
                "help": f"The CLI will use the custom header from the provided mappings directory or file, example is {mapping_dir}",
                "default": mapping_dir,
                "required": False,
            },
        ),
        (
            ("--disable_mapping", "-dm"),
            {
                "help": "Whether to disable the default mapping",
                "is_flag": True,
            },
        ),
        (
            ("--upload_file", "--upload"),
            {
                "help": "Whether to upload the file to RegScale after processing. Default is True.",
                "default": True,
                "required": False,
            },
        ),
    ]

    def decorator(this_func) -> Callable[[Callable], click.option]:
        """
        Apply every common option to the wrapped command, in declaration order.
        """
        for names, params in option_specs:
            this_func = click.option(*names, **params)(this_func)
        return this_func

    return decorator
|
|
949
|
+
|
|
950
|
+
@classmethod
def show_mapping(cls, group: click.Group, import_name: str, file_type: Optional[str] = None) -> click.Command:
    """
    Register a ``show_mapping`` command on the given click group that prints
    the mapping file used during ``import_name`` imports.

    :param click.Group group: The click group to register the command with
    :param str import_name: The name of the import function
    :param Optional[str] file_type: The file type of the import, used to derive the default mapping path, defaults to None
    :return: The registered click command
    :rtype: click.Command
    """
    import os

    # Define default path based on import_name and file_type; None when no file_type is given
    default = os.path.join("./", "mappings", import_name, f"{file_type}_mapping.json") if file_type else None

    @click.command(help=f"Show the mapping file used during {import_name} imports.")
    @click.option(
        "--file_path",
        "-f",
        help="File path to the mapping file to display",
        type=click.Path(exists=True, dir_okay=False, file_okay=True, resolve_path=True),
        required=True,
        default=default,
    )
    # Define the desired function behavior
    def wrapped_func(file_path: str) -> None:
        """
        Show the mapping file used during imports
        """
        from rich.console import Console

        console = Console()
        with open(file_path, "r", encoding="utf-8") as file:
            mapping = json.load(file)
        dat = json.dumps(mapping, indent=4)
        console.print(f"{file_path} mapping:")
        console.print(dat)

    # Register the decorated function with the given click group
    group.add_command(wrapped_func, name="show_mapping")
    # Return the command so the annotated return type (click.Command) is honored;
    # previously this method implicitly returned None despite its annotation.
    return wrapped_func
|
|
991
|
+
|
|
992
|
+
@staticmethod
|
|
993
|
+
def check_date_format(the_date: Any) -> bool:
|
|
994
|
+
"""
|
|
995
|
+
Check if the date is in the correct format
|
|
996
|
+
|
|
997
|
+
:param Any the_date: The date to check
|
|
998
|
+
:return: True if the date is in the correct format
|
|
999
|
+
:rtype: bool
|
|
1000
|
+
|
|
1001
|
+
"""
|
|
1002
|
+
try:
|
|
1003
|
+
if isinstance(the_date, str):
|
|
1004
|
+
the_date = datetime.strptime(the_date, DT_FORMAT)
|
|
1005
|
+
# make sure the date is not in the future
|
|
1006
|
+
if the_date >= datetime.now():
|
|
1007
|
+
error_and_exit("The scan date cannot be in the future.")
|
|
1008
|
+
res = True
|
|
1009
|
+
except ValueError:
|
|
1010
|
+
error_and_exit("Incorrect data format, should be YYYY-MM-DD")
|
|
1011
|
+
return res
|
|
1012
|
+
|
|
1013
|
+
@staticmethod
def convert_xml_to_dict(file: TextIO) -> dict:
    """
    Parse the contents of an open XML file into a Python dictionary.

    :param TextIO file: The file object representing the XML file.
    :return: A dictionary representation of the XML content.
    :rtype: dict
    """
    return xmltodict.parse(file.read())
|
|
1026
|
+
|
|
1027
|
+
@staticmethod
|
|
1028
|
+
def determine_severity(s: str) -> str:
|
|
1029
|
+
"""
|
|
1030
|
+
Determine the CVSS severity of the vulnerability
|
|
1031
|
+
|
|
1032
|
+
:param str s: The severity
|
|
1033
|
+
:return: The severity
|
|
1034
|
+
:rtype: str
|
|
1035
|
+
"""
|
|
1036
|
+
severity = "info"
|
|
1037
|
+
if s:
|
|
1038
|
+
severity = s.lower()
|
|
1039
|
+
return severity
|
|
1040
|
+
|
|
1041
|
+
@staticmethod
def map_status_to_issue_status(status: str) -> IssueStatus:
    """
    Maps the vuln status to issue status

    :param str status: Status of the vulnerability
    :returns: IssueStatus.Closed for terminal statuses, otherwise IssueStatus.Open
    :rtype: IssueStatus
    """
    # Any of these states means the vulnerability is no longer actionable
    closed_states = ("resolved", "rejected", "closed", "completed")
    if status.lower() in closed_states:
        return IssueStatus.Closed
    return IssueStatus.Open
|
|
1053
|
+
|
|
1054
|
+
@staticmethod
|
|
1055
|
+
def extract_ghsa_strings(text: str) -> Union[List[str], str]:
|
|
1056
|
+
"""
|
|
1057
|
+
Extract GHSA strings from a given text.
|
|
1058
|
+
|
|
1059
|
+
:param str text: The input text containing GHSA strings
|
|
1060
|
+
:return: A list of GHSA strings or the input text if no GHSA strings are found
|
|
1061
|
+
:rtype: Union[List[str], str]
|
|
1062
|
+
"""
|
|
1063
|
+
ghsa_pattern = r"GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}"
|
|
1064
|
+
res = re.findall(ghsa_pattern, text)
|
|
1065
|
+
if res:
|
|
1066
|
+
return res
|
|
1067
|
+
return text
|
|
1068
|
+
|
|
1069
|
+
@staticmethod
|
|
1070
|
+
def assert_valid_cve(cve: str) -> bool:
|
|
1071
|
+
"""
|
|
1072
|
+
Assert that the CVE identifier is valid
|
|
1073
|
+
|
|
1074
|
+
:param str cve: The CVE identifier
|
|
1075
|
+
:return: True if the CVE identifier is valid
|
|
1076
|
+
:rtype: bool
|
|
1077
|
+
"""
|
|
1078
|
+
pattern = r"^CVE-\d{4}-\d{4,}$"
|
|
1079
|
+
return bool(re.match(pattern, cve))
|