gooddata-pipelines 1.50.1.dev1__tar.gz → 1.51.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gooddata-pipelines might be problematic. Click here for more details.
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/PKG-INFO +11 -3
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/README.md +9 -1
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/__init__.py +18 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/gooddata_api.py +55 -0
- gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/input_processor.py +286 -0
- gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/input_validator.py +185 -0
- gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/ldm_extension_manager.py +283 -0
- gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/models/aliases.py +9 -0
- gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/models/analytical_object.py +33 -0
- gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/models/custom_data_object.py +90 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/users/models/users.py +10 -1
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/users/users.py +38 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/provisioning.py +2 -3
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/pyproject.toml +2 -2
- gooddata_pipelines-1.51.0/tests/data/custom_fields/response_get_all_dashboards.json +72 -0
- gooddata_pipelines-1.51.0/tests/data/custom_fields/response_get_all_metrics.json +78 -0
- gooddata_pipelines-1.51.0/tests/data/custom_fields/response_get_all_visualizations.json +143 -0
- gooddata_pipelines-1.51.0/tests/data/profiles.yaml +7 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/users/existing_upstream_users.json +8 -0
- gooddata_pipelines-1.51.0/tests/data/provisioning/entities/users/profile_response_content.json +54 -0
- gooddata_pipelines-1.51.0/tests/data/provisioning/entities/users/users_input_full_load_modifies_protected_user.json +34 -0
- gooddata_pipelines-1.51.0/tests/data/provisioning/entities/users/users_input_incremental_load_deletes_protected_user.json +47 -0
- gooddata_pipelines-1.51.0/tests/data/provisioning/entities/users/users_input_incremental_load_modifies_protected_user.json +47 -0
- gooddata_pipelines-1.51.0/tests/provisioning/entities/users/__init__.py +1 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/users/test_users.py +36 -0
- gooddata_pipelines-1.51.0/tests/provisioning/entities/workspaces/__init__.py +1 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/test_provisioning.py +17 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/__init__.py +1 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/test_input_processor.py +174 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/test_input_validator.py +165 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/test_ldm_extension_manager.py +194 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/test_models/__init__.py +1 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/test_models/test_analytical_object.py +66 -0
- gooddata_pipelines-1.51.0/tests/test_ldm_extension/test_models/test_custom_data_object.py +102 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/.gitignore +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/LICENSE.txt +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/Makefile +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/TODO.md +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/_version.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/exceptions.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/gooddata_api_wrapper.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/gooddata_sdk.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/utils.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/backup_input_processor.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/backup_manager.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/constants.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/csv_reader.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/models/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/models/input_type.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/models/storage.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/models/workspace_response.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/storage/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/storage/base_storage.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/storage/local_storage.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/backup_and_restore/storage/s3_storage.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/entities → gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/entities/user_data_filters → gooddata_pipelines-1.51.0/gooddata_pipelines/ldm_extension/models}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/logger/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/logger/logger.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/assets/wdf_setting.json +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/entities/user_data_filters/models → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/entities}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/entities/users → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/entities/user_data_filters}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/entities/users → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/entities/user_data_filters}/models/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/user_data_filters/models/udf_models.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/user_data_filters/user_data_filters.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/entities/workspaces → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/entities/users}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/gooddata_pipelines/provisioning/utils → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/entities/users/models}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/users/models/permissions.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/users/models/user_groups.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/users/permissions.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/users/user_groups.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/entities/workspaces}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/workspaces/models.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/workspaces/workspace.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_filters.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_validator.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/backup_and_restore → gooddata_pipelines-1.51.0/gooddata_pipelines/provisioning/utils}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/utils/context_objects.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/utils/exceptions.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/provisioning/utils/utils.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/py.typed +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/utils/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/utils/rate_limiter.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/data → gooddata_pipelines-1.51.0/tests}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/data/backup → gooddata_pipelines-1.51.0/tests/backup_and_restore}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/backup_and_restore/test_backup.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/backup_and_restore/test_backup_input_processor.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/conftest.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/panther → gooddata_pipelines-1.51.0/tests/data}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/provisioning → gooddata_pipelines-1.51.0/tests/data/backup}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_conf.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/analytical_dashboards/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/dashboard_plugins/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/filter_contexts/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/metrics/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/visualization_objects/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/ldm/datasets/test.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/ldm/date_instances/testinstance.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboards/id.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/dashboard_plugins/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/filter_contexts/id.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/metrics/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/visualization_objects/test.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/ldm/datasets/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/ldm/date_instances/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/analytical_dashboards/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/dashboard_plugins/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/filter_contexts/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/metrics/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/visualization_objects/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm/datasets/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm/date_instances/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/user_data_filters/.gitkeep +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/backup/test_local_conf.yaml +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/mock_responses.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/permissions/existing_upstream_permissions.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/permissions/permissions_expected_full_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/permissions/permissions_expected_incremental_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/permissions/permissions_input_full_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/permissions/permissions_input_incremental_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/users/users_expected_full_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/users/users_expected_incremental_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/users/users_input_full_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/data/provisioning/entities/users/users_input_incremental_load.json +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/provisioning/entities → gooddata_pipelines-1.51.0/tests/panther}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/panther/test_api_wrapper.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/panther/test_sdk_wrapper.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/provisioning/entities/users → gooddata_pipelines-1.51.0/tests/provisioning}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1/tests/provisioning/entities/workspaces → gooddata_pipelines-1.51.0/tests/provisioning/entities}/__init__.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/users/test_permissions.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/users/test_user_groups.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/workspaces/test_provisioning.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/workspaces/test_workspace.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/workspaces/test_workspace_data_filters.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/workspaces/test_workspace_data_parser.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/provisioning/entities/workspaces/test_workspace_data_validator.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tests/utils/test_rate_limiter.py +0 -0
- {gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/tox.ini +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: gooddata-pipelines
|
|
3
|
-
Version: 1.
|
|
3
|
+
Version: 1.51.0
|
|
4
4
|
Summary: GoodData Cloud lifecycle automation pipelines
|
|
5
5
|
Author-email: GoodData <support@gooddata.com>
|
|
6
6
|
License: MIT
|
|
@@ -8,7 +8,7 @@ License-File: LICENSE.txt
|
|
|
8
8
|
Requires-Python: >=3.10
|
|
9
9
|
Requires-Dist: boto3-stubs<2.0.0,>=1.39.3
|
|
10
10
|
Requires-Dist: boto3<2.0.0,>=1.39.3
|
|
11
|
-
Requires-Dist: gooddata-sdk~=1.
|
|
11
|
+
Requires-Dist: gooddata-sdk~=1.51.0
|
|
12
12
|
Requires-Dist: pydantic<3.0.0,>=2.11.3
|
|
13
13
|
Requires-Dist: requests<3.0.0,>=2.32.3
|
|
14
14
|
Requires-Dist: types-pyyaml<7.0.0,>=6.0.12.20250326
|
|
@@ -74,4 +74,12 @@ full_load_data: list[UserFullLoad] = UserFullLoad.from_list_of_dicts(
|
|
|
74
74
|
provisioner.full_load(full_load_data)
|
|
75
75
|
```
|
|
76
76
|
|
|
77
|
-
|
|
77
|
+
## Bugs & Requests
|
|
78
|
+
|
|
79
|
+
Please use the [GitHub issue tracker](https://github.com/gooddata/gooddata-python-sdk/issues) to submit bugs
|
|
80
|
+
or request features.
|
|
81
|
+
|
|
82
|
+
## Changelog
|
|
83
|
+
|
|
84
|
+
See [Github releases](https://github.com/gooddata/gooddata-python-sdk/releases) for released versions
|
|
85
|
+
and a list of changes.
|
|
@@ -57,4 +57,12 @@ full_load_data: list[UserFullLoad] = UserFullLoad.from_list_of_dicts(
|
|
|
57
57
|
provisioner.full_load(full_load_data)
|
|
58
58
|
```
|
|
59
59
|
|
|
60
|
-
|
|
60
|
+
## Bugs & Requests
|
|
61
|
+
|
|
62
|
+
Please use the [GitHub issue tracker](https://github.com/gooddata/gooddata-python-sdk/issues) to submit bugs
|
|
63
|
+
or request features.
|
|
64
|
+
|
|
65
|
+
## Changelog
|
|
66
|
+
|
|
67
|
+
See [Github releases](https://github.com/gooddata/gooddata-python-sdk/releases) for released versions
|
|
68
|
+
and a list of changes.
|
|
@@ -6,11 +6,22 @@ from ._version import __version__
|
|
|
6
6
|
from .backup_and_restore.backup_manager import BackupManager
|
|
7
7
|
from .backup_and_restore.models.storage import (
|
|
8
8
|
BackupRestoreConfig,
|
|
9
|
+
LocalStorageConfig,
|
|
10
|
+
S3StorageConfig,
|
|
9
11
|
StorageType,
|
|
10
12
|
)
|
|
11
13
|
from .backup_and_restore.storage.local_storage import LocalStorage
|
|
12
14
|
from .backup_and_restore.storage.s3_storage import S3Storage
|
|
13
15
|
|
|
16
|
+
# -------- LDM Extension --------
|
|
17
|
+
from .ldm_extension.ldm_extension_manager import LdmExtensionManager
|
|
18
|
+
from .ldm_extension.models.custom_data_object import (
|
|
19
|
+
ColumnDataType,
|
|
20
|
+
CustomDatasetDefinition,
|
|
21
|
+
CustomFieldDefinition,
|
|
22
|
+
CustomFieldType,
|
|
23
|
+
)
|
|
24
|
+
|
|
14
25
|
# -------- Provisioning --------
|
|
15
26
|
from .provisioning.entities.user_data_filters.models.udf_models import (
|
|
16
27
|
UserDataFilterFullLoad,
|
|
@@ -51,6 +62,8 @@ __all__ = [
|
|
|
51
62
|
"UserIncrementalLoad",
|
|
52
63
|
"UserGroupIncrementalLoad",
|
|
53
64
|
"PermissionFullLoad",
|
|
65
|
+
"LocalStorageConfig",
|
|
66
|
+
"S3StorageConfig",
|
|
54
67
|
"PermissionIncrementalLoad",
|
|
55
68
|
"UserFullLoad",
|
|
56
69
|
"UserGroupFullLoad",
|
|
@@ -61,5 +74,10 @@ __all__ = [
|
|
|
61
74
|
"UserDataFilterProvisioner",
|
|
62
75
|
"UserDataFilterFullLoad",
|
|
63
76
|
"EntityType",
|
|
77
|
+
"LdmExtensionManager",
|
|
78
|
+
"CustomDatasetDefinition",
|
|
79
|
+
"CustomFieldDefinition",
|
|
80
|
+
"ColumnDataType",
|
|
81
|
+
"CustomFieldType",
|
|
64
82
|
"__version__",
|
|
65
83
|
]
|
{gooddata_pipelines-1.50.1.dev1 → gooddata_pipelines-1.51.0}/gooddata_pipelines/api/gooddata_api.py
RENAMED
|
@@ -174,6 +174,49 @@ class ApiMethods:
|
|
|
174
174
|
)
|
|
175
175
|
return self._get(endpoint)
|
|
176
176
|
|
|
177
|
+
def get_all_metrics(self, workspace_id: str) -> requests.Response:
|
|
178
|
+
"""Get all metrics from the specified workspace.
|
|
179
|
+
|
|
180
|
+
Args:
|
|
181
|
+
workspace_id (str): The ID of the workspace to retrieve metrics from.
|
|
182
|
+
Returns:
|
|
183
|
+
requests.Response: The response containing the metrics.
|
|
184
|
+
"""
|
|
185
|
+
endpoint = f"/entities/workspaces/{workspace_id}/metrics"
|
|
186
|
+
headers = {**self.headers, "X-GDC-VALIDATE-RELATIONS": "true"}
|
|
187
|
+
return self._get(endpoint, headers=headers)
|
|
188
|
+
|
|
189
|
+
def get_all_visualization_objects(
|
|
190
|
+
self, workspace_id: str
|
|
191
|
+
) -> requests.Response:
|
|
192
|
+
"""Get all visualizations from the specified workspace.
|
|
193
|
+
|
|
194
|
+
Args:
|
|
195
|
+
workspace_id (str): The ID of the workspace to retrieve visualizations from.
|
|
196
|
+
Returns:
|
|
197
|
+
requests.Response: The response containing the visualizations.
|
|
198
|
+
"""
|
|
199
|
+
endpoint = f"/entities/workspaces/{workspace_id}/visualizationObjects"
|
|
200
|
+
headers = {**self.headers, "X-GDC-VALIDATE-RELATIONS": "true"}
|
|
201
|
+
return self._get(endpoint, headers=headers)
|
|
202
|
+
|
|
203
|
+
def get_all_dashboards(self, workspace_id: str) -> requests.Response:
|
|
204
|
+
"""Get all dashboards from the specified workspace.
|
|
205
|
+
|
|
206
|
+
Args:
|
|
207
|
+
workspace_id (str): The ID of the workspace to retrieve dashboards from.
|
|
208
|
+
Returns:
|
|
209
|
+
requests.Response: The response containing the dashboards.
|
|
210
|
+
"""
|
|
211
|
+
endpoint = f"/entities/workspaces/{workspace_id}/analyticalDashboards"
|
|
212
|
+
headers = {**self.headers, "X-GDC-VALIDATE-RELATIONS": "true"}
|
|
213
|
+
return self._get(endpoint, headers=headers)
|
|
214
|
+
|
|
215
|
+
def get_profile(self) -> requests.Response:
|
|
216
|
+
"""Returns organization and current user information."""
|
|
217
|
+
endpoint = "/profile"
|
|
218
|
+
return self._get(endpoint)
|
|
219
|
+
|
|
177
220
|
def _get(
|
|
178
221
|
self, endpoint: str, headers: dict[str, str] | None = None
|
|
179
222
|
) -> requests.Response:
|
|
@@ -253,3 +296,15 @@ class ApiMethods:
|
|
|
253
296
|
url = self._get_url(endpoint)
|
|
254
297
|
|
|
255
298
|
return requests.delete(url, headers=self.headers, timeout=TIMEOUT)
|
|
299
|
+
|
|
300
|
+
@staticmethod
|
|
301
|
+
def raise_if_response_not_ok(*responses: requests.Response) -> None:
|
|
302
|
+
"""Check if responses from API calls are OK.
|
|
303
|
+
|
|
304
|
+
Raises ValueError if any response is not OK (status code not 2xx).
|
|
305
|
+
"""
|
|
306
|
+
for response in responses:
|
|
307
|
+
if not response.ok:
|
|
308
|
+
raise ValueError(
|
|
309
|
+
f"Request to {response.url} failed with status code {response.status_code}: {response.text}"
|
|
310
|
+
)
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
# (C) 2025 GoodData Corporation
|
|
2
|
+
"""Module for processing validated custom datasets and fields data.
|
|
3
|
+
|
|
4
|
+
This module is responsible for converting validated custom datasets and fields
|
|
5
|
+
into objects defined in the GoodData Python SDK.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from gooddata_sdk.catalog.identifier import (
|
|
9
|
+
CatalogDatasetWorkspaceDataFilterIdentifier,
|
|
10
|
+
CatalogGrainIdentifier,
|
|
11
|
+
CatalogReferenceIdentifier,
|
|
12
|
+
)
|
|
13
|
+
from gooddata_sdk.catalog.workspace.declarative_model.workspace.logical_model.data_filter_references import (
|
|
14
|
+
CatalogDeclarativeWorkspaceDataFilterReferences,
|
|
15
|
+
)
|
|
16
|
+
from gooddata_sdk.catalog.workspace.declarative_model.workspace.logical_model.dataset.dataset import (
|
|
17
|
+
CatalogDataSourceTableIdentifier,
|
|
18
|
+
CatalogDeclarativeAttribute,
|
|
19
|
+
CatalogDeclarativeDataset,
|
|
20
|
+
CatalogDeclarativeDatasetSql,
|
|
21
|
+
CatalogDeclarativeFact,
|
|
22
|
+
CatalogDeclarativeReference,
|
|
23
|
+
CatalogDeclarativeReferenceSource,
|
|
24
|
+
CatalogDeclarativeWorkspaceDataFilterColumn,
|
|
25
|
+
)
|
|
26
|
+
from gooddata_sdk.catalog.workspace.declarative_model.workspace.logical_model.date_dataset.date_dataset import (
|
|
27
|
+
CatalogDeclarativeDateDataset,
|
|
28
|
+
CatalogGranularitiesFormatting,
|
|
29
|
+
)
|
|
30
|
+
from gooddata_sdk.catalog.workspace.declarative_model.workspace.logical_model.ldm import (
|
|
31
|
+
CatalogDeclarativeLdm,
|
|
32
|
+
CatalogDeclarativeModel,
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
from gooddata_pipelines.ldm_extension.models.aliases import DatasetId
|
|
36
|
+
from gooddata_pipelines.ldm_extension.models.custom_data_object import (
|
|
37
|
+
ColumnDataType,
|
|
38
|
+
CustomDataset,
|
|
39
|
+
CustomFieldDefinition,
|
|
40
|
+
CustomFieldType,
|
|
41
|
+
)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class LdmExtensionDataProcessor:
|
|
45
|
+
"""Create GoodData LDM from validated custom datasets and fields."""
|
|
46
|
+
|
|
47
|
+
DATE_GRANULARITIES: list[str] = [
|
|
48
|
+
"MINUTE",
|
|
49
|
+
"HOUR",
|
|
50
|
+
"DAY",
|
|
51
|
+
"WEEK",
|
|
52
|
+
"MONTH",
|
|
53
|
+
"QUARTER",
|
|
54
|
+
"YEAR",
|
|
55
|
+
"MINUTE_OF_HOUR",
|
|
56
|
+
"HOUR_OF_DAY",
|
|
57
|
+
"DAY_OF_WEEK",
|
|
58
|
+
"DAY_OF_MONTH",
|
|
59
|
+
"DAY_OF_YEAR",
|
|
60
|
+
"WEEK_OF_YEAR",
|
|
61
|
+
"MONTH_OF_YEAR",
|
|
62
|
+
"QUARTER_OF_YEAR",
|
|
63
|
+
]
|
|
64
|
+
|
|
65
|
+
@staticmethod
|
|
66
|
+
def _attribute_from_field(
|
|
67
|
+
dataset_name: str,
|
|
68
|
+
custom_field: CustomFieldDefinition,
|
|
69
|
+
) -> CatalogDeclarativeAttribute:
|
|
70
|
+
"""Assign a declarative attribute from a custom field definition."""
|
|
71
|
+
return CatalogDeclarativeAttribute(
|
|
72
|
+
id=custom_field.custom_field_id,
|
|
73
|
+
title=custom_field.custom_field_name,
|
|
74
|
+
source_column=custom_field.custom_field_source_column,
|
|
75
|
+
labels=[],
|
|
76
|
+
source_column_data_type=custom_field.custom_field_source_column_data_type.value,
|
|
77
|
+
tags=[dataset_name],
|
|
78
|
+
)
|
|
79
|
+
|
|
80
|
+
@staticmethod
|
|
81
|
+
def _fact_from_field(
|
|
82
|
+
dataset_name: str,
|
|
83
|
+
custom_field: CustomFieldDefinition,
|
|
84
|
+
) -> CatalogDeclarativeFact:
|
|
85
|
+
"""Assign a declarative fact from a custom field definition."""
|
|
86
|
+
return CatalogDeclarativeFact(
|
|
87
|
+
id=custom_field.custom_field_id,
|
|
88
|
+
title=custom_field.custom_field_name,
|
|
89
|
+
source_column=custom_field.custom_field_source_column,
|
|
90
|
+
source_column_data_type=custom_field.custom_field_source_column_data_type.value,
|
|
91
|
+
tags=[dataset_name],
|
|
92
|
+
)
|
|
93
|
+
|
|
94
|
+
def _date_from_field(
|
|
95
|
+
self,
|
|
96
|
+
dataset_name: str,
|
|
97
|
+
custom_field: CustomFieldDefinition,
|
|
98
|
+
) -> CatalogDeclarativeDateDataset:
|
|
99
|
+
"""Assign a declarative date dataset from a custom field definition."""
|
|
100
|
+
|
|
101
|
+
return CatalogDeclarativeDateDataset(
|
|
102
|
+
id=custom_field.custom_field_id,
|
|
103
|
+
title=custom_field.custom_field_name,
|
|
104
|
+
granularities_formatting=CatalogGranularitiesFormatting(
|
|
105
|
+
title_base="",
|
|
106
|
+
title_pattern="%titleBase - %granularityTitle",
|
|
107
|
+
),
|
|
108
|
+
granularities=self.DATE_GRANULARITIES,
|
|
109
|
+
tags=[dataset_name],
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
@staticmethod
|
|
113
|
+
def _date_ref_from_field(
|
|
114
|
+
custom_field: CustomFieldDefinition,
|
|
115
|
+
) -> CatalogDeclarativeReference:
|
|
116
|
+
"""Create a date reference from a custom field definition."""
|
|
117
|
+
return CatalogDeclarativeReference(
|
|
118
|
+
identifier=CatalogReferenceIdentifier(
|
|
119
|
+
id=custom_field.custom_field_id
|
|
120
|
+
),
|
|
121
|
+
multivalue=False,
|
|
122
|
+
sources=[
|
|
123
|
+
CatalogDeclarativeReferenceSource(
|
|
124
|
+
column=custom_field.custom_field_source_column,
|
|
125
|
+
target=CatalogGrainIdentifier(
|
|
126
|
+
id=custom_field.custom_field_id,
|
|
127
|
+
type=CustomFieldType.DATE.value,
|
|
128
|
+
),
|
|
129
|
+
data_type=custom_field.custom_field_source_column_data_type.value,
|
|
130
|
+
)
|
|
131
|
+
],
|
|
132
|
+
)
|
|
133
|
+
|
|
134
|
+
@staticmethod
def _get_sources(
    dataset: CustomDataset,
) -> tuple[
    CatalogDataSourceTableIdentifier | None,
    CatalogDeclarativeDatasetSql | None,
]:
    """Get the data source table and SQL from the dataset definition.

    Either value may be None; per prior validation at least one of the
    two sources is expected to be set on the definition.
    """
    definition = dataset.definition

    table_identifier: CatalogDataSourceTableIdentifier | None = None
    if definition.dataset_source_table:
        table_identifier = CatalogDataSourceTableIdentifier(
            id=definition.dataset_source_table,
            data_source_id=definition.dataset_datasource_id,
            path=[definition.dataset_source_table],
        )

    sql_source: CatalogDeclarativeDatasetSql | None = None
    if definition.dataset_source_sql:
        sql_source = CatalogDeclarativeDatasetSql(
            statement=definition.dataset_source_sql,
            data_source_id=definition.dataset_datasource_id,
        )

    return table_identifier, sql_source
|
|
165
|
+
|
|
166
|
+
def datasets_to_ldm(
    self, datasets: dict[DatasetId, CustomDataset]
) -> CatalogDeclarativeModel:
    """Convert validated datasets to GoodData declarative model.

    Args:
        datasets (dict[DatasetId, CustomDataset]): Dictionary of validated
            datasets.
    Returns:
        CatalogDeclarativeModel: GoodData declarative model representation
            of the datasets.
    Raises:
        ValueError: If a custom field has an unsupported type.
    """
    declarative_datasets: list[CatalogDeclarativeDataset] = []

    # Date dimensions are not stored in a dataset, but as separate datasets
    # in the `date_instances` object on the LDM.
    date_instances: list[CatalogDeclarativeDateDataset] = []

    for dataset in datasets.values():
        attributes, facts, dates, date_references = self._split_custom_fields(
            dataset
        )
        date_instances.extend(dates)

        # Get the data source info (table id or SQL statement).
        dataset_source_table_id, dataset_sql = self._get_sources(dataset)

        declarative_datasets.append(
            self._build_declarative_dataset(
                dataset,
                attributes=attributes,
                facts=facts,
                date_references=date_references,
                dataset_source_table_id=dataset_source_table_id,
                dataset_sql=dataset_sql,
            )
        )

    # Create the Logical Data Model from the datasets and the date instances.
    ldm = CatalogDeclarativeLdm(
        datasets=declarative_datasets, date_instances=date_instances
    )
    return CatalogDeclarativeModel(ldm=ldm)

def _split_custom_fields(
    self, dataset: CustomDataset
) -> tuple[
    list[CatalogDeclarativeAttribute],
    list[CatalogDeclarativeFact],
    list[CatalogDeclarativeDateDataset],
    list[CatalogDeclarativeReference],
]:
    """Convert the dataset's custom fields into declarative LDM objects.

    Date dimensions are returned separately from the dataset content: they
    live in the LDM's ``date_instances``, but a matching reference is also
    produced so the date dimension stays connected to the dataset in the
    GoodData Logical Data Model.

    Args:
        dataset (CustomDataset): Validated dataset with its custom fields.
    Returns:
        Tuple of (attributes, facts, date datasets, date references).
    Raises:
        ValueError: If a custom field has an unsupported type.
    """
    dataset_name = dataset.definition.dataset_name

    attributes: list[CatalogDeclarativeAttribute] = []
    facts: list[CatalogDeclarativeFact] = []
    dates: list[CatalogDeclarativeDateDataset] = []
    date_references: list[CatalogDeclarativeReference] = []

    for custom_field in dataset.custom_fields:
        if custom_field.custom_field_type == CustomFieldType.ATTRIBUTE:
            attributes.append(
                self._attribute_from_field(dataset_name, custom_field)
            )
        elif custom_field.custom_field_type == CustomFieldType.FACT:
            facts.append(self._fact_from_field(dataset_name, custom_field))
        elif custom_field.custom_field_type == CustomFieldType.DATE:
            dates.append(self._date_from_field(dataset_name, custom_field))
            # Reference connecting the dataset to the date dimension.
            date_references.append(self._date_ref_from_field(custom_field))
        else:
            raise ValueError(
                f"Unsupported custom field type: {custom_field.custom_field_type}"
            )

    return attributes, facts, dates, date_references

def _build_declarative_dataset(
    self,
    dataset: CustomDataset,
    *,
    attributes: list[CatalogDeclarativeAttribute],
    facts: list[CatalogDeclarativeFact],
    date_references: list[CatalogDeclarativeReference],
    dataset_source_table_id: CatalogDataSourceTableIdentifier | None,
    dataset_sql: CatalogDeclarativeDatasetSql | None,
) -> CatalogDeclarativeDataset:
    """Assemble the declarative dataset object for one custom dataset.

    Args:
        dataset (CustomDataset): Validated dataset definition.
        attributes: Declarative attributes built from the custom fields.
        facts: Declarative facts built from the custom fields.
        date_references: References to date dimensions used by the dataset.
        dataset_source_table_id: Physical table source, if any.
        dataset_sql: SQL statement source, if any.
    Returns:
        CatalogDeclarativeDataset: Declarative dataset for the LDM.
    """
    definition = dataset.definition

    # Reference connecting this dataset to its parent dataset in the LDM.
    parent_reference = CatalogDeclarativeReference(
        identifier=CatalogReferenceIdentifier(
            id=definition.parent_dataset_reference,
        ),
        multivalue=True,
        sources=[
            CatalogDeclarativeReferenceSource(
                column=definition.dataset_reference_source_column,
                data_type=definition.dataset_reference_source_column_data_type.value,
                target=CatalogGrainIdentifier(
                    id=definition.parent_dataset_reference_attribute_id,
                    type=CustomFieldType.ATTRIBUTE.value,
                ),
            )
        ],
    )

    return CatalogDeclarativeDataset(
        id=definition.dataset_id,
        title=definition.dataset_name,
        grain=[],
        references=[parent_reference] + date_references,
        description=None,
        attributes=attributes,
        facts=facts,
        data_source_table_id=dataset_source_table_id,
        sql=dataset_sql,
        workspace_data_filter_columns=[
            CatalogDeclarativeWorkspaceDataFilterColumn(
                name=definition.workspace_data_filter_column_name,
                data_type=ColumnDataType.STRING.value,
            )
        ],
        workspace_data_filter_references=[
            CatalogDeclarativeWorkspaceDataFilterReferences(
                filter_id=CatalogDatasetWorkspaceDataFilterIdentifier(
                    id=definition.workspace_data_filter_id
                ),
                filter_column=definition.workspace_data_filter_column_name,
                filter_column_data_type=ColumnDataType.STRING.value,
            )
        ],
        tags=[definition.dataset_name],
    )
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
# (C) 2025 GoodData Corporation
|
|
2
|
+
"""Module for validating custom fields input data.
|
|
3
|
+
|
|
4
|
+
This module is responsible for validating custom fields input data checking for
|
|
5
|
+
row level and aggregated constraints.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from collections import Counter
|
|
9
|
+
from typing import Any, TypeVar
|
|
10
|
+
|
|
11
|
+
from pydantic import BaseModel
|
|
12
|
+
|
|
13
|
+
from gooddata_pipelines.ldm_extension.models.aliases import (
|
|
14
|
+
DatasetId,
|
|
15
|
+
WorkspaceId,
|
|
16
|
+
)
|
|
17
|
+
from gooddata_pipelines.ldm_extension.models.custom_data_object import (
|
|
18
|
+
CustomDataset,
|
|
19
|
+
CustomDatasetDefinition,
|
|
20
|
+
CustomFieldDefinition,
|
|
21
|
+
CustomFieldType,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class LdmExtensionDataValidator:
    """Validates custom dataset and field definitions.

    Row-level constraints are enforced by the Pydantic models of the
    definitions; this class checks the aggregated constraints (uniqueness
    of identifier combinations) and groups the definitions per workspace.
    """

    def validate(
        self,
        dataset_definitions: list[CustomDatasetDefinition],
        field_definitions: list[CustomFieldDefinition],
    ) -> dict[WorkspaceId, dict[DatasetId, CustomDataset]]:
        """Validate dataset and field definitions.

        Validates the dataset definitions and field definitions by using Pydantic
        models to check row level constraints, then aggregates the definitions
        per workspace, while checking for integrity on aggregated level, i.e.,
        uniqueness of combinations of identifiers on workspace level.

        Args:
            dataset_definitions (list[CustomDatasetDefinition]): List of dataset definitions to validate.
            field_definitions (list[CustomFieldDefinition]): List of field definitions to validate.
        Returns:
            dict[WorkspaceId, dict[DatasetId, CustomDataset]]:
                Dictionary of validated dataset definitions per workspace,
                where each dataset contains its custom fields:
                ```python
                {
                    "workspace_id_1": {
                        "dataset_id_1": CustomDataset(...),
                        "dataset_id_2": CustomDataset(...),
                    },
                    ...
                }
                ```
        Raises:
            ValueError: If aggregated integrity constraints are violated.
        """

        # First, validate the dataset definitions and aggregate them per workspace.
        validated_data = self._validate_dataset_definitions(dataset_definitions)

        # Then validate the field definitions and connect them to the datasets
        validated_data = self._validate_field_definitions(
            validated_data, field_definitions
        )

        return validated_data

    def _validate_dataset_definitions(
        self,
        dataset_definitions: list[CustomDatasetDefinition],
    ) -> dict[WorkspaceId, dict[DatasetId, CustomDataset]]:
        """Group dataset definitions per workspace after integrity checks.

        Args:
            dataset_definitions (list[CustomDatasetDefinition]): Definitions to group.
        Returns:
            dict[WorkspaceId, dict[DatasetId, CustomDataset]]: Datasets keyed
                by workspace id and dataset id, with empty custom field lists.
        """
        self._check_dataset_combinations(dataset_definitions)

        validated_definitions: dict[
            WorkspaceId, dict[DatasetId, CustomDataset]
        ] = {}
        for definition in dataset_definitions:
            validated_definitions.setdefault(definition.workspace_id, {})[
                definition.dataset_id
            ] = CustomDataset(definition=definition, custom_fields=[])

        return validated_definitions

    def _check_dataset_combinations(
        self, dataset_definitions: list[CustomDatasetDefinition]
    ) -> None:
        """Check integrity of provided dataset definitions.

        Validation criteria:
        - workspace_id + dataset_id must be unique across all dataset definitions.

        Args:
            dataset_definitions (list[CustomDatasetDefinition]): List of dataset definitions to check.
        Raises:
            ValueError: If there are duplicate dataset definitions based on workspace_id and dataset_id.
        """
        workspace_dataset_combinations = [
            (definition.workspace_id, definition.dataset_id)
            for definition in dataset_definitions
        ]
        if len(workspace_dataset_combinations) != len(
            set(workspace_dataset_combinations)
        ):
            duplicates = self._get_duplicates(workspace_dataset_combinations)
            raise ValueError(
                "Duplicate dataset definitions found in the raw dataset "
                + f"definitions (workspace_id, dataset_id): {duplicates}"
            )

    @staticmethod
    def _get_duplicates(list_to_check: list[Any]) -> list[Any]:
        """Get duplicates from a list.

        Args:
            list_to_check (list[Any]): List of items to check for duplicates.
        Returns:
            list[Any]: List of duplicate items.
        """
        counts = Counter(list_to_check)
        return [item for item, count in counts.items() if count > 1]

    def _check_field_combinations(
        self, field_definitions: list[CustomFieldDefinition]
    ) -> None:
        """Check integrity of provided field definitions.

        Validation criteria (per workspace):
        - unique workspace_id + cf_id combinations (only for attribute and fact custom_field_type)
        - there is no row with the same dataset_id and cf_id (only for date custom_field_type)

        Args:
            field_definitions (list[CustomFieldDefinition]): List of field definitions to check.
        Raises:
            ValueError: If there are duplicate field definitions based on workspace_id and cf_id.
        """
        workspace_field_combinations: set[tuple[str, str]] = set()
        dataset_field_combinations: set[tuple[str, str]] = set()

        for field in field_definitions:
            if field.custom_field_type in [
                CustomFieldType.ATTRIBUTE,
                CustomFieldType.FACT,
            ]:
                combination = (field.workspace_id, field.custom_field_id)
                if combination in workspace_field_combinations:
                    raise ValueError(
                        f"Duplicate custom field found for workspace {field.workspace_id} "
                        + f"with field ID {field.custom_field_id}"
                    )
                workspace_field_combinations.add(combination)

            elif field.custom_field_type == CustomFieldType.DATE:
                combination = (field.dataset_id, field.custom_field_id)
                if combination in dataset_field_combinations:
                    raise ValueError(
                        f"Duplicate custom field found for dataset {field.dataset_id} "
                        + f"with field ID {field.custom_field_id}"
                    )
                dataset_field_combinations.add(combination)

    def _validate_field_definitions(
        self,
        validated_definitions: dict[
            WorkspaceId, dict[DatasetId, CustomDataset]
        ],
        field_definitions: list[CustomFieldDefinition],
    ) -> dict[WorkspaceId, dict[DatasetId, CustomDataset]]:
        """Validates custom field definitions and connects them to the datasets.

        Args:
            validated_definitions (dict[WorkspaceId, dict[DatasetId, CustomDataset]]):
                Dictionary of validated dataset definitions per workspace.
            field_definitions (list[CustomFieldDefinition]): List of field definitions to validate.
        Returns:
            dict[WorkspaceId, dict[DatasetId, CustomDataset]]:
                Updated dictionary of validated dataset definitions with custom fields added.
        Raises:
            ValueError: If a field references a workspace/dataset combination
                that has no dataset definition.
        """
        self._check_field_combinations(field_definitions)

        for field_definition in field_definitions:
            try:
                dataset = validated_definitions[field_definition.workspace_id][
                    field_definition.dataset_id
                ]
            except KeyError as error:
                # Surface a clear error instead of a bare KeyError when a
                # field points at a dataset that was never defined.
                raise ValueError(
                    "Custom field references an unknown dataset definition "
                    + f"(workspace_id={field_definition.workspace_id}, "
                    + f"dataset_id={field_definition.dataset_id})"
                ) from error
            dataset.custom_fields.append(field_definition)

        return validated_definitions
|