gooddata-pipelines 1.49.0__tar.gz → 1.49.1.dev2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gooddata-pipelines might be problematic. Click here for more details.

Files changed (119)
  1. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/PKG-INFO +3 -8
  2. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/backup_manager.py +44 -42
  3. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/constants.py +2 -1
  4. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/storage.py +40 -2
  5. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/s3_storage.py +22 -11
  6. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/permissions.py +93 -72
  7. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/permissions.py +48 -20
  8. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/provisioning.py +39 -20
  9. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/pyproject.toml +10 -12
  10. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/backup_and_restore/test_backup.py +39 -29
  11. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/conftest.py +3 -0
  12. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/permissions/existing_upstream_permissions.json +16 -0
  13. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/permissions/permissions_expected_full_load.json +26 -0
  14. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/permissions/permissions_expected_incremental_load.json +36 -0
  15. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/permissions/permissions_input_full_load.json +22 -0
  16. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/permissions/permissions_input_incremental_load.json +32 -0
  17. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/users/existing_upstream_users.json +26 -0
  18. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/users/users_expected_full_load.json +21 -0
  19. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/users/users_expected_incremental_load.json +21 -0
  20. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/users/users_input_full_load.json +26 -0
  21. gooddata_pipelines-1.49.1.dev2/tests/data/provisioning/entities/users/users_input_incremental_load.json +38 -0
  22. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/test_permissions.py +261 -101
  23. gooddata_pipelines-1.49.1.dev2/tests/provisioning/entities/users/test_users.py +292 -0
  24. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tox.ini +1 -2
  25. gooddata_pipelines-1.49.0/tests/provisioning/entities/users/test_users.py +0 -203
  26. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/.gitignore +0 -0
  27. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/LICENSE.txt +0 -0
  28. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/Makefile +0 -0
  29. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/README.md +0 -0
  30. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/TODO.md +0 -0
  31. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/__init__.py +0 -0
  32. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/_version.py +0 -0
  33. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/__init__.py +0 -0
  34. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/exceptions.py +0 -0
  35. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/gooddata_api.py +0 -0
  36. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/gooddata_api_wrapper.py +0 -0
  37. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/gooddata_sdk.py +0 -0
  38. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/utils.py +0 -0
  39. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/__init__.py +0 -0
  40. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/backup_input_processor.py +0 -0
  41. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/csv_reader.py +0 -0
  42. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/__init__.py +0 -0
  43. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/input_type.py +0 -0
  44. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/workspace_response.py +0 -0
  45. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/__init__.py +0 -0
  46. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/base_storage.py +0 -0
  47. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/local_storage.py +0 -0
  48. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/logger/__init__.py +0 -0
  49. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/logger/logger.py +0 -0
  50. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/__init__.py +0 -0
  51. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/assets/wdf_setting.json +0 -0
  52. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/__init__.py +0 -0
  53. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/__init__.py +0 -0
  54. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/models/__init__.py +0 -0
  55. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/models/udf_models.py +0 -0
  56. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/user_data_filters.py +0 -0
  57. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/__init__.py +0 -0
  58. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/__init__.py +0 -0
  59. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/user_groups.py +0 -0
  60. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/users.py +0 -0
  61. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/user_groups.py +0 -0
  62. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/users.py +0 -0
  63. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/__init__.py +0 -0
  64. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/models.py +0 -0
  65. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace.py +0 -0
  66. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_filters.py +0 -0
  67. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py +0 -0
  68. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_validator.py +0 -0
  69. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/__init__.py +0 -0
  70. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/context_objects.py +0 -0
  71. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/exceptions.py +0 -0
  72. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/utils.py +0 -0
  73. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/py.typed +0 -0
  74. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/__init__.py +0 -0
  75. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/backup_and_restore/__init__.py +0 -0
  76. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/backup_and_restore/test_backup_input_processor.py +0 -0
  77. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/__init__.py +0 -0
  78. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/__init__.py +0 -0
  79. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_conf.yaml +0 -0
  80. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
  81. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/analytical_dashboards/.gitkeep +0 -0
  82. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/dashboard_plugins/.gitkeep +0 -0
  83. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/filter_contexts/.gitkeep +0 -0
  84. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/metrics/.gitkeep +0 -0
  85. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/visualization_objects/.gitkeep +0 -0
  86. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/ldm/datasets/test.yaml +0 -0
  87. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/ldm/date_instances/testinstance.yaml +0 -0
  88. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
  89. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboards/id.yaml +0 -0
  90. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/dashboard_plugins/.gitkeep +0 -0
  91. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/filter_contexts/id.yaml +0 -0
  92. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/metrics/.gitkeep +0 -0
  93. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/visualization_objects/test.yaml +0 -0
  94. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/ldm/datasets/.gitkeep +0 -0
  95. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/ldm/date_instances/.gitkeep +0 -0
  96. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
  97. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/analytical_dashboards/.gitkeep +0 -0
  98. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/dashboard_plugins/.gitkeep +0 -0
  99. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/filter_contexts/.gitkeep +0 -0
  100. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/metrics/.gitkeep +0 -0
  101. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/visualization_objects/.gitkeep +0 -0
  102. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm/datasets/.gitkeep +0 -0
  103. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm/date_instances/.gitkeep +0 -0
  104. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/user_data_filters/.gitkeep +0 -0
  105. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_local_conf.yaml +0 -0
  106. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/data/mock_responses.py +0 -0
  107. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/panther/__init__.py +0 -0
  108. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/panther/test_api_wrapper.py +0 -0
  109. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/panther/test_sdk_wrapper.py +0 -0
  110. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/__init__.py +0 -0
  111. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/__init__.py +0 -0
  112. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/__init__.py +0 -0
  113. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/test_user_groups.py +0 -0
  114. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/__init__.py +0 -0
  115. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_provisioning.py +0 -0
  116. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace.py +0 -0
  117. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace_data_filters.py +0 -0
  118. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace_data_parser.py +0 -0
  119. {gooddata_pipelines-1.49.0 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace_data_validator.py +0 -0
@@ -1,23 +1,18 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: gooddata-pipelines
3
- Version: 1.49.0
3
+ Version: 1.49.1.dev2
4
+ Summary: GoodData Cloud lifecycle automation pipelines
4
5
  Author-email: GoodData <support@gooddata.com>
5
6
  License: MIT
6
7
  License-File: LICENSE.txt
7
8
  Requires-Python: >=3.10
8
9
  Requires-Dist: boto3-stubs<2.0.0,>=1.39.3
9
10
  Requires-Dist: boto3<2.0.0,>=1.39.3
10
- Requires-Dist: gooddata-sdk~=1.49.0
11
+ Requires-Dist: gooddata-sdk~=1.49.1.dev2
11
12
  Requires-Dist: pydantic<3.0.0,>=2.11.3
12
13
  Requires-Dist: requests<3.0.0,>=2.32.3
13
14
  Requires-Dist: types-pyyaml<7.0.0,>=6.0.12.20250326
14
15
  Requires-Dist: types-requests<3.0.0,>=2.32.0
15
- Provides-Extra: dev
16
- Requires-Dist: moto<6.0.0,>=5.1.6; extra == 'dev'
17
- Requires-Dist: mypy<2.0.0,>=1.16.0; extra == 'dev'
18
- Requires-Dist: pytest-mock<4.0.0,>=3.14.0; extra == 'dev'
19
- Requires-Dist: pytest<9.0.0,>=8.3.5; extra == 'dev'
20
- Requires-Dist: ruff<0.12.0,>=0.11.2; extra == 'dev'
21
16
  Description-Content-Type: text/markdown
22
17
 
23
18
  # GoodData Pipelines
@@ -55,7 +55,7 @@ class BackupManager:
55
55
 
56
56
  self.config = config
57
57
 
58
- self.storage = self.get_storage(self.config)
58
+ self.storage = self._get_storage(self.config)
59
59
  self.org_id = self._api.get_organization_id()
60
60
 
61
61
  self.loader = BackupInputProcessor(self._api, self.config.api_page_size)
@@ -67,7 +67,7 @@ class BackupManager:
67
67
  host: str,
68
68
  token: str,
69
69
  ) -> "BackupManager":
70
- """Creates a backup worker instance using provided host and token."""
70
+ """Creates a backup worker instance using the provided host and token."""
71
71
  return cls(host=host, token=token, config=config)
72
72
 
73
73
  @classmethod
@@ -81,7 +81,8 @@ class BackupManager:
81
81
  content = profile_content(profile, profiles_path)
82
82
  return cls(**content, config=config)
83
83
 
84
- def get_storage(self, conf: BackupRestoreConfig) -> BackupStorage:
84
+ @staticmethod
85
+ def _get_storage(conf: BackupRestoreConfig) -> BackupStorage:
85
86
  """Returns the storage class based on the storage type."""
86
87
  if conf.storage_type == StorageType.S3:
87
88
  return S3Storage(conf)
@@ -100,7 +101,7 @@ class BackupManager:
100
101
  else:
101
102
  raise RuntimeError(f"{response.status_code}: {response.text}")
102
103
 
103
- def store_user_data_filters(
104
+ def _store_user_data_filters(
104
105
  self,
105
106
  user_data_filters: dict,
106
107
  export_path: Path,
@@ -128,20 +129,20 @@ class BackupManager:
128
129
  "user_data_filters",
129
130
  filter["id"] + ".yaml",
130
131
  )
131
- self.write_to_yaml(udf_file_path, filter)
132
+ self._write_to_yaml(udf_file_path, filter)
132
133
 
133
134
  @staticmethod
134
- def move_folder(source: Path, destination: Path) -> None:
135
+ def _move_folder(source: Path, destination: Path) -> None:
135
136
  """Moves the source folder to the destination."""
136
137
  shutil.move(source, destination)
137
138
 
138
139
  @staticmethod
139
- def write_to_yaml(path: str, source: Any) -> None:
140
+ def _write_to_yaml(path: str, source: Any) -> None:
140
141
  """Writes the source to a YAML file."""
141
142
  with open(path, "w") as outfile:
142
143
  yaml.dump(source, outfile)
143
144
 
144
- def get_automations_from_api(self, workspace_id: str) -> Any:
145
+ def _get_automations_from_api(self, workspace_id: str) -> Any:
145
146
  """Returns automations for the workspace as JSON."""
146
147
  response: requests.Response = self._api.get_automations(workspace_id)
147
148
  if response.ok:
@@ -152,10 +153,10 @@ class BackupManager:
152
153
  + f"{response.status_code}: {response.text}"
153
154
  )
154
155
 
155
- def store_automations(self, export_path: Path, workspace_id: str) -> None:
156
+ def _store_automations(self, export_path: Path, workspace_id: str) -> None:
156
157
  """Stores the automations in the specified export path."""
157
158
  # Get the automations from the API
158
- automations: Any = self.get_automations_from_api(workspace_id)
159
+ automations: Any = self._get_automations_from_api(workspace_id)
159
160
 
160
161
  automations_folder_path: Path = Path(
161
162
  export_path,
@@ -184,8 +185,8 @@ class BackupManager:
184
185
  # Get the filter views YAML files from the API
185
186
  self._api.store_declarative_filter_views(workspace_id, export_path)
186
187
 
187
- # Move filter views to the subfolder containing analytics model
188
- self.move_folder(
188
+ # Move filter views to the subfolder containing the analytics model
189
+ self._move_folder(
189
190
  Path(export_path, "gooddata_layouts", self.org_id, "filter_views"),
190
191
  Path(
191
192
  export_path,
@@ -197,7 +198,7 @@ class BackupManager:
197
198
  ),
198
199
  )
199
200
 
200
- def get_workspace_export(
201
+ def _get_workspace_export(
201
202
  self,
202
203
  local_target_path: str,
203
204
  workspaces_to_export: list[str],
@@ -232,9 +233,9 @@ class BackupManager:
232
233
  # be more transparent/readable and possibly safer for threading
233
234
  self._api.store_declarative_workspace(workspace_id, export_path)
234
235
  self.store_declarative_filter_views(export_path, workspace_id)
235
- self.store_automations(export_path, workspace_id)
236
+ self._store_automations(export_path, workspace_id)
236
237
 
237
- self.store_user_data_filters(
238
+ self._store_user_data_filters(
238
239
  user_data_filters, export_path, workspace_id
239
240
  )
240
241
  self.logger.info(f"Stored export for {workspace_id}")
@@ -250,7 +251,7 @@ class BackupManager:
250
251
  + "is correct and that the workspaces exist."
251
252
  )
252
253
 
253
- def archive_gooddata_layouts_to_zip(self, folder: str) -> None:
254
+ def _archive_gooddata_layouts_to_zip(self, folder: str) -> None:
254
255
  """Archives the gooddata_layouts directory to a zip file."""
255
256
  try:
256
257
  target_subdir = ""
@@ -271,11 +272,12 @@ class BackupManager:
271
272
  self.logger.error(f"Error archiving {folder} to zip: {e}")
272
273
  raise
273
274
 
274
- def split_to_batches(
275
- self, workspaces_to_export: list[str], batch_size: int
275
+ @staticmethod
276
+ def _split_to_batches(
277
+ workspaces_to_export: list[str], batch_size: int
276
278
  ) -> list[BackupBatch]:
277
- """Splits the list of workspaces to into batches of the specified size.
278
- The batch is respresented as a list of workspace IDs.
279
+ """Splits the list of workspaces into batches of the specified size.
280
+ The batch is represented as a list of workspace IDs.
279
281
  Returns a list of batches (i.e. list of lists of IDs)
280
282
  """
281
283
  list_of_batches = []
@@ -286,7 +288,7 @@ class BackupManager:
286
288
 
287
289
  return list_of_batches
288
290
 
289
- def process_batch(
291
+ def _process_batch(
290
292
  self,
291
293
  batch: BackupBatch,
292
294
  stop_event: threading.Event,
@@ -298,14 +300,14 @@ class BackupManager:
298
300
  The base wait time is defined by BackupSettings.RETRY_DELAY.
299
301
  """
300
302
  if stop_event.is_set():
301
- # If the stop_event flag is set, return. This will terminate the thread.
303
+ # If the stop_event flag is set, return. This will terminate the thread
302
304
  return
303
305
 
304
306
  try:
305
307
  with tempfile.TemporaryDirectory() as tmpdir:
306
- self.get_workspace_export(tmpdir, batch.list_of_ids)
308
+ self._get_workspace_export(tmpdir, batch.list_of_ids)
307
309
 
308
- self.archive_gooddata_layouts_to_zip(
310
+ self._archive_gooddata_layouts_to_zip(
309
311
  str(Path(tmpdir, self.org_id))
310
312
  )
311
313
 
@@ -316,7 +318,7 @@ class BackupManager:
316
318
  return
317
319
 
318
320
  elif retry_count < BackupSettings.MAX_RETRIES:
319
- # Retry with exponential backoff until MAX_RETRIES.
321
+ # Retry with exponential backoff until MAX_RETRIES
320
322
  next_retry = retry_count + 1
321
323
  wait_time = BackupSettings.RETRY_DELAY**next_retry
322
324
  self.logger.info(
@@ -326,13 +328,13 @@ class BackupManager:
326
328
  )
327
329
 
328
330
  time.sleep(wait_time)
329
- self.process_batch(batch, stop_event, next_retry)
331
+ self._process_batch(batch, stop_event, next_retry)
330
332
  else:
331
- # If the batch fails after MAX_RETRIES, raise the error.
333
+ # If the batch fails after MAX_RETRIES, raise the error
332
334
  self.logger.error(f"Batch failed: {e.__class__.__name__}: {e}")
333
335
  raise
334
336
 
335
- def process_batches_in_parallel(
337
+ def _process_batches_in_parallel(
336
338
  self,
337
339
  batches: list[BackupBatch],
338
340
  ) -> None:
@@ -345,14 +347,14 @@ class BackupManager:
345
347
  stop_event = threading.Event()
346
348
 
347
349
  with ThreadPoolExecutor(
348
- max_workers=BackupSettings.MAX_WORKERS
350
+ max_workers=self.config.max_workers
349
351
  ) as executor:
350
352
  # Set the futures tasks.
351
353
  futures = []
352
354
  for batch in batches:
353
355
  futures.append(
354
356
  executor.submit(
355
- self.process_batch,
357
+ self._process_batch,
356
358
  batch,
357
359
  stop_event,
358
360
  )
@@ -363,10 +365,10 @@ class BackupManager:
363
365
  try:
364
366
  future.result()
365
367
  except Exception:
366
- # On failure, set the flag to True - signal running processes to stop.
368
+ # On failure, set the flag to True - signal running processes to stop
367
369
  stop_event.set()
368
370
 
369
- # Cancel unstarted threads.
371
+ # Cancel unstarted threads
370
372
  for f in futures:
371
373
  if not f.done():
372
374
  f.cancel()
@@ -383,10 +385,10 @@ class BackupManager:
383
385
  workspace in storage specified in the configuration.
384
386
 
385
387
  Args:
386
- path_to_csv (str): Path to a CSV file containing a list of workspace IDs.
388
+ path_to_csv (str): Path to a CSV file containing a list of workspace IDs
387
389
  workspace_ids (list[str]): List of workspace IDs
388
390
  """
389
- self.backup(InputType.LIST_OF_WORKSPACES, path_to_csv, workspace_ids)
391
+ self._backup(InputType.LIST_OF_WORKSPACES, path_to_csv, workspace_ids)
390
392
 
391
393
  def backup_hierarchies(
392
394
  self, path_to_csv: str | None, workspace_ids: list[str] | None
@@ -394,16 +396,16 @@ class BackupManager:
394
396
  """Runs the backup process for a list of hierarchies.
395
397
 
396
398
  Will take the list of workspace IDs or read the list of workspace IDs
397
- from a CSV file and create backup for each those workspaces' hierarchies
399
+ from a CSV file and create backup for each of those workspaces' hierarchies
398
400
  in storage specified in the configuration.
399
401
  Workspace hierarchy means the workspace itself and all its direct and
400
402
  indirect children.
401
403
 
402
404
  Args:
403
- path_to_csv (str): Path to a CSV file containing a list of workspace IDs.
405
+ path_to_csv (str): Path to a CSV file containing a list of workspace IDs
404
406
  workspace_ids (list[str]): List of workspace IDs
405
407
  """
406
- self.backup(InputType.HIERARCHY, path_to_csv, workspace_ids)
408
+ self._backup(InputType.HIERARCHY, path_to_csv, workspace_ids)
407
409
 
408
410
  def backup_entire_organization(self) -> None:
409
411
  """Runs the backup process for the entire organization.
@@ -411,22 +413,22 @@ class BackupManager:
411
413
  Will create backup for all workspaces in the organization in storage
412
414
  specified in the configuration.
413
415
  """
414
- self.backup(InputType.ORGANIZATION)
416
+ self._backup(InputType.ORGANIZATION)
415
417
 
416
- def backup(
418
+ def _backup(
417
419
  self,
418
420
  input_type: InputType,
419
421
  path_to_csv: str | None = None,
420
422
  workspace_ids: list[str] | None = None,
421
423
  ) -> None:
422
- """Runs the backup process with selected input type."""
424
+ """Runs the backup process with the selected input type."""
423
425
  try:
424
426
  workspaces_to_export: list[str] = self.loader.get_ids_to_backup(
425
427
  input_type,
426
428
  path_to_csv,
427
429
  workspace_ids,
428
430
  )
429
- batches = self.split_to_batches(
431
+ batches = self._split_to_batches(
430
432
  workspaces_to_export, self.config.batch_size
431
433
  )
432
434
 
@@ -434,7 +436,7 @@ class BackupManager:
434
436
  f"Exporting {len(workspaces_to_export)} workspaces in {len(batches)} batches."
435
437
  )
436
438
 
437
- self.process_batches_in_parallel(batches)
439
+ self._process_batches_in_parallel(batches)
438
440
 
439
441
  self.logger.info("Backup completed")
440
442
  except Exception as e:
@@ -1,3 +1,4 @@
1
+ # (C) 2025 GoodData Corporation
1
2
  import datetime
2
3
  from dataclasses import dataclass
3
4
 
@@ -22,7 +23,7 @@ class DirNames:
22
23
 
23
24
  @dataclass(frozen=True)
24
25
  class ConcurrencyDefaults:
25
- MAX_WORKERS = 2
26
+ MAX_WORKERS = 1
26
27
  DEFAULT_BATCH_SIZE = 100
27
28
 
28
29
 
@@ -21,10 +21,40 @@ class S3StorageConfig(BaseModel):
21
21
 
22
22
  backup_path: str
23
23
  bucket: str
24
- profile: str = "default"
24
+ profile: Optional[str] = None
25
25
  aws_access_key_id: Optional[str] = None
26
26
  aws_secret_access_key: Optional[str] = None
27
- aws_default_region: Optional[str] = None
27
+ aws_default_region: Optional[str] = "us-east-1"
28
+
29
+ @classmethod
30
+ def from_iam_role(cls, backup_path: str, bucket: str) -> "S3StorageConfig":
31
+ """Use default IAM role or environment credentials."""
32
+ return cls(backup_path=backup_path, bucket=bucket)
33
+
34
+ @classmethod
35
+ def from_aws_credentials(
36
+ cls,
37
+ backup_path: str,
38
+ bucket: str,
39
+ aws_access_key_id: str,
40
+ aws_secret_access_key: str,
41
+ aws_default_region: str,
42
+ ) -> "S3StorageConfig":
43
+ """Use explicit AWS access keys and region."""
44
+ return cls(
45
+ backup_path=backup_path,
46
+ bucket=bucket,
47
+ aws_access_key_id=aws_access_key_id,
48
+ aws_secret_access_key=aws_secret_access_key,
49
+ aws_default_region=aws_default_region,
50
+ )
51
+
52
+ @classmethod
53
+ def from_aws_profile(
54
+ cls, backup_path: str, bucket: str, profile: str
55
+ ) -> "S3StorageConfig":
56
+ """Use a named AWS CLI profile."""
57
+ return cls(backup_path=backup_path, bucket=bucket, profile=profile)
28
58
 
29
59
 
30
60
  class LocalStorageConfig(BaseModel):
@@ -53,6 +83,14 @@ class BackupRestoreConfig(BaseModel):
53
83
  description="Batch size must be greater than 0",
54
84
  ),
55
85
  ] = Field(default=BackupSettings.DEFAULT_BATCH_SIZE)
86
+ max_workers: Annotated[
87
+ int,
88
+ Field(
89
+ gt=0,
90
+ lt=3,
91
+ description="Max workers must be greater than 0 and less than 3",
92
+ ),
93
+ ] = Field(default=BackupSettings.MAX_WORKERS)
56
94
 
57
95
  @classmethod
58
96
  def from_yaml(cls, conf_path: str) -> "BackupRestoreConfig":
@@ -22,6 +22,7 @@ class S3Storage(BackupStorage):
22
22
 
23
23
  self._config = conf.storage
24
24
  self._session = self._create_boto_session(self._config)
25
+ self._client = self._session.client("s3")
25
26
  self._resource = self._session.resource("s3")
26
27
  self._bucket = self._resource.Bucket(self._config.bucket) # type: ignore [missing library stubs]
27
28
  suffix = "/" if not self._config.backup_path.endswith("/") else ""
@@ -43,32 +44,40 @@ class S3Storage(BackupStorage):
43
44
  )
44
45
  except Exception:
45
46
  self.logger.warning(
46
- "Failed to create boto3 session with supplied credentials. Falling back to profile..."
47
+ "Failed to create boto3 session with supplied credentials."
48
+ )
49
+
50
+ if config.profile:
51
+ try:
52
+ return boto3.Session(profile_name=config.profile)
53
+ except Exception:
54
+ self.logger.warning(
55
+ f"AWS profile [{config.profile}] not found."
47
56
  )
48
57
 
49
58
  try:
50
- return boto3.Session(profile_name=config.profile)
59
+ return boto3.Session()
51
60
  except Exception:
52
- self.logger.warning(
53
- 'AWS profile "[default]" not found. Trying other fallback methods...'
61
+ self.logger.error(
62
+ "Failed to create boto3 session with default IAM role or environment credentials."
63
+ )
64
+ raise RuntimeError(
65
+ "Unable to create AWS session. Please check your AWS credentials, profile, or IAM role configuration."
54
66
  )
55
-
56
- return boto3.Session()
57
67
 
58
68
  def _verify_connection(self) -> None:
59
69
  """
60
70
  Pings the S3 bucket to verify that the connection is working.
61
71
  """
62
72
  try:
63
- # TODO: install boto3 s3 stubs
64
- self._resource.meta.client.head_bucket(Bucket=self._config.bucket)
73
+ self._client.head_bucket(Bucket=self._config.bucket)
65
74
  except Exception as e:
66
75
  raise RuntimeError(
67
76
  f"Failed to connect to S3 bucket {self._config.bucket}: {e}"
68
77
  )
69
78
 
70
79
  def export(self, folder: str, org_id: str) -> None:
71
- """Uploads the content of the folder to S3 as backup."""
80
+ """Uploads the content of the folder to S3 as a backup."""
72
81
  storage_path = f"{self._config.bucket}/{self._backup_path}"
73
82
  self.logger.info(f"Uploading {org_id} to {storage_path}")
74
83
  folder = f"{folder}/{org_id}"
@@ -77,10 +86,12 @@ class S3Storage(BackupStorage):
77
86
  export_path = (
78
87
  f"{self._backup_path}{org_id}/{full_path[len(folder) + 1 :]}/"
79
88
  )
80
- self._bucket.put_object(Key=export_path)
89
+ self._client.put_object(Bucket=self._config.bucket, Key=export_path)
81
90
 
82
91
  for file in files:
83
92
  full_path = os.path.join(subdir, file)
84
93
  with open(full_path, "rb") as data:
85
94
  export_path = f"{self._backup_path}{org_id}/{full_path[len(folder) + 1 :]}"
86
- self._bucket.put_object(Key=export_path, Body=data)
95
+ self._client.put_object(
96
+ Bucket=self._config.bucket, Key=export_path, Body=data
97
+ )
@@ -1,103 +1,101 @@
1
1
  # (C) 2025 GoodData Corporation
2
- from dataclasses import dataclass
2
+ from abc import abstractmethod
3
3
  from enum import Enum
4
- from typing import Any, Iterator, TypeAlias
4
+ from typing import Any, Iterator, TypeAlias, TypeVar
5
5
 
6
+ import attrs
6
7
  from gooddata_sdk.catalog.identifier import CatalogAssigneeIdentifier
7
8
  from gooddata_sdk.catalog.permission.declarative_model.permission import (
8
9
  CatalogDeclarativeSingleWorkspacePermission,
9
10
  CatalogDeclarativeWorkspacePermissions,
10
11
  )
12
+ from pydantic import BaseModel
11
13
 
12
14
  from gooddata_pipelines.provisioning.utils.exceptions import BaseUserException
13
15
 
14
- # TODO: refactor the full load and incremental load models to reuse as much as possible
15
- # TODO: use pydantic models instead of dataclasses?
16
- # TODO: make the validation logic more readable (as in PermissionIncrementalLoad)
17
-
18
16
  TargetsPermissionDict: TypeAlias = dict[str, dict[str, bool]]
17
+ ConstructorType = TypeVar("ConstructorType", bound="ConstructorMixin")
19
18
 
20
19
 
21
- class PermissionType(Enum):
20
+ class PermissionType(str, Enum):
21
+ # NOTE: Start using StrEnum with Python 3.11
22
22
  user = "user"
23
23
  user_group = "userGroup"
24
24
 
25
25
 
26
- @dataclass(frozen=True)
27
- class PermissionIncrementalLoad:
28
- permission: str
29
- workspace_id: str
30
- id: str
31
- type: PermissionType
32
- is_active: bool
26
+ class ConstructorMixin:
27
+ @staticmethod
28
+ def _get_id_and_type(
29
+ permission: dict[str, Any],
30
+ ) -> tuple[str, PermissionType]:
31
+ user_id: str | None = permission.get("user_id")
32
+ user_group_id: str | None = permission.get("ug_id")
33
+ if user_id and user_group_id:
34
+ raise ValueError("Only one of user_id or ug_id must be present")
35
+ elif user_id:
36
+ return user_id, PermissionType.user
37
+ elif user_group_id:
38
+ return user_group_id, PermissionType.user_group
39
+ else:
40
+ raise ValueError("Either user_id or ug_id must be present")
33
41
 
34
42
  @classmethod
35
43
  def from_list_of_dicts(
36
- cls, data: list[dict[str, Any]]
37
- ) -> list["PermissionIncrementalLoad"]:
38
- """Creates a list of User objects from list of dicts."""
39
- id: str
44
+ cls: type[ConstructorType], data: list[dict[str, Any]]
45
+ ) -> list[ConstructorType]:
46
+ """Creates a list of instances from list of dicts."""
47
+ # NOTE: We can use typing.Self for the return type in Python 3.11
40
48
  permissions = []
41
49
  for permission in data:
42
- user_id: str | None = permission.get("user_id")
43
- user_group_id: str | None = permission.get("ug_id")
44
-
45
- if user_id is not None:
46
- target_type = PermissionType.user
47
- id = user_id
48
- elif user_group_id is not None:
49
- target_type = PermissionType.user_group
50
- id = user_group_id
51
-
52
- permissions.append(
53
- PermissionIncrementalLoad(
54
- permission=permission["ws_permissions"],
55
- workspace_id=permission["ws_id"],
56
- id=id,
57
- type=target_type,
58
- is_active=str(permission["is_active"]).lower() == "true",
59
- )
60
- )
50
+ permissions.append(cls.from_dict(permission))
61
51
  return permissions
62
52
 
53
+ @classmethod
54
+ @abstractmethod
55
+ def from_dict(cls, data: dict[str, Any]) -> Any:
56
+ """Construction form a dictionary to be implemented by subclasses."""
57
+ pass
58
+
63
59
 
64
- @dataclass(frozen=True)
65
- class PermissionFullLoad:
60
+ class PermissionIncrementalLoad(BaseModel, ConstructorMixin):
66
61
  permission: str
67
62
  workspace_id: str
68
- id: str
69
- type: PermissionType
63
+ id_: str
64
+ type_: PermissionType
65
+ is_active: bool
70
66
 
71
67
  @classmethod
72
- def from_list_of_dicts(
73
- cls, data: list[dict[str, Any]]
74
- ) -> list["PermissionFullLoad"]:
75
- """Creates a list of User objects from list of dicts."""
76
- permissions = []
77
- for permission in data:
78
- id = (
79
- permission["user_id"]
80
- if permission["user_id"]
81
- else permission["ug_id"]
82
- )
68
+ def from_dict(cls, data: dict[str, Any]) -> "PermissionIncrementalLoad":
69
+ """Returns an instance of PermissionIncrementalLoad from a dictionary."""
70
+ id_, target_type = cls._get_id_and_type(data)
71
+ return cls(
72
+ permission=data["ws_permissions"],
73
+ workspace_id=data["ws_id"],
74
+ id_=id_,
75
+ type_=target_type,
76
+ is_active=data["is_active"],
77
+ )
83
78
 
84
- if permission["user_id"]:
85
- target_type = PermissionType.user
86
- else:
87
- target_type = PermissionType.user_group
88
-
89
- permissions.append(
90
- PermissionFullLoad(
91
- permission=permission["ws_permissions"],
92
- workspace_id=permission["ws_id"],
93
- id=id,
94
- type=target_type,
95
- )
96
- )
97
- return permissions
98
79
 
80
+ class PermissionFullLoad(BaseModel, ConstructorMixin):
81
+ permission: str
82
+ workspace_id: str
83
+ id_: str
84
+ type_: PermissionType
85
+
86
+ @classmethod
87
+ def from_dict(cls, data: dict[str, Any]) -> "PermissionFullLoad":
88
+ """Returns an instance of PermissionFullLoad from a dictionary."""
89
+ id_, target_type = cls._get_id_and_type(data)
90
+ return cls(
91
+ permission=data["ws_permissions"],
92
+ workspace_id=data["ws_id"],
93
+ id_=id_,
94
+ type_=target_type,
95
+ )
99
96
 
100
- @dataclass
97
+
98
+ @attrs.define
101
99
  class PermissionDeclaration:
102
100
  users: TargetsPermissionDict
103
101
  user_groups: TargetsPermissionDict
@@ -192,7 +190,9 @@ class PermissionDeclaration:
192
190
  permissions=permission_declarations
193
191
  )
194
192
 
195
- def add_permission(self, permission: PermissionIncrementalLoad) -> None:
193
+ def add_incremental_permission(
194
+ self, permission: PermissionIncrementalLoad
195
+ ) -> None:
196
196
  """
197
197
  Adds WSPermission object into respective field within the instance.
198
198
  Handles duplicate permissions and different combinations of input
@@ -200,15 +200,15 @@ class PermissionDeclaration:
200
200
  """
201
201
  target_dict = (
202
202
  self.users
203
- if permission.type == PermissionType.user
203
+ if permission.type_ == PermissionType.user
204
204
  else self.user_groups
205
205
  )
206
206
 
207
- if permission.id not in target_dict:
208
- target_dict[permission.id] = {}
207
+ if permission.id_ not in target_dict:
208
+ target_dict[permission.id_] = {}
209
209
 
210
210
  is_active = permission.is_active
211
- target_permissions = target_dict[permission.id]
211
+ target_permissions = target_dict[permission.id_]
212
212
  permission_value = permission.permission
213
213
 
214
214
  if permission_value not in target_permissions:
@@ -225,6 +225,27 @@ class PermissionDeclaration:
225
225
  )
226
226
  target_permissions[permission_value] = is_active
227
227
 
228
+ def add_full_load_permission(self, permission: PermissionFullLoad) -> None:
229
+ """
230
+ Adds WSPermission object into respective field within the instance.
231
+ Handles duplicate permissions and different combinations of input
232
+ and upstream is_active permission states.
233
+ """
234
+ target_dict = (
235
+ self.users
236
+ if permission.type_ == PermissionType.user
237
+ else self.user_groups
238
+ )
239
+
240
+ if permission.id_ not in target_dict:
241
+ target_dict[permission.id_] = {}
242
+
243
+ target_permissions = target_dict[permission.id_]
244
+ permission_value = permission.permission
245
+
246
+ if permission_value not in target_permissions:
247
+ target_permissions[permission_value] = True
248
+
228
249
  def upsert(self, other: "PermissionDeclaration") -> None:
229
250
  """
230
251
  Modifies the owner object by merging with the other.