gooddata-pipelines 1.49.1.dev1__tar.gz → 1.49.1.dev2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gooddata-pipelines has been flagged as potentially problematic; see the package registry listing for details.

Files changed (118)
  1. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/PKG-INFO +2 -2
  2. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/backup_manager.py +44 -42
  3. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/constants.py +2 -1
  4. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/storage.py +40 -2
  5. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/s3_storage.py +22 -11
  6. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/pyproject.toml +2 -2
  7. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/backup_and_restore/test_backup.py +15 -15
  8. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/.gitignore +0 -0
  9. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/LICENSE.txt +0 -0
  10. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/Makefile +0 -0
  11. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/README.md +0 -0
  12. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/TODO.md +0 -0
  13. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/__init__.py +0 -0
  14. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/_version.py +0 -0
  15. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/__init__.py +0 -0
  16. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/exceptions.py +0 -0
  17. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/gooddata_api.py +0 -0
  18. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/gooddata_api_wrapper.py +0 -0
  19. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/gooddata_sdk.py +0 -0
  20. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/api/utils.py +0 -0
  21. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/__init__.py +0 -0
  22. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/backup_input_processor.py +0 -0
  23. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/csv_reader.py +0 -0
  24. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/__init__.py +0 -0
  25. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/input_type.py +0 -0
  26. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/models/workspace_response.py +0 -0
  27. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/__init__.py +0 -0
  28. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/base_storage.py +0 -0
  29. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/backup_and_restore/storage/local_storage.py +0 -0
  30. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/logger/__init__.py +0 -0
  31. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/logger/logger.py +0 -0
  32. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/__init__.py +0 -0
  33. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/assets/wdf_setting.json +0 -0
  34. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/__init__.py +0 -0
  35. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/__init__.py +0 -0
  36. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/models/__init__.py +0 -0
  37. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/models/udf_models.py +0 -0
  38. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/user_data_filters/user_data_filters.py +0 -0
  39. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/__init__.py +0 -0
  40. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/__init__.py +0 -0
  41. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/permissions.py +0 -0
  42. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/user_groups.py +0 -0
  43. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/models/users.py +0 -0
  44. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/permissions.py +0 -0
  45. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/user_groups.py +0 -0
  46. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/users/users.py +0 -0
  47. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/__init__.py +0 -0
  48. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/models.py +0 -0
  49. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace.py +0 -0
  50. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_filters.py +0 -0
  51. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py +0 -0
  52. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/entities/workspaces/workspace_data_validator.py +0 -0
  53. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/provisioning.py +0 -0
  54. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/__init__.py +0 -0
  55. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/context_objects.py +0 -0
  56. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/exceptions.py +0 -0
  57. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/provisioning/utils/utils.py +0 -0
  58. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/gooddata_pipelines/py.typed +0 -0
  59. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/__init__.py +0 -0
  60. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/backup_and_restore/__init__.py +0 -0
  61. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/backup_and_restore/test_backup_input_processor.py +0 -0
  62. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/conftest.py +0 -0
  63. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/__init__.py +0 -0
  64. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/__init__.py +0 -0
  65. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_conf.yaml +0 -0
  66. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
  67. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/analytical_dashboards/.gitkeep +0 -0
  68. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/dashboard_plugins/.gitkeep +0 -0
  69. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/filter_contexts/.gitkeep +0 -0
  70. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/metrics/.gitkeep +0 -0
  71. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model/visualization_objects/.gitkeep +0 -0
  72. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/ldm/datasets/test.yaml +0 -0
  73. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/ldm/date_instances/testinstance.yaml +0 -0
  74. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
  75. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboards/id.yaml +0 -0
  76. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/dashboard_plugins/.gitkeep +0 -0
  77. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/filter_contexts/id.yaml +0 -0
  78. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/metrics/.gitkeep +0 -0
  79. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/visualization_objects/test.yaml +0 -0
  80. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/ldm/datasets/.gitkeep +0 -0
  81. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/ldm/date_instances/.gitkeep +0 -0
  82. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/analytical_dashboard_extensions/.gitkeep +0 -0
  83. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/analytical_dashboards/.gitkeep +0 -0
  84. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/dashboard_plugins/.gitkeep +0 -0
  85. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/filter_contexts/.gitkeep +0 -0
  86. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/metrics/.gitkeep +0 -0
  87. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/analytics_model/visualization_objects/.gitkeep +0 -0
  88. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm/datasets/.gitkeep +0 -0
  89. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm/date_instances/.gitkeep +0 -0
  90. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_exports/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/user_data_filters/.gitkeep +0 -0
  91. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/backup/test_local_conf.yaml +0 -0
  92. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/mock_responses.py +0 -0
  93. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/permissions/existing_upstream_permissions.json +0 -0
  94. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/permissions/permissions_expected_full_load.json +0 -0
  95. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/permissions/permissions_expected_incremental_load.json +0 -0
  96. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/permissions/permissions_input_full_load.json +0 -0
  97. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/permissions/permissions_input_incremental_load.json +0 -0
  98. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/users/existing_upstream_users.json +0 -0
  99. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/users/users_expected_full_load.json +0 -0
  100. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/users/users_expected_incremental_load.json +0 -0
  101. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/users/users_input_full_load.json +0 -0
  102. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/data/provisioning/entities/users/users_input_incremental_load.json +0 -0
  103. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/panther/__init__.py +0 -0
  104. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/panther/test_api_wrapper.py +0 -0
  105. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/panther/test_sdk_wrapper.py +0 -0
  106. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/__init__.py +0 -0
  107. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/__init__.py +0 -0
  108. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/__init__.py +0 -0
  109. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/test_permissions.py +0 -0
  110. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/test_user_groups.py +0 -0
  111. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/users/test_users.py +0 -0
  112. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/__init__.py +0 -0
  113. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_provisioning.py +0 -0
  114. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace.py +0 -0
  115. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace_data_filters.py +0 -0
  116. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace_data_parser.py +0 -0
  117. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tests/provisioning/entities/workspaces/test_workspace_data_validator.py +0 -0
  118. {gooddata_pipelines-1.49.1.dev1 → gooddata_pipelines-1.49.1.dev2}/tox.ini +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: gooddata-pipelines
3
- Version: 1.49.1.dev1
3
+ Version: 1.49.1.dev2
4
4
  Summary: GoodData Cloud lifecycle automation pipelines
5
5
  Author-email: GoodData <support@gooddata.com>
6
6
  License: MIT
@@ -8,7 +8,7 @@ License-File: LICENSE.txt
8
8
  Requires-Python: >=3.10
9
9
  Requires-Dist: boto3-stubs<2.0.0,>=1.39.3
10
10
  Requires-Dist: boto3<2.0.0,>=1.39.3
11
- Requires-Dist: gooddata-sdk~=1.49.1.dev1
11
+ Requires-Dist: gooddata-sdk~=1.49.1.dev2
12
12
  Requires-Dist: pydantic<3.0.0,>=2.11.3
13
13
  Requires-Dist: requests<3.0.0,>=2.32.3
14
14
  Requires-Dist: types-pyyaml<7.0.0,>=6.0.12.20250326
@@ -55,7 +55,7 @@ class BackupManager:
55
55
 
56
56
  self.config = config
57
57
 
58
- self.storage = self.get_storage(self.config)
58
+ self.storage = self._get_storage(self.config)
59
59
  self.org_id = self._api.get_organization_id()
60
60
 
61
61
  self.loader = BackupInputProcessor(self._api, self.config.api_page_size)
@@ -67,7 +67,7 @@ class BackupManager:
67
67
  host: str,
68
68
  token: str,
69
69
  ) -> "BackupManager":
70
- """Creates a backup worker instance using provided host and token."""
70
+ """Creates a backup worker instance using the provided host and token."""
71
71
  return cls(host=host, token=token, config=config)
72
72
 
73
73
  @classmethod
@@ -81,7 +81,8 @@ class BackupManager:
81
81
  content = profile_content(profile, profiles_path)
82
82
  return cls(**content, config=config)
83
83
 
84
- def get_storage(self, conf: BackupRestoreConfig) -> BackupStorage:
84
+ @staticmethod
85
+ def _get_storage(conf: BackupRestoreConfig) -> BackupStorage:
85
86
  """Returns the storage class based on the storage type."""
86
87
  if conf.storage_type == StorageType.S3:
87
88
  return S3Storage(conf)
@@ -100,7 +101,7 @@ class BackupManager:
100
101
  else:
101
102
  raise RuntimeError(f"{response.status_code}: {response.text}")
102
103
 
103
- def store_user_data_filters(
104
+ def _store_user_data_filters(
104
105
  self,
105
106
  user_data_filters: dict,
106
107
  export_path: Path,
@@ -128,20 +129,20 @@ class BackupManager:
128
129
  "user_data_filters",
129
130
  filter["id"] + ".yaml",
130
131
  )
131
- self.write_to_yaml(udf_file_path, filter)
132
+ self._write_to_yaml(udf_file_path, filter)
132
133
 
133
134
  @staticmethod
134
- def move_folder(source: Path, destination: Path) -> None:
135
+ def _move_folder(source: Path, destination: Path) -> None:
135
136
  """Moves the source folder to the destination."""
136
137
  shutil.move(source, destination)
137
138
 
138
139
  @staticmethod
139
- def write_to_yaml(path: str, source: Any) -> None:
140
+ def _write_to_yaml(path: str, source: Any) -> None:
140
141
  """Writes the source to a YAML file."""
141
142
  with open(path, "w") as outfile:
142
143
  yaml.dump(source, outfile)
143
144
 
144
- def get_automations_from_api(self, workspace_id: str) -> Any:
145
+ def _get_automations_from_api(self, workspace_id: str) -> Any:
145
146
  """Returns automations for the workspace as JSON."""
146
147
  response: requests.Response = self._api.get_automations(workspace_id)
147
148
  if response.ok:
@@ -152,10 +153,10 @@ class BackupManager:
152
153
  + f"{response.status_code}: {response.text}"
153
154
  )
154
155
 
155
- def store_automations(self, export_path: Path, workspace_id: str) -> None:
156
+ def _store_automations(self, export_path: Path, workspace_id: str) -> None:
156
157
  """Stores the automations in the specified export path."""
157
158
  # Get the automations from the API
158
- automations: Any = self.get_automations_from_api(workspace_id)
159
+ automations: Any = self._get_automations_from_api(workspace_id)
159
160
 
160
161
  automations_folder_path: Path = Path(
161
162
  export_path,
@@ -184,8 +185,8 @@ class BackupManager:
184
185
  # Get the filter views YAML files from the API
185
186
  self._api.store_declarative_filter_views(workspace_id, export_path)
186
187
 
187
- # Move filter views to the subfolder containing analytics model
188
- self.move_folder(
188
+ # Move filter views to the subfolder containing the analytics model
189
+ self._move_folder(
189
190
  Path(export_path, "gooddata_layouts", self.org_id, "filter_views"),
190
191
  Path(
191
192
  export_path,
@@ -197,7 +198,7 @@ class BackupManager:
197
198
  ),
198
199
  )
199
200
 
200
- def get_workspace_export(
201
+ def _get_workspace_export(
201
202
  self,
202
203
  local_target_path: str,
203
204
  workspaces_to_export: list[str],
@@ -232,9 +233,9 @@ class BackupManager:
232
233
  # be more transparent/readable and possibly safer for threading
233
234
  self._api.store_declarative_workspace(workspace_id, export_path)
234
235
  self.store_declarative_filter_views(export_path, workspace_id)
235
- self.store_automations(export_path, workspace_id)
236
+ self._store_automations(export_path, workspace_id)
236
237
 
237
- self.store_user_data_filters(
238
+ self._store_user_data_filters(
238
239
  user_data_filters, export_path, workspace_id
239
240
  )
240
241
  self.logger.info(f"Stored export for {workspace_id}")
@@ -250,7 +251,7 @@ class BackupManager:
250
251
  + "is correct and that the workspaces exist."
251
252
  )
252
253
 
253
- def archive_gooddata_layouts_to_zip(self, folder: str) -> None:
254
+ def _archive_gooddata_layouts_to_zip(self, folder: str) -> None:
254
255
  """Archives the gooddata_layouts directory to a zip file."""
255
256
  try:
256
257
  target_subdir = ""
@@ -271,11 +272,12 @@ class BackupManager:
271
272
  self.logger.error(f"Error archiving {folder} to zip: {e}")
272
273
  raise
273
274
 
274
- def split_to_batches(
275
- self, workspaces_to_export: list[str], batch_size: int
275
+ @staticmethod
276
+ def _split_to_batches(
277
+ workspaces_to_export: list[str], batch_size: int
276
278
  ) -> list[BackupBatch]:
277
- """Splits the list of workspaces to into batches of the specified size.
278
- The batch is respresented as a list of workspace IDs.
279
+ """Splits the list of workspaces into batches of the specified size.
280
+ The batch is represented as a list of workspace IDs.
279
281
  Returns a list of batches (i.e. list of lists of IDs)
280
282
  """
281
283
  list_of_batches = []
@@ -286,7 +288,7 @@ class BackupManager:
286
288
 
287
289
  return list_of_batches
288
290
 
289
- def process_batch(
291
+ def _process_batch(
290
292
  self,
291
293
  batch: BackupBatch,
292
294
  stop_event: threading.Event,
@@ -298,14 +300,14 @@ class BackupManager:
298
300
  The base wait time is defined by BackupSettings.RETRY_DELAY.
299
301
  """
300
302
  if stop_event.is_set():
301
- # If the stop_event flag is set, return. This will terminate the thread.
303
+ # If the stop_event flag is set, return. This will terminate the thread
302
304
  return
303
305
 
304
306
  try:
305
307
  with tempfile.TemporaryDirectory() as tmpdir:
306
- self.get_workspace_export(tmpdir, batch.list_of_ids)
308
+ self._get_workspace_export(tmpdir, batch.list_of_ids)
307
309
 
308
- self.archive_gooddata_layouts_to_zip(
310
+ self._archive_gooddata_layouts_to_zip(
309
311
  str(Path(tmpdir, self.org_id))
310
312
  )
311
313
 
@@ -316,7 +318,7 @@ class BackupManager:
316
318
  return
317
319
 
318
320
  elif retry_count < BackupSettings.MAX_RETRIES:
319
- # Retry with exponential backoff until MAX_RETRIES.
321
+ # Retry with exponential backoff until MAX_RETRIES
320
322
  next_retry = retry_count + 1
321
323
  wait_time = BackupSettings.RETRY_DELAY**next_retry
322
324
  self.logger.info(
@@ -326,13 +328,13 @@ class BackupManager:
326
328
  )
327
329
 
328
330
  time.sleep(wait_time)
329
- self.process_batch(batch, stop_event, next_retry)
331
+ self._process_batch(batch, stop_event, next_retry)
330
332
  else:
331
- # If the batch fails after MAX_RETRIES, raise the error.
333
+ # If the batch fails after MAX_RETRIES, raise the error
332
334
  self.logger.error(f"Batch failed: {e.__class__.__name__}: {e}")
333
335
  raise
334
336
 
335
- def process_batches_in_parallel(
337
+ def _process_batches_in_parallel(
336
338
  self,
337
339
  batches: list[BackupBatch],
338
340
  ) -> None:
@@ -345,14 +347,14 @@ class BackupManager:
345
347
  stop_event = threading.Event()
346
348
 
347
349
  with ThreadPoolExecutor(
348
- max_workers=BackupSettings.MAX_WORKERS
350
+ max_workers=self.config.max_workers
349
351
  ) as executor:
350
352
  # Set the futures tasks.
351
353
  futures = []
352
354
  for batch in batches:
353
355
  futures.append(
354
356
  executor.submit(
355
- self.process_batch,
357
+ self._process_batch,
356
358
  batch,
357
359
  stop_event,
358
360
  )
@@ -363,10 +365,10 @@ class BackupManager:
363
365
  try:
364
366
  future.result()
365
367
  except Exception:
366
- # On failure, set the flag to True - signal running processes to stop.
368
+ # On failure, set the flag to True - signal running processes to stop
367
369
  stop_event.set()
368
370
 
369
- # Cancel unstarted threads.
371
+ # Cancel unstarted threads
370
372
  for f in futures:
371
373
  if not f.done():
372
374
  f.cancel()
@@ -383,10 +385,10 @@ class BackupManager:
383
385
  workspace in storage specified in the configuration.
384
386
 
385
387
  Args:
386
- path_to_csv (str): Path to a CSV file containing a list of workspace IDs.
388
+ path_to_csv (str): Path to a CSV file containing a list of workspace IDs
387
389
  workspace_ids (list[str]): List of workspace IDs
388
390
  """
389
- self.backup(InputType.LIST_OF_WORKSPACES, path_to_csv, workspace_ids)
391
+ self._backup(InputType.LIST_OF_WORKSPACES, path_to_csv, workspace_ids)
390
392
 
391
393
  def backup_hierarchies(
392
394
  self, path_to_csv: str | None, workspace_ids: list[str] | None
@@ -394,16 +396,16 @@ class BackupManager:
394
396
  """Runs the backup process for a list of hierarchies.
395
397
 
396
398
  Will take the list of workspace IDs or read the list of workspace IDs
397
- from a CSV file and create backup for each those workspaces' hierarchies
399
+ from a CSV file and create backup for each of those workspaces' hierarchies
398
400
  in storage specified in the configuration.
399
401
  Workspace hierarchy means the workspace itself and all its direct and
400
402
  indirect children.
401
403
 
402
404
  Args:
403
- path_to_csv (str): Path to a CSV file containing a list of workspace IDs.
405
+ path_to_csv (str): Path to a CSV file containing a list of workspace IDs
404
406
  workspace_ids (list[str]): List of workspace IDs
405
407
  """
406
- self.backup(InputType.HIERARCHY, path_to_csv, workspace_ids)
408
+ self._backup(InputType.HIERARCHY, path_to_csv, workspace_ids)
407
409
 
408
410
  def backup_entire_organization(self) -> None:
409
411
  """Runs the backup process for the entire organization.
@@ -411,22 +413,22 @@ class BackupManager:
411
413
  Will create backup for all workspaces in the organization in storage
412
414
  specified in the configuration.
413
415
  """
414
- self.backup(InputType.ORGANIZATION)
416
+ self._backup(InputType.ORGANIZATION)
415
417
 
416
- def backup(
418
+ def _backup(
417
419
  self,
418
420
  input_type: InputType,
419
421
  path_to_csv: str | None = None,
420
422
  workspace_ids: list[str] | None = None,
421
423
  ) -> None:
422
- """Runs the backup process with selected input type."""
424
+ """Runs the backup process with the selected input type."""
423
425
  try:
424
426
  workspaces_to_export: list[str] = self.loader.get_ids_to_backup(
425
427
  input_type,
426
428
  path_to_csv,
427
429
  workspace_ids,
428
430
  )
429
- batches = self.split_to_batches(
431
+ batches = self._split_to_batches(
430
432
  workspaces_to_export, self.config.batch_size
431
433
  )
432
434
 
@@ -434,7 +436,7 @@ class BackupManager:
434
436
  f"Exporting {len(workspaces_to_export)} workspaces in {len(batches)} batches."
435
437
  )
436
438
 
437
- self.process_batches_in_parallel(batches)
439
+ self._process_batches_in_parallel(batches)
438
440
 
439
441
  self.logger.info("Backup completed")
440
442
  except Exception as e:
@@ -1,3 +1,4 @@
1
+ # (C) 2025 GoodData Corporation
1
2
  import datetime
2
3
  from dataclasses import dataclass
3
4
 
@@ -22,7 +23,7 @@ class DirNames:
22
23
 
23
24
  @dataclass(frozen=True)
24
25
  class ConcurrencyDefaults:
25
- MAX_WORKERS = 2
26
+ MAX_WORKERS = 1
26
27
  DEFAULT_BATCH_SIZE = 100
27
28
 
28
29
 
@@ -21,10 +21,40 @@ class S3StorageConfig(BaseModel):
21
21
 
22
22
  backup_path: str
23
23
  bucket: str
24
- profile: str = "default"
24
+ profile: Optional[str] = None
25
25
  aws_access_key_id: Optional[str] = None
26
26
  aws_secret_access_key: Optional[str] = None
27
- aws_default_region: Optional[str] = None
27
+ aws_default_region: Optional[str] = "us-east-1"
28
+
29
+ @classmethod
30
+ def from_iam_role(cls, backup_path: str, bucket: str) -> "S3StorageConfig":
31
+ """Use default IAM role or environment credentials."""
32
+ return cls(backup_path=backup_path, bucket=bucket)
33
+
34
+ @classmethod
35
+ def from_aws_credentials(
36
+ cls,
37
+ backup_path: str,
38
+ bucket: str,
39
+ aws_access_key_id: str,
40
+ aws_secret_access_key: str,
41
+ aws_default_region: str,
42
+ ) -> "S3StorageConfig":
43
+ """Use explicit AWS access keys and region."""
44
+ return cls(
45
+ backup_path=backup_path,
46
+ bucket=bucket,
47
+ aws_access_key_id=aws_access_key_id,
48
+ aws_secret_access_key=aws_secret_access_key,
49
+ aws_default_region=aws_default_region,
50
+ )
51
+
52
+ @classmethod
53
+ def from_aws_profile(
54
+ cls, backup_path: str, bucket: str, profile: str
55
+ ) -> "S3StorageConfig":
56
+ """Use a named AWS CLI profile."""
57
+ return cls(backup_path=backup_path, bucket=bucket, profile=profile)
28
58
 
29
59
 
30
60
  class LocalStorageConfig(BaseModel):
@@ -53,6 +83,14 @@ class BackupRestoreConfig(BaseModel):
53
83
  description="Batch size must be greater than 0",
54
84
  ),
55
85
  ] = Field(default=BackupSettings.DEFAULT_BATCH_SIZE)
86
+ max_workers: Annotated[
87
+ int,
88
+ Field(
89
+ gt=0,
90
+ lt=3,
91
+ description="Max workers must be greater than 0 and less than 3",
92
+ ),
93
+ ] = Field(default=BackupSettings.MAX_WORKERS)
56
94
 
57
95
  @classmethod
58
96
  def from_yaml(cls, conf_path: str) -> "BackupRestoreConfig":
@@ -22,6 +22,7 @@ class S3Storage(BackupStorage):
22
22
 
23
23
  self._config = conf.storage
24
24
  self._session = self._create_boto_session(self._config)
25
+ self._client = self._session.client("s3")
25
26
  self._resource = self._session.resource("s3")
26
27
  self._bucket = self._resource.Bucket(self._config.bucket) # type: ignore [missing library stubs]
27
28
  suffix = "/" if not self._config.backup_path.endswith("/") else ""
@@ -43,32 +44,40 @@ class S3Storage(BackupStorage):
43
44
  )
44
45
  except Exception:
45
46
  self.logger.warning(
46
- "Failed to create boto3 session with supplied credentials. Falling back to profile..."
47
+ "Failed to create boto3 session with supplied credentials."
48
+ )
49
+
50
+ if config.profile:
51
+ try:
52
+ return boto3.Session(profile_name=config.profile)
53
+ except Exception:
54
+ self.logger.warning(
55
+ f"AWS profile [{config.profile}] not found."
47
56
  )
48
57
 
49
58
  try:
50
- return boto3.Session(profile_name=config.profile)
59
+ return boto3.Session()
51
60
  except Exception:
52
- self.logger.warning(
53
- 'AWS profile "[default]" not found. Trying other fallback methods...'
61
+ self.logger.error(
62
+ "Failed to create boto3 session with default IAM role or environment credentials."
63
+ )
64
+ raise RuntimeError(
65
+ "Unable to create AWS session. Please check your AWS credentials, profile, or IAM role configuration."
54
66
  )
55
-
56
- return boto3.Session()
57
67
 
58
68
  def _verify_connection(self) -> None:
59
69
  """
60
70
  Pings the S3 bucket to verify that the connection is working.
61
71
  """
62
72
  try:
63
- # TODO: install boto3 s3 stubs
64
- self._resource.meta.client.head_bucket(Bucket=self._config.bucket)
73
+ self._client.head_bucket(Bucket=self._config.bucket)
65
74
  except Exception as e:
66
75
  raise RuntimeError(
67
76
  f"Failed to connect to S3 bucket {self._config.bucket}: {e}"
68
77
  )
69
78
 
70
79
  def export(self, folder: str, org_id: str) -> None:
71
- """Uploads the content of the folder to S3 as backup."""
80
+ """Uploads the content of the folder to S3 as a backup."""
72
81
  storage_path = f"{self._config.bucket}/{self._backup_path}"
73
82
  self.logger.info(f"Uploading {org_id} to {storage_path}")
74
83
  folder = f"{folder}/{org_id}"
@@ -77,10 +86,12 @@ class S3Storage(BackupStorage):
77
86
  export_path = (
78
87
  f"{self._backup_path}{org_id}/{full_path[len(folder) + 1 :]}/"
79
88
  )
80
- self._bucket.put_object(Key=export_path)
89
+ self._client.put_object(Bucket=self._config.bucket, Key=export_path)
81
90
 
82
91
  for file in files:
83
92
  full_path = os.path.join(subdir, file)
84
93
  with open(full_path, "rb") as data:
85
94
  export_path = f"{self._backup_path}{org_id}/{full_path[len(folder) + 1 :]}"
86
- self._bucket.put_object(Key=export_path, Body=data)
95
+ self._client.put_object(
96
+ Bucket=self._config.bucket, Key=export_path, Body=data
97
+ )
@@ -1,7 +1,7 @@
1
1
  # (C) 2025 GoodData Corporation
2
2
  [project]
3
3
  name = "gooddata-pipelines"
4
- version = "1.49.1.dev1"
4
+ version = "1.49.1.dev2"
5
5
  description = "GoodData Cloud lifecycle automation pipelines"
6
6
  authors = [{ name = "GoodData", email = "support@gooddata.com" }]
7
7
  license = { text = "MIT" }
@@ -11,7 +11,7 @@ dependencies = [
11
11
  "pydantic (>=2.11.3,<3.0.0)",
12
12
  "requests (>=2.32.3,<3.0.0)",
13
13
  "types-requests (>=2.32.0,<3.0.0)",
14
- "gooddata-sdk~=1.49.1.dev1",
14
+ "gooddata-sdk~=1.49.1.dev2",
15
15
  "boto3 (>=1.39.3,<2.0.0)",
16
16
  "boto3-stubs (>=1.39.3,<2.0.0)",
17
17
  "types-pyyaml (>=6.0.12.20250326,<7.0.0)"
@@ -106,13 +106,13 @@ def assert_not_called_with(target, *args, **kwargs):
106
106
 
107
107
  def test_get_s3_storage(backup_manager):
108
108
  """Test get_storage method with literal string as input."""
109
- s3_storage = backup_manager.get_storage(S3_CONFIG)
109
+ s3_storage = backup_manager._get_storage(S3_CONFIG)
110
110
  assert isinstance(s3_storage, S3Storage)
111
111
 
112
112
 
113
113
  def test_get_local_storage(backup_manager):
114
114
  """Test get_storage method with literal string as input."""
115
- local_storage = backup_manager.get_storage(LOCAL_CONFIG)
115
+ local_storage = backup_manager._get_storage(LOCAL_CONFIG)
116
116
  assert isinstance(local_storage, LocalStorage)
117
117
 
118
118
 
@@ -125,7 +125,7 @@ def test_archive_gooddata_layouts_to_zip(backup_manager):
125
125
  ),
126
126
  Path(tmpdir + "/services"),
127
127
  )
128
- backup_manager.archive_gooddata_layouts_to_zip(
128
+ backup_manager._archive_gooddata_layouts_to_zip(
129
129
  str(Path(tmpdir, "services"))
130
130
  )
131
131
 
@@ -202,7 +202,7 @@ def test_store_user_data_filters(backup_manager):
202
202
  ]
203
203
  }
204
204
  user_data_filter_folderlocation = f"{TEST_DATA_SUBDIR}/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/user_data_filters"
205
- backup_manager.store_user_data_filters(
205
+ backup_manager._store_user_data_filters(
206
206
  user_data_filters,
207
207
  Path(
208
208
  f"{TEST_DATA_SUBDIR}/test_exports/services/wsid1/20230713-132759-1_3_1_dev5",
@@ -245,7 +245,7 @@ def test_local_storage_export(backup_manager):
245
245
  ),
246
246
  org_store_location,
247
247
  )
248
- local_storage = backup_manager.get_storage(LOCAL_CONFIG)
248
+ local_storage = backup_manager._get_storage(LOCAL_CONFIG)
249
249
 
250
250
  local_storage.export(
251
251
  folder=tmpdir,
@@ -301,7 +301,7 @@ def test_split_to_batches(backup_manager):
301
301
  BackupBatch(["ws5"]),
302
302
  ]
303
303
 
304
- result = backup_manager.split_to_batches(workspaces, batch_size)
304
+ result = backup_manager._split_to_batches(workspaces, batch_size)
305
305
 
306
306
  for i, batch in enumerate(result):
307
307
  assert isinstance(batch, BackupBatch)
@@ -309,10 +309,10 @@ def test_split_to_batches(backup_manager):
309
309
 
310
310
 
311
311
  @mock.patch(
312
- "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager.get_workspace_export"
312
+ "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager._get_workspace_export"
313
313
  )
314
314
  @mock.patch(
315
- "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager.archive_gooddata_layouts_to_zip"
315
+ "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager._archive_gooddata_layouts_to_zip"
316
316
  )
317
317
  def test_process_batch_success(
318
318
  archive_gooddata_layouts_to_zip_mock,
@@ -323,7 +323,7 @@ def test_process_batch_success(
323
323
  backup_manager.storage = mock.Mock()
324
324
  batch = BackupBatch(["ws1", "ws2"])
325
325
 
326
- backup_manager.process_batch(
326
+ backup_manager._process_batch(
327
327
  batch=batch,
328
328
  stop_event=threading.Event(),
329
329
  retry_count=0,
@@ -335,10 +335,10 @@ def test_process_batch_success(
335
335
 
336
336
 
337
337
  @mock.patch(
338
- "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager.get_workspace_export"
338
+ "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager._get_workspace_export"
339
339
  )
340
340
  @mock.patch(
341
- "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager.archive_gooddata_layouts_to_zip"
341
+ "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager._archive_gooddata_layouts_to_zip"
342
342
  )
343
343
  def test_process_batch_retries_on_exception(
344
344
  archive_gooddata_layouts_to_zip_mock,
@@ -360,7 +360,7 @@ def test_process_batch_retries_on_exception(
360
360
 
361
361
  get_workspace_export_mock.side_effect = fail_once
362
362
 
363
- backup_manager.process_batch(
363
+ backup_manager._process_batch(
364
364
  batch=batch,
365
365
  stop_event=threading.Event(),
366
366
  )
@@ -374,10 +374,10 @@ def test_process_batch_retries_on_exception(
374
374
 
375
375
 
376
376
  @mock.patch(
377
- "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager.get_workspace_export"
377
+ "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager._get_workspace_export"
378
378
  )
379
379
  @mock.patch(
380
- "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager.archive_gooddata_layouts_to_zip"
380
+ "gooddata_pipelines.backup_and_restore.backup_manager.BackupManager._archive_gooddata_layouts_to_zip"
381
381
  )
382
382
  def test_process_batch_raises_after_max_retries(
383
383
  archive_gooddata_layouts_to_zip_mock,
@@ -390,7 +390,7 @@ def test_process_batch_raises_after_max_retries(
390
390
  get_workspace_export_mock.side_effect = Exception("fail")
391
391
 
392
392
  with pytest.raises(Exception) as exc_info:
393
- backup_manager.process_batch(
393
+ backup_manager._process_batch(
394
394
  batch=batch,
395
395
  stop_event=threading.Event(),
396
396
  retry_count=BackupSettings.MAX_RETRIES,