gooddata-pipelines 1.49.1.dev1__py3-none-any.whl → 1.50.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (22)
  1. gooddata_pipelines/__init__.py +7 -1
  2. gooddata_pipelines/api/gooddata_api.py +0 -54
  3. gooddata_pipelines/backup_and_restore/backup_manager.py +50 -44
  4. gooddata_pipelines/backup_and_restore/constants.py +2 -1
  5. gooddata_pipelines/backup_and_restore/models/storage.py +40 -2
  6. gooddata_pipelines/backup_and_restore/storage/s3_storage.py +22 -11
  7. gooddata_pipelines/provisioning/entities/users/models/permissions.py +23 -79
  8. gooddata_pipelines/provisioning/entities/users/models/user_groups.py +23 -50
  9. gooddata_pipelines/provisioning/entities/users/models/users.py +9 -49
  10. gooddata_pipelines/provisioning/entities/users/permissions.py +14 -6
  11. gooddata_pipelines/provisioning/entities/users/user_groups.py +7 -1
  12. gooddata_pipelines/provisioning/entities/users/users.py +3 -0
  13. gooddata_pipelines/provisioning/entities/workspaces/models.py +16 -15
  14. gooddata_pipelines/provisioning/entities/workspaces/workspace.py +52 -5
  15. gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py +9 -6
  16. gooddata_pipelines/provisioning/provisioning.py +24 -6
  17. gooddata_pipelines/provisioning/utils/context_objects.py +6 -6
  18. gooddata_pipelines/provisioning/utils/utils.py +3 -15
  19. {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.50.0.dist-info}/METADATA +2 -2
  20. {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.50.0.dist-info}/RECORD +22 -22
  21. {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.50.0.dist-info}/WHEEL +0 -0
  22. {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.50.0.dist-info}/licenses/LICENSE.txt +0 -0
gooddata_pipelines/provisioning/entities/users/models/permissions.py

@@ -1,7 +1,7 @@
 # (C) 2025 GoodData Corporation
-from abc import abstractmethod
+
 from enum import Enum
-from typing import Any, Iterator, TypeAlias, TypeVar
+from typing import Iterator, TypeAlias
 
 import attrs
 from gooddata_sdk.catalog.identifier import CatalogAssigneeIdentifier
@@ -14,85 +14,29 @@ from pydantic import BaseModel
 from gooddata_pipelines.provisioning.utils.exceptions import BaseUserException
 
 TargetsPermissionDict: TypeAlias = dict[str, dict[str, bool]]
-ConstructorType = TypeVar("ConstructorType", bound="ConstructorMixin")
 
 
-class PermissionType(str, Enum):
+class EntityType(str, Enum):
     # NOTE: Start using StrEnum with Python 3.11
     user = "user"
     user_group = "userGroup"
 
 
-class ConstructorMixin:
-    @staticmethod
-    def _get_id_and_type(
-        permission: dict[str, Any],
-    ) -> tuple[str, PermissionType]:
-        user_id: str | None = permission.get("user_id")
-        user_group_id: str | None = permission.get("ug_id")
-        if user_id and user_group_id:
-            raise ValueError("Only one of user_id or ug_id must be present")
-        elif user_id:
-            return user_id, PermissionType.user
-        elif user_group_id:
-            return user_group_id, PermissionType.user_group
-        else:
-            raise ValueError("Either user_id or ug_id must be present")
-
-    @classmethod
-    def from_list_of_dicts(
-        cls: type[ConstructorType], data: list[dict[str, Any]]
-    ) -> list[ConstructorType]:
-        """Creates a list of instances from list of dicts."""
-        # NOTE: We can use typing.Self for the return type in Python 3.11
-        permissions = []
-        for permission in data:
-            permissions.append(cls.from_dict(permission))
-        return permissions
-
-    @classmethod
-    @abstractmethod
-    def from_dict(cls, data: dict[str, Any]) -> Any:
-        """Construction form a dictionary to be implemented by subclasses."""
-        pass
-
-
-class PermissionIncrementalLoad(BaseModel, ConstructorMixin):
+class BasePermission(BaseModel):
     permission: str
     workspace_id: str
-    id_: str
-    type_: PermissionType
-    is_active: bool
+    entity_id: str
+    entity_type: EntityType
 
-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "PermissionIncrementalLoad":
-        """Returns an instance of PermissionIncrementalLoad from a dictionary."""
-        id_, target_type = cls._get_id_and_type(data)
-        return cls(
-            permission=data["ws_permissions"],
-            workspace_id=data["ws_id"],
-            id_=id_,
-            type_=target_type,
-            is_active=data["is_active"],
-        )
 
+class PermissionFullLoad(BasePermission):
+    """Input validator for full load of workspace permissions provisioning."""
 
-class PermissionFullLoad(BaseModel, ConstructorMixin):
-    permission: str
-    workspace_id: str
-    id_: str
-    type_: PermissionType
 
-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "PermissionFullLoad":
-        """Returns an instance of PermissionFullLoad from a dictionary."""
-        id_, target_type = cls._get_id_and_type(data)
-        return cls(
-            permission=data["ws_permissions"],
-            workspace_id=data["ws_id"],
-            id_=id_,
-            type_=target_type,
-        )
+class PermissionIncrementalLoad(BasePermission):
    """Input validator for incremental load of workspace permissions provisioning."""
+
+    is_active: bool
 
 
 @attrs.define
@@ -117,7 +61,7 @@ class PermissionDeclaration:
                 permission.assignee.id,
             )
 
-            if permission_type == PermissionType.user.value:
+            if permission_type == EntityType.user.value:
                 target_dict = users
             else:
                 target_dict = user_groups
@@ -170,7 +114,7 @@ class PermissionDeclaration:
 
         for user_id, permissions in self.users.items():
             assignee = CatalogAssigneeIdentifier(
-                id=user_id, type=PermissionType.user.value
+                id=user_id, type=EntityType.user.value
             )
             for declaration in self._permissions_for_target(
                 permissions, assignee
@@ -179,7 +123,7 @@ class PermissionDeclaration:
 
         for ug_id, permissions in self.user_groups.items():
             assignee = CatalogAssigneeIdentifier(
-                id=ug_id, type=PermissionType.user_group.value
+                id=ug_id, type=EntityType.user_group.value
             )
             for declaration in self._permissions_for_target(
                 permissions, assignee
@@ -200,15 +144,15 @@ class PermissionDeclaration:
         """
         target_dict = (
             self.users
-            if permission.type_ == PermissionType.user
+            if permission.entity_type == EntityType.user
             else self.user_groups
         )
 
-        if permission.id_ not in target_dict:
-            target_dict[permission.id_] = {}
+        if permission.entity_id not in target_dict:
+            target_dict[permission.entity_id] = {}
 
         is_active = permission.is_active
-        target_permissions = target_dict[permission.id_]
+        target_permissions = target_dict[permission.entity_id]
         permission_value = permission.permission
 
         if permission_value not in target_permissions:
@@ -233,14 +177,14 @@ class PermissionDeclaration:
         """
         target_dict = (
             self.users
-            if permission.type_ == PermissionType.user
+            if permission.entity_type == EntityType.user
            else self.user_groups
         )
 
-        if permission.id_ not in target_dict:
-            target_dict[permission.id_] = {}
+        if permission.entity_id not in target_dict:
+            target_dict[permission.entity_id] = {}
 
-        target_permissions = target_dict[permission.id_]
+        target_permissions = target_dict[permission.entity_id]
         permission_value = permission.permission
 
         if permission_value not in target_permissions:
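The models above drop the hand-written `from_dict` / `from_list_of_dicts` constructors in favor of plain Pydantic validation. A minimal usage sketch, assuming the classes are importable from the module path shown in the file list; the sample row is invented:

from gooddata_pipelines.provisioning.entities.users.models.permissions import (
    PermissionIncrementalLoad,
)

# One invented source row; the "entity_type" string is coerced by Pydantic
# into the EntityType enum defined in the diff above.
row = {
    "permission": "VIEW",
    "workspace_id": "ws_demo",
    "entity_id": "alice@example.com",
    "entity_type": "user",
    "is_active": True,
}
validated = PermissionIncrementalLoad(**row)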
gooddata_pipelines/provisioning/entities/users/models/user_groups.py

@@ -1,64 +1,37 @@
 # (C) 2025 GoodData Corporation
 
-from typing import Any
+from pydantic import BaseModel, Field, ValidationInfo, field_validator
 
-from pydantic import BaseModel
 
-from gooddata_pipelines.provisioning.utils.utils import SplitMixin
-
-
-class BaseUserGroup(BaseModel, SplitMixin):
+class UserGroupBase(BaseModel):
     user_group_id: str
     user_group_name: str
-    parent_user_groups: list[str]
+    parent_user_groups: list[str] = Field(default_factory=list)
 
+    @field_validator("user_group_name", mode="before")
     @classmethod
-    def _create_from_dict_data(
-        cls, user_group_data: dict[str, Any], delimiter: str = ","
-    ) -> dict[str, Any]:
-        """Helper method to extract common data from dict."""
-        parent_user_groups = cls.split(
-            user_group_data["parent_user_groups"], delimiter=delimiter
-        )
-        user_group_name = user_group_data["user_group_name"]
-        if not user_group_name:
-            user_group_name = user_group_data["user_group_id"]
-
-        return {
-            "user_group_id": user_group_data["user_group_id"],
-            "user_group_name": user_group_name,
-            "parent_user_groups": parent_user_groups,
-        }
-
-
-class UserGroupIncrementalLoad(BaseUserGroup):
-    is_active: bool
-
+    def validate_user_group_name(
+        cls, v: str | None, info: ValidationInfo
+    ) -> str:
+        """If user_group_name is None or empty, default to user_group_id."""
+        if not v:  # handles None and empty string
+            return info.data.get("user_group_id", "")
+        return v
+
+    @field_validator("parent_user_groups", mode="before")
     @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserGroupIncrementalLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        user_groups = []
-        for user_group in data:
-            base_data = cls._create_from_dict_data(user_group, delimiter)
-            base_data["is_active"] = user_group["is_active"]
+    def validate_parent_user_groups(cls, v: list[str] | None) -> list[str]:
+        """If parent_user_groups is None or empty, default to empty list."""
+        if not v:
+            return []
+        return v
 
-            user_groups.append(UserGroupIncrementalLoad(**base_data))
 
-        return user_groups
+class UserGroupFullLoad(UserGroupBase):
+    """Input validator for full load of user group provisioning."""
 
 
-class UserGroupFullLoad(BaseUserGroup):
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserGroupFullLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        user_groups = []
-        for user_group in data:
-            base_data = cls._create_from_dict_data(user_group, delimiter)
+class UserGroupIncrementalLoad(UserGroupBase):
+    """Input validator for incremental load of user group provisioning."""
 
-            user_groups.append(UserGroupFullLoad(**base_data))
-
-        return user_groups
+    is_active: bool
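The `mode="before"` validators replace the old `_create_from_dict_data` helper: an empty or missing name falls back to the group ID, and a missing parent list becomes an empty list. A small sketch with invented values, assuming the import path from the file list:

from gooddata_pipelines.provisioning.entities.users.models.user_groups import (
    UserGroupIncrementalLoad,
)

group = UserGroupIncrementalLoad(
    user_group_id="analysts",
    user_group_name=None,     # before-validator falls back to "analysts"
    parent_user_groups=None,  # before-validator normalizes to []
    is_active=True,
)
assert group.user_group_name == "analysts"
assert group.parent_user_groups == []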
gooddata_pipelines/provisioning/entities/users/models/users.py

@@ -5,10 +5,8 @@ from typing import Any
 from gooddata_sdk.catalog.user.entity_model.user import CatalogUser
 from pydantic import BaseModel
 
-from gooddata_pipelines.provisioning.utils.utils import SplitMixin
 
-
-class BaseUser(BaseModel, SplitMixin):
+class BaseUser(BaseModel):
     """Base class containing shared user fields and functionality."""
 
     user_id: str
@@ -18,21 +16,6 @@ class BaseUser(BaseModel, SplitMixin):
     auth_id: str | None
     user_groups: list[str]
 
-    @classmethod
-    def _create_from_dict_data(
-        cls, user_data: dict[str, Any], delimiter: str = ","
-    ) -> dict[str, Any]:
-        """Helper method to extract common data from dict."""
-        user_groups = cls.split(user_data["user_groups"], delimiter=delimiter)
-        return {
-            "user_id": user_data["user_id"],
-            "firstname": user_data["firstname"],
-            "lastname": user_data["lastname"],
-            "email": user_data["email"],
-            "auth_id": user_data["auth_id"],
-            "user_groups": user_groups,
-        }
-
     @classmethod
     def _create_from_sdk_data(cls, obj: CatalogUser) -> dict[str, Any]:
         """Helper method to extract common data from SDK object."""
@@ -68,47 +51,24 @@ class BaseUser(BaseModel, SplitMixin):
         )
 
 
-class UserIncrementalLoad(BaseUser):
-    """User model for incremental load operations with active status tracking."""
-
-    is_active: bool
-
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserIncrementalLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        converted_users = []
-        for user in data:
-            base_data = cls._create_from_dict_data(user, delimiter)
-            base_data["is_active"] = user["is_active"]
-            converted_users.append(cls(**base_data))
-        return converted_users
+class UserFullLoad(BaseUser):
+    """Input validator for full load of user provisioning."""
 
     @classmethod
-    def from_sdk_obj(cls, obj: CatalogUser) -> "UserIncrementalLoad":
+    def from_sdk_obj(cls, obj: CatalogUser) -> "UserFullLoad":
         """Creates GDUserTarget from CatalogUser SDK object."""
         base_data = cls._create_from_sdk_data(obj)
-        base_data["is_active"] = True
         return cls(**base_data)
 
 
-class UserFullLoad(BaseUser):
-    """User model for full load operations."""
+class UserIncrementalLoad(BaseUser):
+    """Input validator for incremental load of user provisioning."""
 
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserFullLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        converted_users = []
-        for user in data:
-            base_data = cls._create_from_dict_data(user, delimiter)
-            converted_users.append(cls(**base_data))
-        return converted_users
+    is_active: bool
 
     @classmethod
-    def from_sdk_obj(cls, obj: CatalogUser) -> "UserFullLoad":
+    def from_sdk_obj(cls, obj: CatalogUser) -> "UserIncrementalLoad":
         """Creates GDUserTarget from CatalogUser SDK object."""
         base_data = cls._create_from_sdk_data(obj)
+        base_data["is_active"] = True
         return cls(**base_data)
gooddata_pipelines/provisioning/entities/users/permissions.py

@@ -6,10 +6,10 @@ from typing import TypeVar
 
 from gooddata_pipelines.api.exceptions import GoodDataApiException
 from gooddata_pipelines.provisioning.entities.users.models.permissions import (
+    EntityType,
     PermissionDeclaration,
     PermissionFullLoad,
     PermissionIncrementalLoad,
-    PermissionType,
     TargetsPermissionDict,
     WSPermissionsDeclarations,
 )
@@ -28,12 +28,18 @@ class PermissionProvisioner(
     """Provisioning class for user permissions in GoodData workspaces.
 
     This class handles the provisioning of user permissions based on the provided
-    source data.
+    source data. Use the `full_load` or `incremental_load`
+    methods to run the provisioning.
     """
 
     source_group_incremental: list[PermissionIncrementalLoad]
     source_group_full: list[PermissionFullLoad]
 
+    FULL_LOAD_TYPE: type[PermissionFullLoad] = PermissionFullLoad
+    INCREMENTAL_LOAD_TYPE: type[PermissionIncrementalLoad] = (
+        PermissionIncrementalLoad
+    )
+
     def _get_ws_declaration(self, ws_id: str) -> PermissionDeclaration:
         users: TargetsPermissionDict = {}
         user_groups: TargetsPermissionDict = {}
@@ -47,7 +53,7 @@ class PermissionProvisioner(
             )
             target_dict = (
                 users
-                if permission_type == PermissionType.user.value
+                if permission_type == EntityType.user.value
                 else user_groups
             )
 
@@ -105,11 +111,13 @@ class PermissionProvisioner(
         self, permission: PermissionFullLoad | PermissionIncrementalLoad
     ) -> None:
         """Validates if the permission is correctly defined."""
-        if permission.type_ == PermissionType.user:
-            self._api.get_user(permission.id_, error_message="User not found")
+        if permission.entity_type == EntityType.user:
+            self._api.get_user(
+                permission.entity_id, error_message="User not found"
+            )
         else:
             self._api.get_user_group(
-                permission.id_, error_message="User group not found"
+                permission.entity_id, error_message="User group not found"
             )
 
         self._api.get_workspace(
gooddata_pipelines/provisioning/entities/users/user_groups.py

@@ -21,13 +21,19 @@ class UserGroupProvisioner(
     """Provisioning class for user groups in GoodData workspaces.
 
     This class handles the creation, update, and deletion of user groups
-    based on the provided source data.
+    based on the provided source data. Use the `full_load` or `incremental_load`
+    methods to run the provisioning.
     """
 
     source_group_incremental: list[UserGroupIncrementalLoad]
     source_group_full: list[UserGroupFullLoad]
     upstream_user_groups: list[CatalogUserGroup]
 
+    FULL_LOAD_TYPE: type[UserGroupFullLoad] = UserGroupFullLoad
+    INCREMENTAL_LOAD_TYPE: type[UserGroupIncrementalLoad] = (
+        UserGroupIncrementalLoad
+    )
+
     @staticmethod
     def _is_changed(
         group: UserGroupModel, existing_group: CatalogUserGroup
gooddata_pipelines/provisioning/entities/users/users.py

@@ -30,6 +30,9 @@ class UserProvisioner(Provisioning[UserFullLoad, UserIncrementalLoad]):
     source_group_incremental: list[UserIncrementalLoad]
     source_group_full: list[UserFullLoad]
 
+    FULL_LOAD_TYPE: type[UserFullLoad] = UserFullLoad
+    INCREMENTAL_LOAD_TYPE: type[UserIncrementalLoad] = UserIncrementalLoad
+
     def __init__(self, host: str, token: str) -> None:
         super().__init__(host, token)
         self.upstream_user_cache: dict[UserId, UserModel] = {}
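The `FULL_LOAD_TYPE` / `INCREMENTAL_LOAD_TYPE` class attributes added to `PermissionProvisioner`, `UserGroupProvisioner`, and `UserProvisioner` (and to `WorkspaceProvisioner` below) suggest the shared `Provisioning` base class now validates raw input generically through these hooks. A hedged sketch of that pattern; the base-class internals here are illustrative, not the package's actual code:

from typing import Any, Generic, TypeVar

from pydantic import BaseModel

FullT = TypeVar("FullT", bound=BaseModel)
IncT = TypeVar("IncT", bound=BaseModel)


class ProvisioningSketch(Generic[FullT, IncT]):
    # Each subclass pins these to its own Pydantic input validators.
    FULL_LOAD_TYPE: type[FullT]
    INCREMENTAL_LOAD_TYPE: type[IncT]

    def validate_full_load(self, rows: list[dict[str, Any]]) -> list[FullT]:
        # One shared place to turn raw dict rows into validated models.
        return [self.FULL_LOAD_TYPE(**row) for row in rows]

    def validate_incremental_load(
        self, rows: list[dict[str, Any]]
    ) -> list[IncT]:
        return [self.INCREMENTAL_LOAD_TYPE(**row) for row in rows]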
gooddata_pipelines/provisioning/entities/workspaces/models.py

@@ -1,29 +1,27 @@
 # (C) 2025 GoodData Corporation
 """Module containing models related to workspace provisioning in GoodData Cloud."""
 
-from dataclasses import dataclass, field
 from typing import Literal
 
+import attrs
 from pydantic import BaseModel, ConfigDict
 
 
-@dataclass
+@attrs.define
 class WorkspaceDataMaps:
     """Dataclass to hold various mappings related to workspace data."""
 
-    child_to_parent_id_map: dict[str, str] = field(default_factory=dict)
-    workspace_id_to_wdf_map: dict[str, dict[str, list[str]]] = field(
-        default_factory=dict
+    child_to_parent_id_map: dict[str, str] = attrs.field(factory=dict)
+    workspace_id_to_wdf_map: dict[str, dict[str, list[str]]] = attrs.field(
+        factory=dict
     )
-    parent_ids: set[str] = field(default_factory=set)
-    source_ids: set[str] = field(default_factory=set)
-    workspace_id_to_name_map: dict[str, str] = field(default_factory=dict)
-    upstream_ids: set[str] = field(default_factory=set)
+    parent_ids: set[str] = attrs.field(factory=set)
+    source_ids: set[str] = attrs.field(factory=set)
+    workspace_id_to_name_map: dict[str, str] = attrs.field(factory=dict)
+    upstream_ids: set[str] = attrs.field(factory=set)
 
 
-class WorkspaceFullLoad(BaseModel):
-    """Model representing input for provisioning of workspaces in GoodData Cloud."""
-
+class WorkspaceBase(BaseModel):
     model_config = ConfigDict(coerce_numbers_to_str=True)
 
     parent_id: str
@@ -33,10 +31,13 @@ class WorkspaceFullLoad(BaseModel):
     workspace_data_filter_values: list[str] | None = None
 
 
-class WorkspaceIncrementalLoad(WorkspaceFullLoad):
-    """Model representing input for incremental provisioning of workspaces in GoodData Cloud."""
+class WorkspaceFullLoad(WorkspaceBase):
+    """Input validator for full load of workspace provisioning."""
+
+
+class WorkspaceIncrementalLoad(WorkspaceBase):
+    """Input validator for incremental load of workspace provisioning."""
 
-    # TODO: double check that the model loads the data correctly, write a test
     is_active: bool
 
 
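The switch from `dataclasses` to `attrs` in `WorkspaceDataMaps` is behavior-preserving: `attrs.field(factory=...)` is the `attrs` spelling of `field(default_factory=...)`. A minimal side-by-side sketch:

from dataclasses import dataclass, field

import attrs


@dataclass
class MapsWithDataclass:
    parent_ids: set[str] = field(default_factory=set)


@attrs.define
class MapsWithAttrs:
    parent_ids: set[str] = attrs.field(factory=set)


# Both produce a fresh, per-instance empty set.
assert MapsWithDataclass().parent_ids == MapsWithAttrs().parent_ids == set()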
gooddata_pipelines/provisioning/entities/workspaces/workspace.py

@@ -35,11 +35,19 @@ class WorkspaceProvisioner(
     source_group_full: list[WorkspaceFullLoad]
     source_group_incremental: list[WorkspaceIncrementalLoad]
 
+    FULL_LOAD_TYPE: type[WorkspaceFullLoad] = WorkspaceFullLoad
+    INCREMENTAL_LOAD_TYPE: type[WorkspaceIncrementalLoad] = (
+        WorkspaceIncrementalLoad
+    )
+
+    upstream_group: list[CatalogWorkspace]
+
     def __init__(self, *args: str, **kwargs: str) -> None:
         """Creates an instance of the WorkspaceProvisioner.
 
         Calls the superclass constructor and initializes the validator, parser,
-        and maps for workspace data.
+        and maps for workspace data. Use the `full_load` or `incremental_load`
+        methods to run the provisioning.
         """
         super().__init__(*args, **kwargs)
         self.validator: WorkspaceDataValidator = WorkspaceDataValidator(
@@ -91,10 +99,11 @@ class WorkspaceProvisioner(
         workspace_ids_to_update: set[str],
         child_to_parent_map: dict[str, str],
         workspace_id_to_wdf_map: dict[str, dict[str, list[str]]],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> None:
         action: Literal["CREATE", "UPDATE"]
 
-        for source_workspace in self.source_group_full:
+        for source_workspace in source_group:
             if source_workspace.workspace_id in workspace_ids_to_update:
                 action = "UPDATE"
             elif source_workspace.workspace_id in workspace_ids_to_create:
@@ -199,8 +208,8 @@ class WorkspaceProvisioner(
         )
 
         # Get upstream children of all parent workspaces.
-        self.upstream_group: list[CatalogWorkspace] = (
-            self._api.get_panther_children_workspaces(self.maps.parent_ids)
+        self.upstream_group = self._api.get_panther_children_workspaces(
+            self.maps.parent_ids
         )
 
         # Set maps that require upstream data.
@@ -234,6 +243,7 @@ class WorkspaceProvisioner(
             self.ids_to_update,
             self.maps.child_to_parent_id_map,
             self.maps.workspace_id_to_wdf_map,
+            self.source_group_full,
         )
 
         # Check WDF settings of ignored workspaces.
@@ -259,5 +269,42 @@ class WorkspaceProvisioner(
 
     def _provision_incremental_load(self) -> None:
         """Incremental workspace provisioning."""
+        # Set the maps based on the source data.
+        self.maps = self.parser.set_maps_based_on_source(
+            self.maps, self.source_group_incremental
+        )
+
+        # Get upstream children of all parent workspaces.
+        self.upstream_group = self._api.get_panther_children_workspaces(
+            self.maps.parent_ids
+        )
+
+        # Set maps that require upstream data.
+        self.maps = self.parser.set_maps_with_upstream_data(
+            self.maps, self.source_group_incremental, self.upstream_group
+        )
 
-        raise NotImplementedError("Not implemented yet.")
+        # Create an instance of WDF manager with the created maps.
+        self.wdf_manager = WorkspaceDataFilterManager(self._api, self.maps)
+
+        # Iterate through the source data and sort workspace ID to groups
+        ids_to_update: set[str] = set()
+        ids_to_delete: set[str] = set()
+
+        for workspace in self.source_group_incremental:
+            if workspace.is_active:
+                ids_to_update.add(workspace.workspace_id)
+            else:
+                ids_to_delete.add(workspace.workspace_id)
+
+        self._create_or_update_panther_workspaces(
+            set(),
+            ids_to_update,
+            self.maps.child_to_parent_id_map,
+            self.maps.workspace_id_to_wdf_map,
+            self.source_group_incremental,
+        )
+
+        self.delete_panther_workspaces(
+            ids_to_delete, self.maps.workspace_id_to_name_map
+        )
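The newly implemented `_provision_incremental_load` mirrors the full-load path but routes each source workspace by its `is_active` flag: active rows are created or updated, inactive rows are deleted. The routing logic in isolation, with invented rows:

# Invented sample rows; only the fields relevant to the routing are shown.
rows = [
    {"workspace_id": "ws_keep", "is_active": True},
    {"workspace_id": "ws_drop", "is_active": False},
]

ids_to_update = {r["workspace_id"] for r in rows if r["is_active"]}
ids_to_delete = {r["workspace_id"] for r in rows if not r["is_active"]}

assert ids_to_update == {"ws_keep"}
assert ids_to_delete == {"ws_drop"}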
gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py

@@ -9,6 +9,7 @@ from gooddata_sdk.catalog.workspace.entity_model.workspace import (
 from gooddata_pipelines.provisioning.entities.workspaces.models import (
     WorkspaceDataMaps,
     WorkspaceFullLoad,
+    WorkspaceIncrementalLoad,
 )
 
 
@@ -17,7 +18,7 @@ class WorkspaceDataParser:
 
     @staticmethod
     def _get_id_to_name_map(
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
         upstream_group: list[CatalogWorkspace],
     ) -> dict[str, str]:
         """Creates a map of workspace IDs to their names for all known workspaces."""
@@ -33,7 +34,7 @@ class WorkspaceDataParser:
 
     @staticmethod
     def _get_child_to_parent_map(
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> dict[str, str]:
         """Creates a map of child workspace IDs to their parent workspace IDs."""
         child_to_parent_map: dict[str, str] = {
@@ -45,7 +46,8 @@ class WorkspaceDataParser:
 
     @staticmethod
     def _get_set_of_ids_from_source(
-        source_group: list[WorkspaceFullLoad], column_name: str
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
+        column_name: str,
     ) -> set[str]:
         """Creates a set of unique parent workspace IDs."""
         set_of_ids: set[str] = {
@@ -64,7 +66,8 @@ class WorkspaceDataParser:
         return set_of_ids
 
     def _get_child_to_wdfs_map(
-        self, source_group: list[WorkspaceFullLoad]
+        self,
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> dict[str, dict[str, list[str]]]:
         """Creates a map of child workspace IDs to their WDF IDs."""
         # TODO: Use objects or a more transparent data structure instead of this.
@@ -88,7 +91,7 @@ class WorkspaceDataParser:
     def set_maps_based_on_source(
         self,
         map_object: WorkspaceDataMaps,
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> WorkspaceDataMaps:
         """Creates maps which are dependent on the source group only."""
         map_object.child_to_parent_id_map = self._get_child_to_parent_map(
@@ -109,7 +112,7 @@ class WorkspaceDataParser:
     def set_maps_with_upstream_data(
         self,
         map_object: WorkspaceDataMaps,
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
         upstream_group: list[CatalogWorkspace],
     ) -> WorkspaceDataMaps:
         """Creates maps which are dependent on both the source group and upstream group."""