gooddata-pipelines 1.49.1.dev2__py3-none-any.whl → 1.50.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gooddata_pipelines/__init__.py +7 -1
- gooddata_pipelines/api/gooddata_api.py +0 -54
- gooddata_pipelines/backup_and_restore/backup_manager.py +6 -2
- gooddata_pipelines/provisioning/entities/users/models/permissions.py +23 -79
- gooddata_pipelines/provisioning/entities/users/models/user_groups.py +23 -50
- gooddata_pipelines/provisioning/entities/users/models/users.py +9 -49
- gooddata_pipelines/provisioning/entities/users/permissions.py +14 -6
- gooddata_pipelines/provisioning/entities/users/user_groups.py +7 -1
- gooddata_pipelines/provisioning/entities/users/users.py +3 -0
- gooddata_pipelines/provisioning/entities/workspaces/models.py +16 -15
- gooddata_pipelines/provisioning/entities/workspaces/workspace.py +52 -5
- gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py +9 -6
- gooddata_pipelines/provisioning/provisioning.py +24 -6
- gooddata_pipelines/provisioning/utils/context_objects.py +6 -6
- gooddata_pipelines/provisioning/utils/utils.py +3 -15
- {gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/METADATA +2 -2
- {gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/RECORD +19 -19
- {gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/WHEEL +0 -0
- {gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/licenses/LICENSE.txt +0 -0
gooddata_pipelines/__init__.py
CHANGED
@@ -19,6 +19,7 @@ from .provisioning.entities.user_data_filters.user_data_filters import (
     UserDataFilterProvisioner,
 )
 from .provisioning.entities.users.models.permissions import (
+    EntityType,
     PermissionFullLoad,
     PermissionIncrementalLoad,
 )
@@ -33,7 +34,10 @@ from .provisioning.entities.users.models.users import (
 from .provisioning.entities.users.permissions import PermissionProvisioner
 from .provisioning.entities.users.user_groups import UserGroupProvisioner
 from .provisioning.entities.users.users import UserProvisioner
-from .provisioning.entities.workspaces.models import WorkspaceFullLoad
+from .provisioning.entities.workspaces.models import (
+    WorkspaceFullLoad,
+    WorkspaceIncrementalLoad,
+)
 from .provisioning.entities.workspaces.workspace import WorkspaceProvisioner

 __all__ = [
@@ -52,8 +56,10 @@ __all__ = [
     "UserGroupFullLoad",
     "UserProvisioner",
     "UserGroupProvisioner",
+    "WorkspaceIncrementalLoad",
     "PermissionProvisioner",
     "UserDataFilterProvisioner",
     "UserDataFilterFullLoad",
+    "EntityType",
    "__version__",
 ]
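The new exports make the incremental workspace model and the shared entity-type enum part of the package's public surface. A minimal import sketch (the workspace field values are illustrative; the exact required field set is defined by the models diff further below):

from gooddata_pipelines import EntityType, WorkspaceIncrementalLoad

# Illustrative record; field names follow the WorkspaceBase/WorkspaceIncrementalLoad
# models shown later in this diff.
row = WorkspaceIncrementalLoad(
    parent_id="parent_ws",
    workspace_id="child_ws",
    workspace_name="Child workspace",
    is_active=True,  # False marks the workspace for deletion
)
print(EntityType.user_group.value)  # "userGroup"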
gooddata_pipelines/api/gooddata_api.py
CHANGED
@@ -7,9 +7,6 @@ from typing import Any

 import requests

-# TODO: Limit the use of "typing.Any". Improve readability by using either models
-# or typed dicts.
-
 TIMEOUT = 60
 REQUEST_PAGE_SIZE = 250
 API_VERSION = "v1"
@@ -55,42 +52,6 @@ class ApiMethods:
         """
         return f"{self.base_url}{endpoint}"

-    def get_custom_application_setting(
-        self, workspace_id: str, setting_id: str
-    ) -> requests.Response:
-        """Gets a custom application setting.
-
-        Args:
-            workspace_id (str): The ID of the workspace.
-            setting_id (str): The ID of the custom application setting.
-        Returns:
-            requests.Response: The response from the server containing the
-                custom application setting.
-        """
-        url = f"/entities/workspaces/{workspace_id}/customApplicationSettings/{setting_id}"
-        return self._get(url)
-
-    def put_custom_application_setting(
-        self, workspace_id: str, setting_id: str, data: dict[str, Any]
-    ) -> requests.Response:
-        url = f"/entities/workspaces/{workspace_id}/customApplicationSettings/{setting_id}"
-        return self._put(url, data, self.headers)
-
-    def post_custom_application_setting(
-        self, workspace_id: str, data: dict[str, Any]
-    ) -> requests.Response:
-        """Creates a custom application setting for a given workspace.
-
-        Args:
-            workspace_id (str): The ID of the workspace.
-            data (dict[str, Any]): The data for the custom application setting.
-        Returns:
-            requests.Response: The response from the server containing the
-                created custom application setting.
-        """
-        url = f"/entities/workspaces/{workspace_id}/customApplicationSettings/"
-        return self._post(url, data, self.headers)
-
     def get_all_workspace_data_filters(
         self, workspace_id: str
     ) -> requests.Response:
@@ -201,21 +162,6 @@ class ApiMethods:
             endpoint,
         )

-    def post_workspace_data_filter(
-        self, workspace_id: str, data: dict[str, Any]
-    ) -> requests.Response:
-        """Creates a workspace data filter for a given workspace.
-
-        Args:
-            workspace_id (str): The ID of the workspace.
-            data (dict[str, Any]): The data for the workspace data filter.
-        Returns:
-            requests.Response: The response from the server containing the
-                created workspace data filter.
-        """
-        endpoint = f"/entities/workspaces/{workspace_id}/workspaceDataFilters"
-        return self._post(endpoint, data, self.headers)
-
     def get_user_data_filters(self, workspace_id: str) -> requests.Response:
         """Gets the user data filters for a given workspace."""
         endpoint = f"/layout/workspaces/{workspace_id}/userDataFilters"
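The custom-application-setting wrappers and the data-filter POST were dropped. For orientation, the surviving methods all follow the same thin pattern: build a relative endpoint and let a private helper prefix `base_url` and attach headers and timeout. A minimal sketch of that pattern under assumed names (`MiniApiClient` and its `_get` are illustrative, not the package's actual class):

import requests

TIMEOUT = 60


class MiniApiClient:
    """Illustrative endpoint-building wrapper; not the package's actual class."""

    def __init__(self, host: str, token: str) -> None:
        self.base_url = f"{host}/api/v1"
        self.headers = {"Authorization": f"Bearer {token}"}

    def _get(self, endpoint: str) -> requests.Response:
        # Every call site passes a relative endpoint; the prefix lives here.
        return requests.get(
            f"{self.base_url}{endpoint}", headers=self.headers, timeout=TIMEOUT
        )

    def get_user_data_filters(self, workspace_id: str) -> requests.Response:
        return self._get(f"/layout/workspaces/{workspace_id}/userDataFilters")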
gooddata_pipelines/backup_and_restore/backup_manager.py
CHANGED
@@ -376,7 +376,9 @@ class BackupManager:
             raise

     def backup_workspaces(
-        self, …
+        self,
+        path_to_csv: str | None = None,
+        workspace_ids: list[str] | None = None,
     ) -> None:
         """Runs the backup process for a list of workspace IDs.
@@ -391,7 +393,9 @@ class BackupManager:
         self._backup(InputType.LIST_OF_WORKSPACES, path_to_csv, workspace_ids)

     def backup_hierarchies(
-        self, …
+        self,
+        path_to_csv: str | None = None,
+        workspace_ids: list[str] | None = None,
     ) -> None:
         """Runs the backup process for a list of hierarchies.
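Both backup entry points now take keyword-friendly, optional inputs: a CSV of workspace IDs or an explicit ID list. A usage sketch (the `manager` construction is omitted; argument names are as in the new signatures):

def run_backups(manager) -> None:
    # Either hand over a CSV listing workspace IDs…
    manager.backup_workspaces(path_to_csv="workspaces.csv")
    # …or pass the IDs directly; both parameters default to None.
    manager.backup_hierarchies(workspace_ids=["ws_sales", "ws_finance"])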
gooddata_pipelines/provisioning/entities/users/models/permissions.py
CHANGED
@@ -1,7 +1,7 @@
 # (C) 2025 GoodData Corporation
-from abc import abstractmethod
+
 from enum import Enum
-from typing import Any, Iterator, TypeAlias, TypeVar
+from typing import Iterator, TypeAlias

 import attrs
 from gooddata_sdk.catalog.identifier import CatalogAssigneeIdentifier
@@ -14,85 +14,29 @@ from pydantic import BaseModel
 from gooddata_pipelines.provisioning.utils.exceptions import BaseUserException

 TargetsPermissionDict: TypeAlias = dict[str, dict[str, bool]]
-ConstructorType = TypeVar("ConstructorType", bound="ConstructorMixin")


-class PermissionType(str, Enum):
+class EntityType(str, Enum):
     # NOTE: Start using StrEnum with Python 3.11
     user = "user"
     user_group = "userGroup"


-class ConstructorMixin:
-    @staticmethod
-    def _get_id_and_type(
-        permission: dict[str, Any],
-    ) -> tuple[str, PermissionType]:
-        user_id: str | None = permission.get("user_id")
-        user_group_id: str | None = permission.get("ug_id")
-        if user_id and user_group_id:
-            raise ValueError("Only one of user_id or ug_id must be present")
-        elif user_id:
-            return user_id, PermissionType.user
-        elif user_group_id:
-            return user_group_id, PermissionType.user_group
-        else:
-            raise ValueError("Either user_id or ug_id must be present")
-
-    @classmethod
-    def from_list_of_dicts(
-        cls: type[ConstructorType], data: list[dict[str, Any]]
-    ) -> list[ConstructorType]:
-        """Creates a list of instances from list of dicts."""
-        # NOTE: We can use typing.Self for the return type in Python 3.11
-        permissions = []
-        for permission in data:
-            permissions.append(cls.from_dict(permission))
-        return permissions
-
-    @classmethod
-    @abstractmethod
-    def from_dict(cls, data: dict[str, Any]) -> Any:
-        """Construction form a dictionary to be implemented by subclasses."""
-        pass
-
-
-class PermissionIncrementalLoad(BaseModel, ConstructorMixin):
+class BasePermission(BaseModel):
     permission: str
     workspace_id: str
-    id_: str
-    type_: PermissionType
-    is_active: bool
+    entity_id: str
+    entity_type: EntityType

-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "PermissionIncrementalLoad":
-        """Returns an instance of PermissionIncrementalLoad from a dictionary."""
-        id_, target_type = cls._get_id_and_type(data)
-        return cls(
-            permission=data["ws_permissions"],
-            workspace_id=data["ws_id"],
-            id_=id_,
-            type_=target_type,
-            is_active=data["is_active"],
-        )

+class PermissionFullLoad(BasePermission):
+    """Input validator for full load of workspace permissions provisioning."""

-class PermissionFullLoad(BaseModel, ConstructorMixin):
-    permission: str
-    workspace_id: str
-    id_: str
-    type_: PermissionType

-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "PermissionFullLoad":
-        …
-        return cls(
-            permission=data["ws_permissions"],
-            workspace_id=data["ws_id"],
-            id_=id_,
-            type_=target_type,
-        )
+class PermissionIncrementalLoad(BasePermission):
+    """Input validator for incremental load of workspace permissions provisioning."""
+
+    is_active: bool


 @attrs.define
@@ -117,7 +61,7 @@ class PermissionDeclaration:
                 permission.assignee.id,
             )

-            if permission_type == PermissionType.user.value:
+            if permission_type == EntityType.user.value:
                 target_dict = users
             else:
                 target_dict = user_groups
@@ -170,7 +114,7 @@ class PermissionDeclaration:

         for user_id, permissions in self.users.items():
             assignee = CatalogAssigneeIdentifier(
-                id=user_id, type=PermissionType.user.value
+                id=user_id, type=EntityType.user.value
             )
             for declaration in self._permissions_for_target(
                 permissions, assignee
@@ -179,7 +123,7 @@ class PermissionDeclaration:

         for ug_id, permissions in self.user_groups.items():
             assignee = CatalogAssigneeIdentifier(
-                id=ug_id, type=PermissionType.user_group.value
+                id=ug_id, type=EntityType.user_group.value
             )
             for declaration in self._permissions_for_target(
                 permissions, assignee
@@ -200,15 +144,15 @@ class PermissionDeclaration:
         """
         target_dict = (
             self.users
-            if permission.type_ == PermissionType.user
+            if permission.entity_type == EntityType.user
             else self.user_groups
         )

-        if permission.id_ not in target_dict:
-            target_dict[permission.id_] = {}
+        if permission.entity_id not in target_dict:
+            target_dict[permission.entity_id] = {}

         is_active = permission.is_active
-        target_permissions = target_dict[permission.id_]
+        target_permissions = target_dict[permission.entity_id]
         permission_value = permission.permission

         if permission_value not in target_permissions:
@@ -233,14 +177,14 @@ class PermissionDeclaration:
         """
         target_dict = (
             self.users
-            if permission.type_ == PermissionType.user
+            if permission.entity_type == EntityType.user
            else self.user_groups
         )

-        if permission.id_ not in target_dict:
-            target_dict[permission.id_] = {}
+        if permission.entity_id not in target_dict:
+            target_dict[permission.entity_id] = {}

-        target_permissions = target_dict[permission.id_]
+        target_permissions = target_dict[permission.entity_id]
         permission_value = permission.permission

         if permission_value not in target_permissions:
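The dict-driven constructors (`from_dict`, `from_list_of_dicts`) are gone; both permission models are now plain Pydantic validators over a shared `BasePermission`, with `entity_id`/`entity_type` replacing `id_`/`type_`. A sketch with illustrative values (the permission string follows GoodData's usual names such as "VIEW"; treat the specific value as an assumption):

from gooddata_pipelines import EntityType, PermissionIncrementalLoad

record = PermissionIncrementalLoad(
    permission="VIEW",
    workspace_id="ws_sales",
    entity_id="jane.doe",
    entity_type=EntityType.user,  # str-enum: the raw value "user" also validates
    is_active=True,
)
print(record.entity_type.value)  # "user"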
gooddata_pipelines/provisioning/entities/users/models/user_groups.py
CHANGED
@@ -1,64 +1,37 @@
 # (C) 2025 GoodData Corporation

-from typing import Any
-
-from pydantic import BaseModel
-
-from gooddata_pipelines.provisioning.utils.utils import SplitMixin
+from pydantic import BaseModel, Field, ValidationInfo, field_validator


-class BaseUserGroup(BaseModel, SplitMixin):
+class UserGroupBase(BaseModel):
     user_group_id: str
     user_group_name: str
-    parent_user_groups: list[str]
-
-    @classmethod
-    def _create_from_dict_data(
-        cls, user_group_data: dict[str, Any], delimiter: str = ","
-    ) -> dict[str, Any]:
-        """…"""
-        …
-            user_group_name = user_group_data["user_group_id"]
-        …
-        return {
-            "user_group_id": user_group_data["user_group_id"],
-            "user_group_name": user_group_name,
-            "parent_user_groups": parent_user_groups,
-        }
-
-
-class UserGroupIncrementalLoad(BaseUserGroup):
-    is_active: bool
-
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserGroupIncrementalLoad"]:
-        """…"""
-        user_groups = []
-        for user_group in data:
-            base_data = cls._create_from_dict_data(user_group, delimiter)
-            base_data["is_active"] = user_group["is_active"]
-            user_groups.append(UserGroupIncrementalLoad(**base_data))
-        return user_groups
-
-
-class UserGroupFullLoad(BaseUserGroup):
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserGroupFullLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        user_groups = []
-        for user_group in data:
-            base_data = cls._create_from_dict_data(user_group, delimiter)
-            user_groups.append(UserGroupFullLoad(**base_data))
-        return user_groups
+    parent_user_groups: list[str] = Field(default_factory=list)
+
+    @field_validator("user_group_name", mode="before")
+    @classmethod
+    def validate_user_group_name(
+        cls, v: str | None, info: ValidationInfo
+    ) -> str:
+        """If user_group_name is None or empty, default to user_group_id."""
+        if not v:  # handles None and empty string
+            return info.data.get("user_group_id", "")
+        return v
+
+    @field_validator("parent_user_groups", mode="before")
+    @classmethod
+    def validate_parent_user_groups(cls, v: list[str] | None) -> list[str]:
+        """If parent_user_groups is None or empty, default to empty list."""
+        if not v:
+            return []
+        return v
+
+
+class UserGroupFullLoad(UserGroupBase):
+    """Input validator for full load of user group provisioning."""
+
+
+class UserGroupIncrementalLoad(UserGroupBase):
+    """Input validator for incremental load of user group provisioning."""
+
+    is_active: bool
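The CSV-oriented `from_list_of_dicts`/`SplitMixin` plumbing is replaced by Pydantic `before`-mode validators, so the defaulting now happens at model construction. A small sketch of the observable behavior (values illustrative):

from gooddata_pipelines import UserGroupFullLoad

# An empty name falls back to the ID; missing parents become an empty list.
group = UserGroupFullLoad(user_group_id="analysts", user_group_name="")
print(group.user_group_name)     # "analysts"
print(group.parent_user_groups)  # []

Note the validator ordering: `user_group_id` is declared first, so it is already present in `info.data` when `user_group_name` is validated.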
gooddata_pipelines/provisioning/entities/users/models/users.py
CHANGED
@@ -5,10 +5,8 @@ from typing import Any
 from gooddata_sdk.catalog.user.entity_model.user import CatalogUser
 from pydantic import BaseModel

-from gooddata_pipelines.provisioning.utils.utils import SplitMixin

-
-class BaseUser(BaseModel, SplitMixin):
+class BaseUser(BaseModel):
     """Base class containing shared user fields and functionality."""

     user_id: str
@@ -18,21 +16,6 @@ class BaseUser(BaseModel, SplitMixin):
     auth_id: str | None
     user_groups: list[str]

-    @classmethod
-    def _create_from_dict_data(
-        cls, user_data: dict[str, Any], delimiter: str = ","
-    ) -> dict[str, Any]:
-        """Helper method to extract common data from dict."""
-        user_groups = cls.split(user_data["user_groups"], delimiter=delimiter)
-        return {
-            "user_id": user_data["user_id"],
-            "firstname": user_data["firstname"],
-            "lastname": user_data["lastname"],
-            "email": user_data["email"],
-            "auth_id": user_data["auth_id"],
-            "user_groups": user_groups,
-        }
-
     @classmethod
     def _create_from_sdk_data(cls, obj: CatalogUser) -> dict[str, Any]:
         """Helper method to extract common data from SDK object."""
@@ -68,47 +51,24 @@ class BaseUser(BaseModel, SplitMixin):
         )


-class UserIncrementalLoad(BaseUser):
-    """…"""
-
-    is_active: bool
-
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserIncrementalLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        converted_users = []
-        for user in data:
-            base_data = cls._create_from_dict_data(user, delimiter)
-            base_data["is_active"] = user["is_active"]
-            converted_users.append(cls(**base_data))
-        return converted_users
+class UserFullLoad(BaseUser):
+    """Input validator for full load of user provisioning."""

     @classmethod
-    def from_sdk_obj(cls, obj: CatalogUser) -> "UserIncrementalLoad":
+    def from_sdk_obj(cls, obj: CatalogUser) -> "UserFullLoad":
         """Creates GDUserTarget from CatalogUser SDK object."""
         base_data = cls._create_from_sdk_data(obj)
-        base_data["is_active"] = True
         return cls(**base_data)


-class UserFullLoad(BaseUser):
-    """…"""
-
-    @classmethod
-    def from_list_of_dicts(
-        cls, data: list[dict[str, Any]], delimiter: str = ","
-    ) -> list["UserFullLoad"]:
-        """Creates a list of User objects from list of dicts."""
-        converted_users = []
-        for user in data:
-            base_data = cls._create_from_dict_data(user, delimiter)
-            converted_users.append(cls(**base_data))
-        return converted_users
+class UserIncrementalLoad(BaseUser):
+    """Input validator for incremental load of user provisioning."""
+
+    is_active: bool

     @classmethod
-    def from_sdk_obj(cls, obj: CatalogUser) -> "UserFullLoad":
+    def from_sdk_obj(cls, obj: CatalogUser) -> "UserIncrementalLoad":
         """Creates GDUserTarget from CatalogUser SDK object."""
         base_data = cls._create_from_sdk_data(obj)
+        base_data["is_active"] = True
         return cls(**base_data)
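Note the role swap: `is_active` used to live on the first declared class and was stamped by `from_sdk_obj`; now only the incremental model carries it, and SDK-sourced users default to active. A small illustration (field names are taken from the removed dict helper above; optionality beyond what the diff shows is assumed):

from gooddata_pipelines.provisioning.entities.users.models.users import (
    UserFullLoad,
    UserIncrementalLoad,
)

common = dict(
    user_id="jane.doe",
    firstname="Jane",
    lastname="Doe",
    email="jane.doe@example.com",
    auth_id=None,
    user_groups=["analysts"],
)

full = UserFullLoad(**common)                           # no is_active field
incr = UserIncrementalLoad(**common, is_active=False)   # request deactivation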
gooddata_pipelines/provisioning/entities/users/permissions.py
CHANGED
@@ -6,10 +6,10 @@ from typing import TypeVar

 from gooddata_pipelines.api.exceptions import GoodDataApiException
 from gooddata_pipelines.provisioning.entities.users.models.permissions import (
+    EntityType,
     PermissionDeclaration,
     PermissionFullLoad,
     PermissionIncrementalLoad,
-    PermissionType,
     TargetsPermissionDict,
     WSPermissionsDeclarations,
 )
@@ -28,12 +28,18 @@ class PermissionProvisioner(
     """Provisioning class for user permissions in GoodData workspaces.

     This class handles the provisioning of user permissions based on the provided
-    source data.
+    source data. Use the `full_load` or `incremental_load`
+    methods to run the provisioning.
     """

     source_group_incremental: list[PermissionIncrementalLoad]
     source_group_full: list[PermissionFullLoad]

+    FULL_LOAD_TYPE: type[PermissionFullLoad] = PermissionFullLoad
+    INCREMENTAL_LOAD_TYPE: type[PermissionIncrementalLoad] = (
+        PermissionIncrementalLoad
+    )
+
     def _get_ws_declaration(self, ws_id: str) -> PermissionDeclaration:
         users: TargetsPermissionDict = {}
         user_groups: TargetsPermissionDict = {}
@@ -47,7 +53,7 @@ class PermissionProvisioner(
         )
         target_dict = (
             users
-            if permission_type == PermissionType.user.value
+            if permission_type == EntityType.user.value
             else user_groups
         )
@@ -105,11 +111,13 @@ class PermissionProvisioner(
         self, permission: PermissionFullLoad | PermissionIncrementalLoad
     ) -> None:
         """Validates if the permission is correctly defined."""
-        if permission.type_ == PermissionType.user:
-            self._api.get_user(permission.id_, error_message="User not found")
+        if permission.entity_type == EntityType.user:
+            self._api.get_user(
+                permission.entity_id, error_message="User not found"
+            )
         else:
             self._api.get_user_group(
-                permission.id_, error_message="User group not found"
+                permission.entity_id, error_message="User group not found"
             )

         self._api.get_workspace(
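With the class-level type markers in place, the shared base (see provisioning.py below) can verify at runtime that callers pass the matching model. A hedged usage sketch — the `host`/`token` constructor shape is taken from `UserProvisioner.__init__` in this diff and assumed to apply here as well:

from gooddata_pipelines import (
    EntityType,
    PermissionIncrementalLoad,
    PermissionProvisioner,
)

provisioner = PermissionProvisioner(host="https://example.gooddata.com", token="***")

changes = [
    PermissionIncrementalLoad(
        permission="VIEW",
        workspace_id="ws_sales",
        entity_id="analysts",
        entity_type=EntityType.user_group,
        is_active=False,  # revoke this permission
    )
]
# Passing full-load models here would now raise TypeError.
provisioner.incremental_load(changes)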
gooddata_pipelines/provisioning/entities/users/user_groups.py
CHANGED
@@ -21,13 +21,19 @@ class UserGroupProvisioner(
     """Provisioning class for user groups in GoodData workspaces.

     This class handles the creation, update, and deletion of user groups
-    based on the provided source data.
+    based on the provided source data. Use the `full_load` or `incremental_load`
+    methods to run the provisioning.
     """

     source_group_incremental: list[UserGroupIncrementalLoad]
     source_group_full: list[UserGroupFullLoad]
     upstream_user_groups: list[CatalogUserGroup]

+    FULL_LOAD_TYPE: type[UserGroupFullLoad] = UserGroupFullLoad
+    INCREMENTAL_LOAD_TYPE: type[UserGroupIncrementalLoad] = (
+        UserGroupIncrementalLoad
+    )
+
     @staticmethod
     def _is_changed(
         group: UserGroupModel, existing_group: CatalogUserGroup
gooddata_pipelines/provisioning/entities/users/users.py
CHANGED
@@ -30,6 +30,9 @@ class UserProvisioner(Provisioning[UserFullLoad, UserIncrementalLoad]):
     source_group_incremental: list[UserIncrementalLoad]
     source_group_full: list[UserFullLoad]

+    FULL_LOAD_TYPE: type[UserFullLoad] = UserFullLoad
+    INCREMENTAL_LOAD_TYPE: type[UserIncrementalLoad] = UserIncrementalLoad
+
     def __init__(self, host: str, token: str) -> None:
         super().__init__(host, token)
         self.upstream_user_cache: dict[UserId, UserModel] = {}
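Each concrete provisioner pins its expected input models the same way. A short, assumption-flagged example of driving the user provisioner end to end (field values illustrative):

from gooddata_pipelines import UserFullLoad, UserProvisioner

provisioner = UserProvisioner(host="https://example.gooddata.com", token="***")

desired_state = [
    UserFullLoad(
        user_id="jane.doe",
        firstname="Jane",
        lastname="Doe",
        email="jane.doe@example.com",
        auth_id=None,
        user_groups=["analysts"],
    )
]
# Full load reconciles upstream users against exactly this list.
provisioner.full_load(desired_state)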
gooddata_pipelines/provisioning/entities/workspaces/models.py
CHANGED
@@ -1,29 +1,27 @@
 # (C) 2025 GoodData Corporation
 """Module containing models related to workspace provisioning in GoodData Cloud."""

-from dataclasses import dataclass, field
 from typing import Literal

+import attrs
 from pydantic import BaseModel, ConfigDict


-@dataclass
+@attrs.define
 class WorkspaceDataMaps:
     """Dataclass to hold various mappings related to workspace data."""

-    child_to_parent_id_map: dict[str, str] = field(default_factory=dict)
-    workspace_id_to_wdf_map: dict[str, dict[str, list[str]]] = field(
-        default_factory=dict
+    child_to_parent_id_map: dict[str, str] = attrs.field(factory=dict)
+    workspace_id_to_wdf_map: dict[str, dict[str, list[str]]] = attrs.field(
+        factory=dict
     )
-    parent_ids: set[str] = field(default_factory=set)
-    source_ids: set[str] = field(default_factory=set)
-    workspace_id_to_name_map: dict[str, str] = field(default_factory=dict)
-    upstream_ids: set[str] = field(default_factory=set)
+    parent_ids: set[str] = attrs.field(factory=set)
+    source_ids: set[str] = attrs.field(factory=set)
+    workspace_id_to_name_map: dict[str, str] = attrs.field(factory=dict)
+    upstream_ids: set[str] = attrs.field(factory=set)


-class WorkspaceFullLoad(BaseModel):
-    """Model representing input for provisioning of workspaces in GoodData Cloud."""
-
+class WorkspaceBase(BaseModel):
     model_config = ConfigDict(coerce_numbers_to_str=True)

     parent_id: str
@@ -33,10 +31,13 @@ class WorkspaceFullLoad(BaseModel):
     workspace_data_filter_values: list[str] | None = None


-class WorkspaceIncrementalLoad(BaseModel):
-    """…"""
+class WorkspaceFullLoad(WorkspaceBase):
+    """Input validator for full load of workspace provisioning."""
+
+
+class WorkspaceIncrementalLoad(WorkspaceBase):
+    """Input validator for incremental load of workspace provisioning."""

-    # TODO: double check that the model loads the data correctly, write a test
     is_active: bool
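The mapping container moves from `dataclasses` to `attrs`; `attrs.field(factory=...)` is the direct counterpart of `field(default_factory=...)`. A side-by-side sketch:

from dataclasses import dataclass, field

import attrs


@dataclass
class MapsDataclass:
    ids: set[str] = field(default_factory=set)


@attrs.define  # slotted class with generated __init__/__repr__/__eq__
class MapsAttrs:
    ids: set[str] = attrs.field(factory=set)


assert MapsDataclass().ids == MapsAttrs().ids == set()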
gooddata_pipelines/provisioning/entities/workspaces/workspace.py
CHANGED
@@ -35,11 +35,19 @@ class WorkspaceProvisioner(
     source_group_full: list[WorkspaceFullLoad]
     source_group_incremental: list[WorkspaceIncrementalLoad]

+    FULL_LOAD_TYPE: type[WorkspaceFullLoad] = WorkspaceFullLoad
+    INCREMENTAL_LOAD_TYPE: type[WorkspaceIncrementalLoad] = (
+        WorkspaceIncrementalLoad
+    )
+
+    upstream_group: list[CatalogWorkspace]
+
     def __init__(self, *args: str, **kwargs: str) -> None:
         """Creates an instance of the WorkspaceProvisioner.

         Calls the superclass constructor and initializes the validator, parser,
-        and maps for workspace data.
+        and maps for workspace data. Use the `full_load` or `incremental_load`
+        methods to run the provisioning.
         """
         super().__init__(*args, **kwargs)
         self.validator: WorkspaceDataValidator = WorkspaceDataValidator(
@@ -91,10 +99,11 @@ class WorkspaceProvisioner(
         workspace_ids_to_update: set[str],
         child_to_parent_map: dict[str, str],
         workspace_id_to_wdf_map: dict[str, dict[str, list[str]]],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> None:
         action: Literal["CREATE", "UPDATE"]

-        for source_workspace in self.source_group_full:
+        for source_workspace in source_group:
             if source_workspace.workspace_id in workspace_ids_to_update:
                 action = "UPDATE"
             elif source_workspace.workspace_id in workspace_ids_to_create:
@@ -199,8 +208,8 @@ class WorkspaceProvisioner(
         )

         # Get upstream children of all parent workspaces.
-        self.upstream_group…
-        self.…
+        self.upstream_group = self._api.get_panther_children_workspaces(
+            self.maps.parent_ids
         )

         # Set maps that require upstream data.
@@ -234,6 +243,7 @@ class WorkspaceProvisioner(
             self.ids_to_update,
             self.maps.child_to_parent_id_map,
             self.maps.workspace_id_to_wdf_map,
+            self.source_group_full,
         )

         # Check WDF settings of ignored workspaces.
@@ -259,5 +269,42 @@ class WorkspaceProvisioner(

     def _provision_incremental_load(self) -> None:
         """Incremental workspace provisioning."""
+        # Set the maps based on the source data.
+        self.maps = self.parser.set_maps_based_on_source(
+            self.maps, self.source_group_incremental
+        )
+
+        # Get upstream children of all parent workspaces.
+        self.upstream_group = self._api.get_panther_children_workspaces(
+            self.maps.parent_ids
+        )
+
+        # Set maps that require upstream data.
+        self.maps = self.parser.set_maps_with_upstream_data(
+            self.maps, self.source_group_incremental, self.upstream_group
+        )

-        …
+        # Create an instance of WDF manager with the created maps.
+        self.wdf_manager = WorkspaceDataFilterManager(self._api, self.maps)
+
+        # Iterate through the source data and sort workspace ID to groups
+        ids_to_update: set[str] = set()
+        ids_to_delete: set[str] = set()
+
+        for workspace in self.source_group_incremental:
+            if workspace.is_active:
+                ids_to_update.add(workspace.workspace_id)
+            else:
+                ids_to_delete.add(workspace.workspace_id)
+
+        self._create_or_update_panther_workspaces(
+            set(),
+            ids_to_update,
+            self.maps.child_to_parent_id_map,
+            self.maps.workspace_id_to_wdf_map,
+            self.source_group_incremental,
+        )
+
+        self.delete_panther_workspaces(
+            ids_to_delete, self.maps.workspace_id_to_name_map
+        )
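Incremental workspace provisioning is now implemented: active records are created or updated, inactive ones are deleted. A hedged sketch of driving it (field names follow the models in this diff; the exact required field set may differ):

from gooddata_pipelines import WorkspaceIncrementalLoad, WorkspaceProvisioner

provisioner = WorkspaceProvisioner(host="https://example.gooddata.com", token="***")

provisioner.incremental_load(
    [
        # is_active=True → create or update under the given parent…
        WorkspaceIncrementalLoad(
            parent_id="demo_parent",
            workspace_id="ws_new",
            workspace_name="New WS",
            is_active=True,
        ),
        # …is_active=False → delete.
        WorkspaceIncrementalLoad(
            parent_id="demo_parent",
            workspace_id="ws_old",
            workspace_name="Old WS",
            is_active=False,
        ),
    ]
)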
gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py
CHANGED
@@ -9,6 +9,7 @@ from gooddata_sdk.catalog.workspace.entity_model.workspace import (
 from gooddata_pipelines.provisioning.entities.workspaces.models import (
     WorkspaceDataMaps,
     WorkspaceFullLoad,
+    WorkspaceIncrementalLoad,
 )


@@ -17,7 +18,7 @@ class WorkspaceDataParser:

     @staticmethod
     def _get_id_to_name_map(
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
         upstream_group: list[CatalogWorkspace],
     ) -> dict[str, str]:
         """Creates a map of workspace IDs to their names for all known workspaces."""
@@ -33,7 +34,7 @@ class WorkspaceDataParser:

     @staticmethod
     def _get_child_to_parent_map(
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> dict[str, str]:
         """Creates a map of child workspace IDs to their parent workspace IDs."""
         child_to_parent_map: dict[str, str] = {
@@ -45,7 +46,8 @@ class WorkspaceDataParser:

     @staticmethod
     def _get_set_of_ids_from_source(
-        source_group: list[WorkspaceFullLoad]
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
+        column_name: str,
     ) -> set[str]:
         """Creates a set of unique parent workspace IDs."""
         set_of_ids: set[str] = {
@@ -64,7 +66,8 @@ class WorkspaceDataParser:
         return set_of_ids

     def _get_child_to_wdfs_map(
-        self, source_group: list[WorkspaceFullLoad]
+        self,
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> dict[str, dict[str, list[str]]]:
         """Creates a map of child workspace IDs to their WDF IDs."""
         # TODO: Use objects or a more transparent data structure instead of this.
@@ -88,7 +91,7 @@ class WorkspaceDataParser:
     def set_maps_based_on_source(
         self,
         map_object: WorkspaceDataMaps,
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
     ) -> WorkspaceDataMaps:
         """Creates maps which are dependent on the source group only."""
         map_object.child_to_parent_id_map = self._get_child_to_parent_map(
@@ -109,7 +112,7 @@ class WorkspaceDataParser:
     def set_maps_with_upstream_data(
         self,
         map_object: WorkspaceDataMaps,
-        source_group: list[WorkspaceFullLoad],
+        source_group: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
         upstream_group: list[CatalogWorkspace],
     ) -> WorkspaceDataMaps:
         """Creates maps which are dependent on both the source group and upstream group."""
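The signatures use `list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad]` rather than `list[WorkspaceFullLoad | WorkspaceIncrementalLoad]`: `list` is invariant, and the union-of-lists form says each call receives a homogeneous batch of one model or the other, never a mix. A compact illustration of the distinction:

from gooddata_pipelines import WorkspaceFullLoad, WorkspaceIncrementalLoad


def handle_homogeneous(
    batch: list[WorkspaceFullLoad] | list[WorkspaceIncrementalLoad],
) -> None:
    """Accepts a list of one model type; a mixed list fails type checking."""


def handle_mixed(
    batch: list[WorkspaceFullLoad | WorkspaceIncrementalLoad],
) -> None:
    """Accepts lists that interleave both model types."""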
gooddata_pipelines/provisioning/provisioning.py
CHANGED
@@ -24,6 +24,9 @@ class Provisioning(Generic[TFullLoadSourceData, TIncrementalSourceData]):
     source_group_full: list[TFullLoadSourceData]
     source_group_incremental: list[TIncrementalSourceData]

+    FULL_LOAD_TYPE: type[TFullLoadSourceData]
+    INCREMENTAL_LOAD_TYPE: type[TIncrementalSourceData]
+
     def __init__(self, host: str, token: str) -> None:
         self.source_id: set[str] = set()
         self.upstream_id: set[str] = set()
@@ -80,6 +83,17 @@ class Provisioning(Generic[TFullLoadSourceData, TIncrementalSourceData]):
             ids_to_create=ids_to_create,
         )

+    def _validate_source_data_type(
+        self,
+        source_data: list[TFullLoadSourceData] | list[TIncrementalSourceData],
+        model: type[TFullLoadSourceData] | type[TIncrementalSourceData],
+    ) -> None:
+        """Validates data type of the source data."""
+        if not all(isinstance(record, model) for record in source_data):
+            raise TypeError(
+                f"Not all elements in source data are instances of {model.__name__}"
+            )
+
     def _provision_incremental_load(self) -> None:
         raise NotImplementedError(
             "Provisioning method to be implemented in the subclass."
@@ -100,11 +114,13 @@ class Provisioning(Generic[TFullLoadSourceData, TIncrementalSourceData]):
         That means:
         - All workspaces declared in the source data are created if missing, or
             updated to match the source data
-        - All workspaces not declared in the source data are deleted
+        - All child workspaces not declared under the parent workspace in the
+            source data are deleted
         """
-        self.source_group_full = source_data

         try:
+            self._validate_source_data_type(source_data, self.FULL_LOAD_TYPE)
+            self.source_group_full = source_data
             self._provision_full_load()
             self.logger.info("Provisioning completed.")
         except Exception as e:
@@ -116,12 +132,14 @@ class Provisioning(Generic[TFullLoadSourceData, TIncrementalSourceData]):
         """Runs incremental provisioning workflow with the provided source data.

         Incremental provisioning is used to modify a subset of the upstream workspaces
-        based on the source data provided.
+        based on the source data provided. Only changes requested in the source
+        data will be applied.
         """
-        # TODO: validate the data type of source group at runtime
-        self.source_group_incremental = source_data
-
         try:
+            self._validate_source_data_type(
+                source_data, self.INCREMENTAL_LOAD_TYPE
+            )
+            self.source_group_incremental = source_data
             self._provision_incremental_load()
             self.logger.info("Provisioning completed.")
         except Exception as e:
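The previously TODO-flagged runtime check is now generic: each subclass pins `FULL_LOAD_TYPE`/`INCREMENTAL_LOAD_TYPE`, and the base class verifies every record with `isinstance` before storing it. A condensed, self-contained sketch of the pattern (the mini classes are illustrative, not the package's):

from typing import Generic, TypeVar

TFull = TypeVar("TFull")
TIncr = TypeVar("TIncr")


class MiniProvisioning(Generic[TFull, TIncr]):
    FULL_LOAD_TYPE: type[TFull]  # pinned by each subclass
    INCREMENTAL_LOAD_TYPE: type[TIncr]

    def _validate(self, records: list, model: type) -> None:
        if not all(isinstance(r, model) for r in records):
            raise TypeError(
                f"Not all elements in source data are instances of {model.__name__}"
            )

    def full_load(self, records: list[TFull]) -> None:
        self._validate(records, self.FULL_LOAD_TYPE)  # fail fast on wrong model


class IntProvisioner(MiniProvisioning[int, str]):
    FULL_LOAD_TYPE = int
    INCREMENTAL_LOAD_TYPE = str


IntProvisioner().full_load([1, 2, 3])   # passes
# IntProvisioner().full_load(["a"])     # would raise TypeError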
gooddata_pipelines/provisioning/utils/context_objects.py
CHANGED
@@ -16,10 +16,10 @@ class WorkspaceContext:
         wdf_id: str | None = None,
         wdf_values: list[str] | None = None,
     ):
-        self.workspace_id = …
-        self.workspace_name = …
-        self.wdf_id = …
-        self.wdf_values = …
+        self.workspace_id = workspace_id if workspace_id else "NA"
+        self.workspace_name = workspace_name
+        self.wdf_id = wdf_id
+        self.wdf_values = wdf_values


 class UserContext:
@@ -28,5 +28,5 @@ class UserContext:

     def __init__(self, user_id: str, user_groups: list[str]):
         """User context object, stringifies list of user groups"""
-        self.user_id = …
-        self.user_groups = …
+        self.user_id = user_id
+        self.user_groups = ",".join(user_groups)
gooddata_pipelines/provisioning/utils/utils.py
CHANGED
@@ -2,7 +2,7 @@

 """Module for utilities used in GoodData Pipelines provisioning."""

-from pydantic import BaseModel
+import attrs
 from requests import Response


@@ -61,20 +61,8 @@ class AttributesMixin:
         return attrs


-class SplitMixin:
-    @staticmethod
-    def split(string_value: str, delimiter: str = ",") -> list[str]:
-        """
-        Splits a string by the given delimiter and returns a list of stripped values.
-        If the input is empty, returns an empty list.
-        """
-        if not string_value:
-            return []
-
-        return [value.strip() for value in string_value.split(delimiter)]
-
-
-class EntityGroupIds(BaseModel):
+@attrs.define
+class EntityGroupIds:
     ids_in_both_systems: set[str]
     ids_to_delete: set[str]
     ids_to_create: set[str]
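`SplitMixin` is gone — CSV-style splitting now happens in the input models — and `EntityGroupIds` drops Pydantic for a plain `attrs` class, which keeps keyword construction but skips field coercion. A sketch of an equivalent standalone helper (behavior copied from the removed method) plus the new container:

import attrs


def split(string_value: str, delimiter: str = ",") -> list[str]:
    """Equivalent of the removed SplitMixin.split: strip items, empty -> []."""
    if not string_value:
        return []
    return [value.strip() for value in string_value.split(delimiter)]


@attrs.define
class EntityGroupIds:
    ids_in_both_systems: set[str]
    ids_to_delete: set[str]
    ids_to_create: set[str]


groups = EntityGroupIds(
    ids_in_both_systems={"a"}, ids_to_delete=set(), ids_to_create={"b"}
)
print(split("x, y,z"))  # ['x', 'y', 'z']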
{gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gooddata-pipelines
-Version: 1.49.1.dev2
+Version: 1.50.0
 Summary: GoodData Cloud lifecycle automation pipelines
 Author-email: GoodData <support@gooddata.com>
 License: MIT
@@ -8,7 +8,7 @@ License-File: LICENSE.txt
 Requires-Python: >=3.10
 Requires-Dist: boto3-stubs<2.0.0,>=1.39.3
 Requires-Dist: boto3<2.0.0,>=1.39.3
-Requires-Dist: gooddata-sdk~=1.49.1.dev2
+Requires-Dist: gooddata-sdk~=1.50.0
 Requires-Dist: pydantic<3.0.0,>=2.11.3
 Requires-Dist: requests<3.0.0,>=2.32.3
 Requires-Dist: types-pyyaml<7.0.0,>=6.0.12.20250326
{gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/RECORD
CHANGED
@@ -1,15 +1,15 @@
-gooddata_pipelines/__init__.py,sha256=…
+gooddata_pipelines/__init__.py,sha256=AEKIRuGBPMA_RkL14RF-recw9hS4dGV8cVqgDM3XmrA,1931
 gooddata_pipelines/_version.py,sha256=Zi8Ht5ofjFeSYGG5USixQtJNB1po6okh0Rez8VyAsFM,200
 gooddata_pipelines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 gooddata_pipelines/api/__init__.py,sha256=0WaBI2XMdkkZgnUsQ9kqipNzh2l2zamZvUt_qjp8xCk,106
 gooddata_pipelines/api/exceptions.py,sha256=rddQXfv8Ktckz7RONKBnKfm53M7dzPCh50Dl1k-8hqs,1545
-gooddata_pipelines/api/gooddata_api.py,sha256=…
+gooddata_pipelines/api/gooddata_api.py,sha256=ALuxTgu3KOK5S2b0C5HpDyvmT_UNfGeF-eqbvxXhDQM,8667
 gooddata_pipelines/api/gooddata_api_wrapper.py,sha256=t7dFrXJ6X4yXS9XDthOmvd2CyzdnDDNPeIngTEW72YU,1152
 gooddata_pipelines/api/gooddata_sdk.py,sha256=wd5O4e9BQLWUawt6odrs5a51nqFGthBkvqh9WOiW36Q,13734
 gooddata_pipelines/api/utils.py,sha256=3QY_aYH17I9THoCINE3l-n5oj52k-gNeT1wv6Z_VxN8,1433
 gooddata_pipelines/backup_and_restore/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/backup_and_restore/backup_input_processor.py,sha256=ex1tGwETdHDDBRJ_DGKZsZbH6uoRuOrbGbKOC976H5s,7940
-gooddata_pipelines/backup_and_restore/backup_manager.py,sha256=…
+gooddata_pipelines/backup_and_restore/backup_manager.py,sha256=rfNMn6VLul2OjnLmMyy7AL3qaOVuGapyelORvoTOjGA,16012
 gooddata_pipelines/backup_and_restore/constants.py,sha256=TYw4hU5hhzDVTLJa0gWseaiSs_VboWsYwW7QsqtJ1hA,939
 gooddata_pipelines/backup_and_restore/csv_reader.py,sha256=0Kw7mJT7REj3Gjqfsc6YT9MbhcqfCGNB_SKBwzTI1rk,1268
 gooddata_pipelines/backup_and_restore/models/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
@@ -23,7 +23,7 @@ gooddata_pipelines/backup_and_restore/storage/s3_storage.py,sha256=ZAysu4sPMAvdW
 gooddata_pipelines/logger/__init__.py,sha256=W-fJvMStnsDUY52AYFhx_LnS2cSCFNf3bB47Iew2j04,129
 gooddata_pipelines/logger/logger.py,sha256=yIMdvqsmOSGQLI4U_tQwxX5E2q_FXUu0Ko7Hv39slFM,3549
 gooddata_pipelines/provisioning/__init__.py,sha256=RZDEiv8nla4Jwa2TZXUdp1NSxg2_-lLqz4h7k2c4v5Y,854
-gooddata_pipelines/provisioning/provisioning.py,sha256=…
+gooddata_pipelines/provisioning/provisioning.py,sha256=Mibf1-ZwPfHzmoAjgIRuYvtakY7LqerDTF36FgPg990,6175
 gooddata_pipelines/provisioning/assets/wdf_setting.json,sha256=nxOLGZkEQiMdARcUDER5ygqr3Zu-MQlLlUyXVhPUq64,280
 gooddata_pipelines/provisioning/entities/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/provisioning/entities/user_data_filters/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
@@ -31,24 +31,24 @@ gooddata_pipelines/provisioning/entities/user_data_filters/user_data_filters.py,
 gooddata_pipelines/provisioning/entities/user_data_filters/models/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/provisioning/entities/user_data_filters/models/udf_models.py,sha256=y0q5E91AhxIkf_EHW0swCjNUkiiAOFXarAhvjUKVVKw,740
 gooddata_pipelines/provisioning/entities/users/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
-gooddata_pipelines/provisioning/entities/users/permissions.py,sha256=…
-gooddata_pipelines/provisioning/entities/users/user_groups.py,sha256=…
-gooddata_pipelines/provisioning/entities/users/users.py,sha256=…
+gooddata_pipelines/provisioning/entities/users/permissions.py,sha256=2k3oPI7WyABcD2TMmLPsMUDrAjnKM7Vw56kz_RWhcmI,7135
+gooddata_pipelines/provisioning/entities/users/user_groups.py,sha256=-2Nca01ZMjXmnAGDUuKP5G7mqFyn4MnsgZsnS2oy7vg,8511
+gooddata_pipelines/provisioning/entities/users/users.py,sha256=TVfOp3fqQYmzA4K03IBGNYJrqGQAzWH_oay0qsvR8Xo,6633
 gooddata_pipelines/provisioning/entities/users/models/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
-gooddata_pipelines/provisioning/entities/users/models/permissions.py,sha256=…
-gooddata_pipelines/provisioning/entities/users/models/user_groups.py,sha256=…
-gooddata_pipelines/provisioning/entities/users/models/users.py,sha256=…
+gooddata_pipelines/provisioning/entities/users/models/permissions.py,sha256=buyNtDShvAJL4mFZSV-UqK_9JAL_2-AaIlGYCHibhHo,7244
+gooddata_pipelines/provisioning/entities/users/models/user_groups.py,sha256=Odp4yZoK2vC40jgh7FBKmaIINpwffl62uoaT8Xxr-14,1160
+gooddata_pipelines/provisioning/entities/users/models/users.py,sha256=lwb8Q-slBELs_0882KOumkMgKiFKCL3ZABONsoT5Nw0,2234
 gooddata_pipelines/provisioning/entities/workspaces/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
-gooddata_pipelines/provisioning/entities/workspaces/models.py,sha256=…
-gooddata_pipelines/provisioning/entities/workspaces/workspace.py,sha256=…
+gooddata_pipelines/provisioning/entities/workspaces/models.py,sha256=-ehte9HLNos3l6yLip4mZU6wBcmY_Yzwq0t0m0fhwPI,2031
+gooddata_pipelines/provisioning/entities/workspaces/workspace.py,sha256=jngaEKNlMfhjRr4rQ2ECQDoh0gk7KaZTMuTazPLECnM,11505
 gooddata_pipelines/provisioning/entities/workspaces/workspace_data_filters.py,sha256=0dNcK7tkp40XulCj7EPoB4zVeyQbRx2Tt4yAfgLrm50,10736
-gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py,sha256=…
+gooddata_pipelines/provisioning/entities/workspaces/workspace_data_parser.py,sha256=akiN8F9x-6xo7KXLJ40iOlmBImEKqWlGYlN3lpF4jQs,4562
 gooddata_pipelines/provisioning/entities/workspaces/workspace_data_validator.py,sha256=t6RWNsrDpebyOgB4c_ctqrkio72jBHqsXqk-ntBTkA4,7225
 gooddata_pipelines/provisioning/utils/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
-gooddata_pipelines/provisioning/utils/context_objects.py,sha256=…
+gooddata_pipelines/provisioning/utils/context_objects.py,sha256=HJoeumH_gXwM6X-GO3HkC4w-6RYozz6-aqQOhDnu7no,879
 gooddata_pipelines/provisioning/utils/exceptions.py,sha256=1WnAOlPhqOf0xRcvn70lxAlLb8Oo6m6WCYS4hj9uzDU,3630
-gooddata_pipelines/provisioning/utils/utils.py,sha256=…
-gooddata_pipelines-1.49.1.dev2.dist-info/METADATA,sha256=…
-gooddata_pipelines-1.49.1.dev2.dist-info/WHEEL,sha256=…
-gooddata_pipelines-1.49.1.dev2.dist-info/licenses/LICENSE.txt,sha256=…
-gooddata_pipelines-1.49.1.dev2.dist-info/RECORD,,
+gooddata_pipelines/provisioning/utils/utils.py,sha256=uF3k5hmoM5d6UoWWfPGCQgT_861zcU-ACyaQHHOOncY,2434
+gooddata_pipelines-1.50.0.dist-info/METADATA,sha256=CeJDooBpPypFs18Un0bz0MjSUYhsFJWGddlkwaOpD98,3512
+gooddata_pipelines-1.50.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+gooddata_pipelines-1.50.0.dist-info/licenses/LICENSE.txt,sha256=PNC7WXGIo6OKkNoPLRxlVrw6jaLcjSTUsSxy9Xcu9Jo,560365
+gooddata_pipelines-1.50.0.dist-info/RECORD,,

{gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/WHEEL
File without changes

{gooddata_pipelines-1.49.1.dev2.dist-info → gooddata_pipelines-1.50.0.dist-info}/licenses/LICENSE.txt
File without changes