gooddata-pipelines 1.48.1.dev1__py3-none-any.whl → 1.48.1.dev2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gooddata-pipelines might be problematic.
- gooddata_pipelines/backup_and_restore/models/storage.py +4 -1
- gooddata_pipelines/backup_and_restore/storage/s3_storage.py +19 -4
- {gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/METADATA +11 -14
- {gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/RECORD +6 -6
- {gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/WHEEL +0 -0
- {gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/licenses/LICENSE.txt +0 -0
gooddata_pipelines/backup_and_restore/models/storage.py
CHANGED

@@ -1,7 +1,7 @@
 # (C) 2025 GoodData Corporation
 
 from enum import Enum
-from typing import Annotated, TypeAlias
+from typing import Annotated, TypeAlias, Optional
 
 import yaml
 from pydantic import BaseModel, Field
@@ -22,6 +22,9 @@ class S3StorageConfig(BaseModel):
     backup_path: str
     bucket: str
     profile: str = "default"
+    aws_access_key_id: Optional[str] = None
+    aws_secret_access_key: Optional[str] = None
+    aws_default_region: Optional[str] = None
 
 
 class LocalStorageConfig(BaseModel):
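For orientation, here is a minimal sketch of how the extended S3StorageConfig accepts the new optional credential fields. The field names mirror the diff above; redefining the model inline and the example values are purely illustrative and are not the package's own loading code.

```python
# Illustrative sketch only: the model mirrors the fields added in the diff above;
# the values and the direct construction are assumptions, not package code.
from typing import Optional

from pydantic import BaseModel


class S3StorageConfig(BaseModel):
    backup_path: str
    bucket: str
    profile: str = "default"
    aws_access_key_id: Optional[str] = None
    aws_secret_access_key: Optional[str] = None
    aws_default_region: Optional[str] = None


# Explicit credentials are now possible; all three new fields may also be
# omitted, in which case the existing profile-based behaviour applies.
config = S3StorageConfig(
    bucket="my-backup-bucket",
    backup_path="backups/",
    aws_access_key_id="AKIA...",
    aws_secret_access_key="...",
    aws_default_region="eu-west-1",
)
print(config.profile)  # "default" remains the fallback profile
```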
gooddata_pipelines/backup_and_restore/storage/s3_storage.py
CHANGED

@@ -21,8 +21,7 @@ class S3Storage(BackupStorage):
             raise ValueError("S3 storage config is required")
 
         self._config = conf.storage
-        self.
-        self._session = self._create_boto_session(self._profile)
+        self._session = self._create_boto_session(self._config)
         self._resource = self._session.resource("s3")
         self._bucket = self._resource.Bucket(self._config.bucket)  # type: ignore [missing library stubs]
         suffix = "/" if not self._config.backup_path.endswith("/") else ""
@@ -30,9 +29,25 @@ class S3Storage(BackupStorage):
 
         self._verify_connection()
 
-    def _create_boto_session(self,
+    def _create_boto_session(self, config: S3StorageConfig) -> boto3.Session:
+        if config.aws_access_key_id and config.aws_secret_access_key:
+            if not config.aws_default_region:
+                self.logger.warning(
+                    "No AWS region specified. Defaulting to us-east-1."
+                )
+            try:
+                return boto3.Session(
+                    aws_access_key_id=config.aws_access_key_id,
+                    aws_secret_access_key=config.aws_secret_access_key,
+                    region_name=config.aws_default_region,
+                )
+            except Exception:
+                self.logger.warning(
+                    "Failed to create boto3 session with supplied credentials. Falling back to profile..."
+                )
+
         try:
-            return boto3.Session(profile_name=profile)
+            return boto3.Session(profile_name=config.profile)
         except Exception:
             self.logger.warning(
                 'AWS profile "[default]" not found. Trying other fallback methods...'
{gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/METADATA
RENAMED

@@ -1,13 +1,13 @@
 Metadata-Version: 2.4
 Name: gooddata-pipelines
-Version: 1.48.1.
+Version: 1.48.1.dev2
 Author-email: GoodData <support@gooddata.com>
 License: MIT
 License-File: LICENSE.txt
 Requires-Python: >=3.10
 Requires-Dist: boto3-stubs<2.0.0,>=1.39.3
 Requires-Dist: boto3<2.0.0,>=1.39.3
-Requires-Dist: gooddata-sdk~=1.48.1.
+Requires-Dist: gooddata-sdk~=1.48.1.dev2
 Requires-Dist: pydantic<3.0.0,>=2.11.3
 Requires-Dist: requests<3.0.0,>=2.32.3
 Requires-Dist: types-pyyaml<7.0.0,>=6.0.12.20250326
@@ -22,9 +22,9 @@ Description-Content-Type: text/markdown
 
 # GoodData Pipelines
 
-A high
+A high-level library for automating the lifecycle of GoodData Cloud (GDC).
 
-You can use the package to manage following
+You can use the package to manage following resources in GDC:
 
 1. Provisioning (create, update, delete)
     - User profiles
@@ -36,7 +36,7 @@ You can use the package to manage following resoursec in GDC:
 1. _[PLANNED]:_ Custom fields management
     - extend the Logical Data Model of a child workspace
 
-In case you are not interested in incorporating a library in your own program
+In case you are not interested in incorporating a library in your own program but would like to use a ready-made script, consider having a look at [GoodData Productivity Tools](https://github.com/gooddata/gooddata-productivity-tools).
 
 ## Provisioning
 
@@ -53,23 +53,20 @@ import os
 from csv import DictReader
 from pathlib import Path
 
-# Import the Entity Provisioner class and
+# Import the Entity Provisioner class and corresponding model from gooddata_pipelines library
 from gooddata_pipelines import UserFullLoad, UserProvisioner
+from gooddata_pipelines.logger.logger import LogObserver
 
-#
-
-
-setup_logging()
+# Optionally, subscribe a standard Python logger to the LogObserver
+import logging
 logger = logging.getLogger(__name__)
+LogObserver().subscribe(logger)
 
 # Create the Provisioner instance - you can also create the instance from a GDC yaml profile
 provisioner = UserProvisioner(
     host=os.environ["GDC_HOSTNAME"], token=os.environ["GDC_AUTH_TOKEN"]
 )
 
-# Optional: subscribe to logs
-provisioner.logger.subscribe(logger)
-
 # Load your data from your data source
 source_data_path: Path = Path("path/to/some.csv")
 source_data_reader = DictReader(source_data_path.read_text().splitlines())
@@ -82,4 +79,4 @@ full_load_data: list[UserFullLoad] = UserFullLoad.from_list_of_dicts(
 provisioner.full_load(full_load_data)
 ```
 
-Ready
+Ready-made scripts covering the basic use cases can be found here in the [GoodData Productivity Tools](https://github.com/gooddata/gooddata-productivity-tools) repository
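Since the updated README example is scattered across the hunks above, here is the snippet stitched back together from the added and context lines. The only part not visible in this diff is the argument to UserFullLoad.from_list_of_dicts (only its opening line appears in a hunk header), so that call is filled in with a guess and marked as such.

```python
import os
from csv import DictReader
from pathlib import Path

# Import the Entity Provisioner class and corresponding model from gooddata_pipelines library
from gooddata_pipelines import UserFullLoad, UserProvisioner
from gooddata_pipelines.logger.logger import LogObserver

# Optionally, subscribe a standard Python logger to the LogObserver
import logging

logger = logging.getLogger(__name__)
LogObserver().subscribe(logger)

# Create the Provisioner instance - you can also create the instance from a GDC yaml profile
provisioner = UserProvisioner(
    host=os.environ["GDC_HOSTNAME"], token=os.environ["GDC_AUTH_TOKEN"]
)

# Load your data from your data source
source_data_path: Path = Path("path/to/some.csv")
source_data_reader = DictReader(source_data_path.read_text().splitlines())

# The body of this call is not shown in the diff; passing the reader rows is a guess.
full_load_data: list[UserFullLoad] = UserFullLoad.from_list_of_dicts(
    list(source_data_reader)
)

provisioner.full_load(full_load_data)
```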
{gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/RECORD
RENAMED

@@ -14,12 +14,12 @@ gooddata_pipelines/backup_and_restore/constants.py,sha256=AO4H6ngsLMs4bCV-RcT7xI
 gooddata_pipelines/backup_and_restore/csv_reader.py,sha256=0Kw7mJT7REj3Gjqfsc6YT9MbhcqfCGNB_SKBwzTI1rk,1268
 gooddata_pipelines/backup_and_restore/models/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/backup_and_restore/models/input_type.py,sha256=CBKJigKdmZ-NJD9MSfNhq89bo86W0AqCMMoyonbd1QA,239
-gooddata_pipelines/backup_and_restore/models/storage.py,sha256=
+gooddata_pipelines/backup_and_restore/models/storage.py,sha256=DcFH8iWz7LtZZIXoiScZ9ztG6uZHeI9-vLsD07FFnFY,1537
 gooddata_pipelines/backup_and_restore/models/workspace_response.py,sha256=eQbYLgRQc17IRG0yPTAJVrD-Xs05SzuwtzoNrPT2DoY,833
 gooddata_pipelines/backup_and_restore/storage/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/backup_and_restore/storage/base_storage.py,sha256=67wdItlG3neExeb_eCUDQhswdUB62X5Nyj9sOImB_Hg,487
 gooddata_pipelines/backup_and_restore/storage/local_storage.py,sha256=NvhPRzRAvuSpc5qCDyPqZaMB0i1jeZOZczaSwjUSGEg,1155
-gooddata_pipelines/backup_and_restore/storage/s3_storage.py,sha256=
+gooddata_pipelines/backup_and_restore/storage/s3_storage.py,sha256=iRtMtDq_C1b_JBL92P9DP4t-BtIdIlmf7hMDE7a02Ps,3284
 gooddata_pipelines/logger/__init__.py,sha256=W-fJvMStnsDUY52AYFhx_LnS2cSCFNf3bB47Iew2j04,129
 gooddata_pipelines/logger/logger.py,sha256=yIMdvqsmOSGQLI4U_tQwxX5E2q_FXUu0Ko7Hv39slFM,3549
 gooddata_pipelines/provisioning/__init__.py,sha256=RZDEiv8nla4Jwa2TZXUdp1NSxg2_-lLqz4h7k2c4v5Y,854
@@ -48,7 +48,7 @@ gooddata_pipelines/provisioning/utils/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pj
 gooddata_pipelines/provisioning/utils/context_objects.py,sha256=sM22hMsxE0XLI1TU0Vs-2kK0vf4YrB1musoAg__4bjc,936
 gooddata_pipelines/provisioning/utils/exceptions.py,sha256=1WnAOlPhqOf0xRcvn70lxAlLb8Oo6m6WCYS4hj9uzDU,3630
 gooddata_pipelines/provisioning/utils/utils.py,sha256=_Tk-mFgbIGpCixDCF9e-3ZYd-g5Jb3SJiLSH465k4jY,2846
-gooddata_pipelines-1.48.1.
-gooddata_pipelines-1.48.1.
-gooddata_pipelines-1.48.1.
-gooddata_pipelines-1.48.1.
+gooddata_pipelines-1.48.1.dev2.dist-info/METADATA,sha256=wouhl67mPG9gt9dyF-oHeL1tIEHtnt5J0ANIF-Wi84I,3750
+gooddata_pipelines-1.48.1.dev2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+gooddata_pipelines-1.48.1.dev2.dist-info/licenses/LICENSE.txt,sha256=PNC7WXGIo6OKkNoPLRxlVrw6jaLcjSTUsSxy9Xcu9Jo,560365
+gooddata_pipelines-1.48.1.dev2.dist-info/RECORD,,
{gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/WHEEL
RENAMED
File without changes

{gooddata_pipelines-1.48.1.dev1.dist-info → gooddata_pipelines-1.48.1.dev2.dist-info}/licenses/LICENSE.txt
RENAMED
File without changes