gooddata-pipelines 1.49.1.dev1__py3-none-any.whl → 1.49.1.dev2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of gooddata-pipelines might be problematic.
- gooddata_pipelines/backup_and_restore/backup_manager.py +44 -42
- gooddata_pipelines/backup_and_restore/constants.py +2 -1
- gooddata_pipelines/backup_and_restore/models/storage.py +40 -2
- gooddata_pipelines/backup_and_restore/storage/s3_storage.py +22 -11
- {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/METADATA +2 -2
- {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/RECORD +8 -8
- {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/WHEEL +0 -0
- {gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/licenses/LICENSE.txt +0 -0

gooddata_pipelines/backup_and_restore/backup_manager.py
CHANGED

@@ -55,7 +55,7 @@ class BackupManager:
 
         self.config = config
 
-        self.storage = self.
+        self.storage = self._get_storage(self.config)
         self.org_id = self._api.get_organization_id()
 
         self.loader = BackupInputProcessor(self._api, self.config.api_page_size)
@@ -67,7 +67,7 @@ class BackupManager:
         host: str,
         token: str,
     ) -> "BackupManager":
-        """Creates a backup worker instance using provided host and token."""
+        """Creates a backup worker instance using the provided host and token."""
         return cls(host=host, token=token, config=config)
 
     @classmethod
@@ -81,7 +81,8 @@ class BackupManager:
         content = profile_content(profile, profiles_path)
         return cls(**content, config=config)
 
-
+    @staticmethod
+    def _get_storage(conf: BackupRestoreConfig) -> BackupStorage:
         """Returns the storage class based on the storage type."""
         if conf.storage_type == StorageType.S3:
             return S3Storage(conf)
@@ -100,7 +101,7 @@ class BackupManager:
         else:
             raise RuntimeError(f"{response.status_code}: {response.text}")
 
-    def
+    def _store_user_data_filters(
         self,
         user_data_filters: dict,
         export_path: Path,
@@ -128,20 +129,20 @@ class BackupManager:
                "user_data_filters",
                filter["id"] + ".yaml",
            )
-            self.
+            self._write_to_yaml(udf_file_path, filter)
 
     @staticmethod
-    def
+    def _move_folder(source: Path, destination: Path) -> None:
         """Moves the source folder to the destination."""
         shutil.move(source, destination)
 
     @staticmethod
-    def
+    def _write_to_yaml(path: str, source: Any) -> None:
         """Writes the source to a YAML file."""
         with open(path, "w") as outfile:
             yaml.dump(source, outfile)
 
-    def
+    def _get_automations_from_api(self, workspace_id: str) -> Any:
         """Returns automations for the workspace as JSON."""
         response: requests.Response = self._api.get_automations(workspace_id)
         if response.ok:
@@ -152,10 +153,10 @@ class BackupManager:
                 + f"{response.status_code}: {response.text}"
             )
 
-    def
+    def _store_automations(self, export_path: Path, workspace_id: str) -> None:
         """Stores the automations in the specified export path."""
         # Get the automations from the API
-        automations: Any = self.
+        automations: Any = self._get_automations_from_api(workspace_id)
 
         automations_folder_path: Path = Path(
             export_path,
@@ -184,8 +185,8 @@ class BackupManager:
         # Get the filter views YAML files from the API
         self._api.store_declarative_filter_views(workspace_id, export_path)
 
-        # Move filter views to the subfolder containing analytics model
-        self.
+        # Move filter views to the subfolder containing the analytics model
+        self._move_folder(
             Path(export_path, "gooddata_layouts", self.org_id, "filter_views"),
             Path(
                 export_path,
@@ -197,7 +198,7 @@ class BackupManager:
             ),
         )
 
-    def
+    def _get_workspace_export(
         self,
         local_target_path: str,
         workspaces_to_export: list[str],
@@ -232,9 +233,9 @@ class BackupManager:
             # be more transparent/readable and possibly safer for threading
             self._api.store_declarative_workspace(workspace_id, export_path)
             self.store_declarative_filter_views(export_path, workspace_id)
-            self.
+            self._store_automations(export_path, workspace_id)
 
-            self.
+            self._store_user_data_filters(
                 user_data_filters, export_path, workspace_id
             )
             self.logger.info(f"Stored export for {workspace_id}")
@@ -250,7 +251,7 @@ class BackupManager:
                 + "is correct and that the workspaces exist."
             )
 
-    def
+    def _archive_gooddata_layouts_to_zip(self, folder: str) -> None:
         """Archives the gooddata_layouts directory to a zip file."""
         try:
             target_subdir = ""
@@ -271,11 +272,12 @@ class BackupManager:
             self.logger.error(f"Error archiving {folder} to zip: {e}")
             raise
 
-
-
+    @staticmethod
+    def _split_to_batches(
+        workspaces_to_export: list[str], batch_size: int
     ) -> list[BackupBatch]:
-        """Splits the list of workspaces
-        The batch is
+        """Splits the list of workspaces into batches of the specified size.
+        The batch is represented as a list of workspace IDs.
         Returns a list of batches (i.e. list of lists of IDs)
         """
         list_of_batches = []
@@ -286,7 +288,7 @@ class BackupManager:
 
         return list_of_batches
 
-    def
+    def _process_batch(
         self,
         batch: BackupBatch,
         stop_event: threading.Event,
@@ -298,14 +300,14 @@ class BackupManager:
         The base wait time is defined by BackupSettings.RETRY_DELAY.
         """
         if stop_event.is_set():
-            # If the stop_event flag is set, return. This will terminate the thread
+            # If the stop_event flag is set, return. This will terminate the thread
             return
 
         try:
             with tempfile.TemporaryDirectory() as tmpdir:
-                self.
+                self._get_workspace_export(tmpdir, batch.list_of_ids)
 
-                self.
+                self._archive_gooddata_layouts_to_zip(
                     str(Path(tmpdir, self.org_id))
                 )
 
@@ -316,7 +318,7 @@ class BackupManager:
             return
 
         elif retry_count < BackupSettings.MAX_RETRIES:
-            # Retry with exponential backoff until MAX_RETRIES
+            # Retry with exponential backoff until MAX_RETRIES
             next_retry = retry_count + 1
             wait_time = BackupSettings.RETRY_DELAY**next_retry
             self.logger.info(
@@ -326,13 +328,13 @@ class BackupManager:
             )
 
             time.sleep(wait_time)
-            self.
+            self._process_batch(batch, stop_event, next_retry)
         else:
-            # If the batch fails after MAX_RETRIES, raise the error
+            # If the batch fails after MAX_RETRIES, raise the error
             self.logger.error(f"Batch failed: {e.__class__.__name__}: {e}")
             raise
 
-    def
+    def _process_batches_in_parallel(
         self,
         batches: list[BackupBatch],
     ) -> None:
@@ -345,14 +347,14 @@ class BackupManager:
         stop_event = threading.Event()
 
         with ThreadPoolExecutor(
-            max_workers=
+            max_workers=self.config.max_workers
         ) as executor:
             # Set the futures tasks.
             futures = []
             for batch in batches:
                 futures.append(
                     executor.submit(
-                        self.
+                        self._process_batch,
                         batch,
                         stop_event,
                     )
@@ -363,10 +365,10 @@ class BackupManager:
             try:
                 future.result()
             except Exception:
-                # On failure, set the flag to True - signal running processes to stop
+                # On failure, set the flag to True - signal running processes to stop
                 stop_event.set()
 
-                # Cancel unstarted threads
+                # Cancel unstarted threads
                 for f in futures:
                     if not f.done():
                         f.cancel()
@@ -383,10 +385,10 @@ class BackupManager:
         workspace in storage specified in the configuration.
 
         Args:
-            path_to_csv (str): Path to a CSV file containing a list of workspace IDs
+            path_to_csv (str): Path to a CSV file containing a list of workspace IDs
            workspace_ids (list[str]): List of workspace IDs
         """
-        self.
+        self._backup(InputType.LIST_OF_WORKSPACES, path_to_csv, workspace_ids)
 
     def backup_hierarchies(
         self, path_to_csv: str | None, workspace_ids: list[str] | None
@@ -394,16 +396,16 @@ class BackupManager:
         """Runs the backup process for a list of hierarchies.
 
         Will take the list of workspace IDs or read the list of workspace IDs
-        from a CSV file and create backup for each those workspaces' hierarchies
+        from a CSV file and create backup for each of those workspaces' hierarchies
         in storage specified in the configuration.
         Workspace hierarchy means the workspace itself and all its direct and
         indirect children.
 
         Args:
-            path_to_csv (str): Path to a CSV file containing a list of workspace IDs
+            path_to_csv (str): Path to a CSV file containing a list of workspace IDs
            workspace_ids (list[str]): List of workspace IDs
         """
-        self.
+        self._backup(InputType.HIERARCHY, path_to_csv, workspace_ids)
 
     def backup_entire_organization(self) -> None:
         """Runs the backup process for the entire organization.
@@ -411,22 +413,22 @@ class BackupManager:
         Will create backup for all workspaces in the organization in storage
         specified in the configuration.
         """
-        self.
+        self._backup(InputType.ORGANIZATION)
 
-    def
+    def _backup(
         self,
         input_type: InputType,
         path_to_csv: str | None = None,
         workspace_ids: list[str] | None = None,
     ) -> None:
-        """Runs the backup process with selected input type."""
+        """Runs the backup process with the selected input type."""
         try:
             workspaces_to_export: list[str] = self.loader.get_ids_to_backup(
                 input_type,
                 path_to_csv,
                 workspace_ids,
             )
-            batches = self.
+            batches = self._split_to_batches(
                 workspaces_to_export, self.config.batch_size
             )
 
@@ -434,7 +436,7 @@ class BackupManager:
                 f"Exporting {len(workspaces_to_export)} workspaces in {len(batches)} batches."
             )
 
-            self.
+            self._process_batches_in_parallel(batches)
 
             self.logger.info("Backup completed")
         except Exception as e:
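
Taken together, the renamed private helpers spell out the export flow: split the workspace IDs into batches, run each batch in a thread pool sized by config.max_workers, retry a failed batch with exponential backoff up to BackupSettings.MAX_RETRIES, and use a shared threading.Event so that one exhausted batch stops the rest. A minimal standalone sketch of that pattern, not the package's API: the function names, the MAX_RETRIES/RETRY_DELAY values, and the export_batch stub are illustrative.

import threading
import time
from concurrent.futures import ThreadPoolExecutor

MAX_RETRIES = 3  # assumed values; the real limits live in the package's BackupSettings
RETRY_DELAY = 2  # base of the exponential backoff, in seconds


def export_batch(batch: list[str]) -> None:
    """Placeholder for the real per-batch export work."""
    print(f"exporting {len(batch)} workspaces")


def split_to_batches(ids: list[str], batch_size: int) -> list[list[str]]:
    # Chunk the workspace IDs into lists of at most batch_size items.
    return [ids[i : i + batch_size] for i in range(0, len(ids), batch_size)]


def process_batch(batch: list[str], stop_event: threading.Event, retry_count: int = 0) -> None:
    if stop_event.is_set():
        return  # another batch already failed; let this thread finish early
    try:
        export_batch(batch)
    except Exception:
        if retry_count < MAX_RETRIES:
            next_retry = retry_count + 1
            time.sleep(RETRY_DELAY**next_retry)  # exponential backoff
            process_batch(batch, stop_event, next_retry)
        else:
            raise


def process_batches_in_parallel(batches: list[list[str]], max_workers: int = 1) -> None:
    stop_event = threading.Event()
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(process_batch, b, stop_event) for b in batches]
        for future in futures:
            try:
                future.result()
            except Exception:
                stop_event.set()  # signal running batches to stop
                for f in futures:
                    if not f.done():
                        f.cancel()  # cancel batches that have not started yet
                raise


process_batches_in_parallel(split_to_batches([f"ws_{i}" for i in range(250)], 100))

Checking future.result() in submission order keeps failure handling simple: the first exception cancels every batch that has not started and re-raises once the pool shuts down.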

gooddata_pipelines/backup_and_restore/constants.py
CHANGED

@@ -1,3 +1,4 @@
+# (C) 2025 GoodData Corporation
 import datetime
 from dataclasses import dataclass
 
@@ -22,7 +23,7 @@ class DirNames:
 
 @dataclass(frozen=True)
 class ConcurrencyDefaults:
-    MAX_WORKERS =
+    MAX_WORKERS = 1
     DEFAULT_BATCH_SIZE = 100
 
 
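
The new MAX_WORKERS = 1 default sits next to DEFAULT_BATCH_SIZE in a frozen dataclass, so the tuning constants cannot be reassigned on an instance at runtime. A quick illustration of that behavior; the class body mirrors the diff, the surrounding script is illustrative only.

from dataclasses import FrozenInstanceError, dataclass


@dataclass(frozen=True)
class ConcurrencyDefaults:
    MAX_WORKERS = 1          # class-level constants shared by every instance
    DEFAULT_BATCH_SIZE = 100


defaults = ConcurrencyDefaults()
print(defaults.MAX_WORKERS)    # 1, resolved through the class attribute
try:
    defaults.MAX_WORKERS = 4   # frozen dataclasses reject attribute assignment
except FrozenInstanceError:
    print("defaults are read-only")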

gooddata_pipelines/backup_and_restore/models/storage.py
CHANGED

@@ -21,10 +21,40 @@ class S3StorageConfig(BaseModel):
 
     backup_path: str
     bucket: str
-    profile: str =
+    profile: Optional[str] = None
     aws_access_key_id: Optional[str] = None
     aws_secret_access_key: Optional[str] = None
-    aws_default_region: Optional[str] =
+    aws_default_region: Optional[str] = "us-east-1"
+
+    @classmethod
+    def from_iam_role(cls, backup_path: str, bucket: str) -> "S3StorageConfig":
+        """Use default IAM role or environment credentials."""
+        return cls(backup_path=backup_path, bucket=bucket)
+
+    @classmethod
+    def from_aws_credentials(
+        cls,
+        backup_path: str,
+        bucket: str,
+        aws_access_key_id: str,
+        aws_secret_access_key: str,
+        aws_default_region: str,
+    ) -> "S3StorageConfig":
+        """Use explicit AWS access keys and region."""
+        return cls(
+            backup_path=backup_path,
+            bucket=bucket,
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            aws_default_region=aws_default_region,
+        )
+
+    @classmethod
+    def from_aws_profile(
+        cls, backup_path: str, bucket: str, profile: str
+    ) -> "S3StorageConfig":
+        """Use a named AWS CLI profile."""
+        return cls(backup_path=backup_path, bucket=bucket, profile=profile)
 
 
 class LocalStorageConfig(BaseModel):
@@ -53,6 +83,14 @@ class BackupRestoreConfig(BaseModel):
             description="Batch size must be greater than 0",
         ),
     ] = Field(default=BackupSettings.DEFAULT_BATCH_SIZE)
+    max_workers: Annotated[
+        int,
+        Field(
+            gt=0,
+            lt=3,
+            description="Max workers must be greater than 0 and less than 3",
+        ),
+    ] = Field(default=BackupSettings.MAX_WORKERS)
 
     @classmethod
     def from_yaml(cls, conf_path: str) -> "BackupRestoreConfig":
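
The three new classmethods make the supported credential modes explicit instead of leaving every field optional at the call site. A hedged usage sketch, assuming the import path from the RECORD listing; bucket, path, profile, and key values are placeholders.

from gooddata_pipelines.backup_and_restore.models.storage import S3StorageConfig

# Default IAM role / environment credentials: no secrets in the config at all.
iam_config = S3StorageConfig.from_iam_role(
    backup_path="org-backups/", bucket="example-backup-bucket"
)

# Named AWS CLI profile.
profile_config = S3StorageConfig.from_aws_profile(
    backup_path="org-backups/", bucket="example-backup-bucket", profile="backups"
)

# Explicit access keys and region.
key_config = S3StorageConfig.from_aws_credentials(
    backup_path="org-backups/",
    bucket="example-backup-bucket",
    aws_access_key_id="AKIA...",
    aws_secret_access_key="...",
    aws_default_region="us-east-1",
)

The new max_workers field on BackupRestoreConfig is validated with gt=0 and lt=3, so only one or two worker threads can be configured, and it defaults to BackupSettings.MAX_WORKERS.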

gooddata_pipelines/backup_and_restore/storage/s3_storage.py
CHANGED

@@ -22,6 +22,7 @@ class S3Storage(BackupStorage):
 
         self._config = conf.storage
         self._session = self._create_boto_session(self._config)
+        self._client = self._session.client("s3")
         self._resource = self._session.resource("s3")
         self._bucket = self._resource.Bucket(self._config.bucket)  # type: ignore [missing library stubs]
         suffix = "/" if not self._config.backup_path.endswith("/") else ""
@@ -43,32 +44,40 @@ class S3Storage(BackupStorage):
                 )
             except Exception:
                 self.logger.warning(
-                    "Failed to create boto3 session with supplied credentials.
+                    "Failed to create boto3 session with supplied credentials."
+                )
+
+        if config.profile:
+            try:
+                return boto3.Session(profile_name=config.profile)
+            except Exception:
+                self.logger.warning(
+                    f"AWS profile [{config.profile}] not found."
                 )
 
         try:
-            return boto3.Session(
+            return boto3.Session()
         except Exception:
-            self.logger.
-
+            self.logger.error(
+                "Failed to create boto3 session with default IAM role or environment credentials."
+            )
+            raise RuntimeError(
+                "Unable to create AWS session. Please check your AWS credentials, profile, or IAM role configuration."
             )
-
-            return boto3.Session()
 
     def _verify_connection(self) -> None:
         """
         Pings the S3 bucket to verify that the connection is working.
         """
         try:
-
-            self._resource.meta.client.head_bucket(Bucket=self._config.bucket)
+            self._client.head_bucket(Bucket=self._config.bucket)
         except Exception as e:
             raise RuntimeError(
                 f"Failed to connect to S3 bucket {self._config.bucket}: {e}"
             )
 
     def export(self, folder: str, org_id: str) -> None:
-        """Uploads the content of the folder to S3 as backup."""
+        """Uploads the content of the folder to S3 as a backup."""
         storage_path = f"{self._config.bucket}/{self._backup_path}"
         self.logger.info(f"Uploading {org_id} to {storage_path}")
         folder = f"{folder}/{org_id}"
@@ -77,10 +86,12 @@ class S3Storage(BackupStorage):
             export_path = (
                 f"{self._backup_path}{org_id}/{full_path[len(folder) + 1 :]}/"
            )
-            self.
+            self._client.put_object(Bucket=self._config.bucket, Key=export_path)
 
            for file in files:
                full_path = os.path.join(subdir, file)
                with open(full_path, "rb") as data:
                    export_path = f"{self._backup_path}{org_id}/{full_path[len(folder) + 1 :]}"
-                    self.
+                    self._client.put_object(
+                        Bucket=self._config.bucket, Key=export_path, Body=data
+                    )
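
The session creation now falls back in a fixed order: explicit credentials when supplied, then a named profile, then the default boto3 credential chain, raising a RuntimeError when none of those yields a session; the dedicated S3 client created in __init__ is what head_bucket and put_object go through. A standalone sketch of the same fallback-and-verify idea; the bucket name and the plain print logging are placeholders.

import boto3


def create_session(profile: str | None = None) -> boto3.Session:
    # Prefer an explicit AWS CLI profile when one is configured.
    if profile:
        try:
            return boto3.Session(profile_name=profile)
        except Exception:
            print(f"AWS profile [{profile}] not found, falling back.")
    # Fall back to the default credential chain (env vars, shared config, IAM role).
    return boto3.Session()


session = create_session()
s3 = session.client("s3")
# head_bucket is a cheap call that fails fast if the bucket is unreachable.
s3.head_bucket(Bucket="example-backup-bucket")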
{gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gooddata-pipelines
-Version: 1.49.1.
+Version: 1.49.1.dev2
 Summary: GoodData Cloud lifecycle automation pipelines
 Author-email: GoodData <support@gooddata.com>
 License: MIT
@@ -8,7 +8,7 @@ License-File: LICENSE.txt
 Requires-Python: >=3.10
 Requires-Dist: boto3-stubs<2.0.0,>=1.39.3
 Requires-Dist: boto3<2.0.0,>=1.39.3
-Requires-Dist: gooddata-sdk~=1.49.1.
+Requires-Dist: gooddata-sdk~=1.49.1.dev2
 Requires-Dist: pydantic<3.0.0,>=2.11.3
 Requires-Dist: requests<3.0.0,>=2.32.3
 Requires-Dist: types-pyyaml<7.0.0,>=6.0.12.20250326
{gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/RECORD
RENAMED

@@ -9,17 +9,17 @@ gooddata_pipelines/api/gooddata_sdk.py,sha256=wd5O4e9BQLWUawt6odrs5a51nqFGthBkvq
 gooddata_pipelines/api/utils.py,sha256=3QY_aYH17I9THoCINE3l-n5oj52k-gNeT1wv6Z_VxN8,1433
 gooddata_pipelines/backup_and_restore/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/backup_and_restore/backup_input_processor.py,sha256=ex1tGwETdHDDBRJ_DGKZsZbH6uoRuOrbGbKOC976H5s,7940
-gooddata_pipelines/backup_and_restore/backup_manager.py,sha256=
-gooddata_pipelines/backup_and_restore/constants.py,sha256=
+gooddata_pipelines/backup_and_restore/backup_manager.py,sha256=qfweyFxlJRZ-rfxrkpQqhiZKQyvz3qFP8mnvnfBUV7U,15950
+gooddata_pipelines/backup_and_restore/constants.py,sha256=TYw4hU5hhzDVTLJa0gWseaiSs_VboWsYwW7QsqtJ1hA,939
 gooddata_pipelines/backup_and_restore/csv_reader.py,sha256=0Kw7mJT7REj3Gjqfsc6YT9MbhcqfCGNB_SKBwzTI1rk,1268
 gooddata_pipelines/backup_and_restore/models/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/backup_and_restore/models/input_type.py,sha256=CBKJigKdmZ-NJD9MSfNhq89bo86W0AqCMMoyonbd1QA,239
-gooddata_pipelines/backup_and_restore/models/storage.py,sha256=
+gooddata_pipelines/backup_and_restore/models/storage.py,sha256=GToCc1M2OlqZJd9NcrIZKsZH_FCD_P_XjdHB4QPtAvo,2791
 gooddata_pipelines/backup_and_restore/models/workspace_response.py,sha256=eQbYLgRQc17IRG0yPTAJVrD-Xs05SzuwtzoNrPT2DoY,833
 gooddata_pipelines/backup_and_restore/storage/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pjdIvtf25ut0r8ZwZVbi4s,32
 gooddata_pipelines/backup_and_restore/storage/base_storage.py,sha256=67wdItlG3neExeb_eCUDQhswdUB62X5Nyj9sOImB_Hg,487
 gooddata_pipelines/backup_and_restore/storage/local_storage.py,sha256=NvhPRzRAvuSpc5qCDyPqZaMB0i1jeZOZczaSwjUSGEg,1155
-gooddata_pipelines/backup_and_restore/storage/s3_storage.py,sha256=
+gooddata_pipelines/backup_and_restore/storage/s3_storage.py,sha256=ZAysu4sPMAvdWs3RUroHHp2XZLHeU_LhJ5qBHlBQ7n4,3732
 gooddata_pipelines/logger/__init__.py,sha256=W-fJvMStnsDUY52AYFhx_LnS2cSCFNf3bB47Iew2j04,129
 gooddata_pipelines/logger/logger.py,sha256=yIMdvqsmOSGQLI4U_tQwxX5E2q_FXUu0Ko7Hv39slFM,3549
 gooddata_pipelines/provisioning/__init__.py,sha256=RZDEiv8nla4Jwa2TZXUdp1NSxg2_-lLqz4h7k2c4v5Y,854
@@ -48,7 +48,7 @@ gooddata_pipelines/provisioning/utils/__init__.py,sha256=-BG28PGDbalLyZGQjpFG0pj
 gooddata_pipelines/provisioning/utils/context_objects.py,sha256=sM22hMsxE0XLI1TU0Vs-2kK0vf4YrB1musoAg__4bjc,936
 gooddata_pipelines/provisioning/utils/exceptions.py,sha256=1WnAOlPhqOf0xRcvn70lxAlLb8Oo6m6WCYS4hj9uzDU,3630
 gooddata_pipelines/provisioning/utils/utils.py,sha256=_Tk-mFgbIGpCixDCF9e-3ZYd-g5Jb3SJiLSH465k4jY,2846
-gooddata_pipelines-1.49.1.
-gooddata_pipelines-1.49.1.
-gooddata_pipelines-1.49.1.
-gooddata_pipelines-1.49.1.
+gooddata_pipelines-1.49.1.dev2.dist-info/METADATA,sha256=Kw4aUQiXUnm6_NC9PaM50L9NIgW22Md01ZCQiek0VkE,3522
+gooddata_pipelines-1.49.1.dev2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+gooddata_pipelines-1.49.1.dev2.dist-info/licenses/LICENSE.txt,sha256=PNC7WXGIo6OKkNoPLRxlVrw6jaLcjSTUsSxy9Xcu9Jo,560365
+gooddata_pipelines-1.49.1.dev2.dist-info/RECORD,,

{gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/WHEEL
RENAMED
File without changes

{gooddata_pipelines-1.49.1.dev1.dist-info → gooddata_pipelines-1.49.1.dev2.dist-info}/licenses/LICENSE.txt
RENAMED
File without changes