aepp 0.5.1__tar.gz → 0.5.1.post1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aepp-0.5.1/aepp.egg-info → aepp-0.5.1.post1}/PKG-INFO +1 -1
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/__init__.py +24 -24
- aepp-0.5.1.post1/aepp/__version__.py +1 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/cli/__main__.py +106 -97
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/deletion.py +22 -22
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/synchronizer.py +2 -2
- {aepp-0.5.1 → aepp-0.5.1.post1/aepp.egg-info}/PKG-INFO +1 -1
- aepp-0.5.1/aepp/__version__.py +0 -1
- {aepp-0.5.1 → aepp-0.5.1.post1}/LICENSE +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/MANIFEST.in +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/README.md +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/accesscontrol.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/catalog.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/classmanager.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/cli/__init__.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/config.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/configs.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/connector.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/customerprofile.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/dataaccess.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/dataprep.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/datasets.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/datatypemanager.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/destination.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/destinationinstanceservice.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/edge.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/exportDatasetToDataLandingZone.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/fieldgroupmanager.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/flowservice.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/hygiene.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/identity.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/ingestion.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/observability.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/policy.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/privacyservice.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/queryservice.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/sandboxes.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/schema.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/schemamanager.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/segmentation.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/sensei.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/som.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/tags.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp/utils.py +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp.egg-info/SOURCES.txt +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp.egg-info/dependency_links.txt +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp.egg-info/entry_points.txt +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp.egg-info/requires.txt +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/aepp.egg-info/top_level.txt +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/pyproject.toml +0 -0
- {aepp-0.5.1 → aepp-0.5.1.post1}/setup.cfg +0 -0
{aepp-0.5.1 → aepp-0.5.1.post1}/aepp/__init__.py

@@ -165,7 +165,7 @@ def __titleSafe__(text: str) -> str:
     return text
 
 
-def extractSandboxArtefacts(
+def extractSandboxArtifacts(
     sandbox: 'ConnectObject' = None,
     localFolder: Union[str, Path] = None,
     region: str = "nld2",
@@ -310,20 +310,20 @@ def extractSandboxArtefacts(
             with open(f"{audiencePath / safe_name}.json",'w') as f:
                 json.dump(el,f,indent=2)
 
-def extractSandboxArtefact(
+def extractSandboxArtifact(
    sandbox: 'ConnectObject' = None,
    localFolder: Union[str, Path] = None,
-   artefact: str = None,
-   artefactType: str = None,
+   artifact: str = None,
+   artifactType: str = None,
    region: str = "nld2",
    ):
    """
-   Export a single artefact and its dependencies from the sandbox.
+   Export a single artifact and its dependencies from the sandbox.
    Arguments:
        sandbox: REQUIRED: the instance of a ConnectObject that contains the sandbox information and connection.
        localFolder: OPTIONAL: the local folder where to extract the sandbox. If not provided, it will use the current working directory and name the folder the name of the sandbox.
-       artefact: REQUIRED: the id or the name of the artefact to export.
-       artefactType: REQUIRED: the type of artefact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity','mergepolicy',audience'
+       artifact: REQUIRED: the id or the name of the artifact to export.
+       artifactType: REQUIRED: the type of artifact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity','mergepolicy',audience'
        region: OPTIONAL: the region of the sandbox (default: nld2). This is used to fetch the correct API endpoints for the identities.
            Possible values: "va7","aus5", "can2", "ind2"
    """
@@ -348,24 +348,24 @@ def extractSandboxArtefact(
    from aepp import schema, catalog, identity
    sch = schema.Schema(config=sandbox)
    cat = catalog.Catalog(config=sandbox)
-   if artefactType == 'class':
-       __extractClass__(artefact,completePath,sandbox)
-   elif artefactType == 'schema':
-       __extractSchema__(artefact,completePath,sandbox,region)
-   elif artefactType == 'fieldgroup':
-       __extractFieldGroup__(artefact,completePath,sandbox)
-   elif artefactType == 'datatype':
-       __extractDataType__(artefact,completePath,sandbox)
-   elif artefactType == 'dataset':
-       __extractDataset__(artefact,completePath,sandbox,region)
-   elif artefactType == 'identity':
-       __extractIdentity__(artefact,region,completePath,sandbox)
-   elif artefactType == 'mergepolicy':
-       __extractMergePolicy__(artefact,completePath,sandbox)
-   elif artefactType == 'audience':
-       __extractAudience__(artefact,completePath,sandbox)
+   if artifactType == 'class':
+       __extractClass__(artifact,completePath,sandbox)
+   elif artifactType == 'schema':
+       __extractSchema__(artifact,completePath,sandbox,region)
+   elif artifactType == 'fieldgroup':
+       __extractFieldGroup__(artifact,completePath,sandbox)
+   elif artifactType == 'datatype':
+       __extractDataType__(artifact,completePath,sandbox)
+   elif artifactType == 'dataset':
+       __extractDataset__(artifact,completePath,sandbox,region)
+   elif artifactType == 'identity':
+       __extractIdentity__(artifact,region,completePath,sandbox)
+   elif artifactType == 'mergepolicy':
+       __extractMergePolicy__(artifact,completePath,sandbox)
+   elif artifactType == 'audience':
+       __extractAudience__(artifact,completePath,sandbox)
    else:
-       raise ValueError("artefactType not recognized")
+       raise ValueError("artifactType not recognized")
 
 def __extractClass__(classEl: str,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None):
    classPath = Path(folder) / 'class'
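For orientation, here is how the renamed helpers are meant to be called after this release. The function names, parameters, and defaults come from the hunks above; the `aepp.configure(...)` arguments are an assumption based on how the CLI builds its `ConnectObject`, so treat this as a sketch rather than the package's documented API.

```python
import aepp

# Assumption: a ConnectObject built with connectInstance=True, as the CLI does.
mySandbox = aepp.configure(
    org_id="AAA@AdobeOrg",      # placeholder credentials
    client_id="CLIENT_ID",
    secret="SECRET",
    scopes="SCOPES",
    sandbox="dev",
    connectInstance=True,
)

# Renamed in 0.5.1.post1 (was extractSandboxArtefacts in 0.5.1):
aepp.extractSandboxArtifacts(
    sandbox=mySandbox,
    localFolder="./extractions",
    region="nld2",              # or "va7", "aus5", "can2", "ind2"
)

# Export a single artifact and its dependencies:
aepp.extractSandboxArtifact(
    sandbox=mySandbox,
    localFolder="./extractions",
    artifact="My Schema",       # id or name
    artifactType="schema",      # 'class','schema','fieldgroup','datatype',...
)
```

Code written against the old Artefact spellings will no longer find those names after upgrading.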
aepp-0.5.1.post1/aepp/__version__.py
ADDED

@@ -0,0 +1 @@
+__version__ = "0.5.1-1"
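The literal `"0.5.1-1"` is an alternate PEP 440 spelling of a post-release: installers normalize it to `0.5.1.post1`, which is why the archive above is named `0.5.1.post1` even though the module stores `"0.5.1-1"`. A quick illustration with the `packaging` library (not part of this diff, just a demonstration):

```python
from packaging.version import Version

v = Version("0.5.1-1")
print(v)                     # 0.5.1.post1  (normalized form)
print(v.is_postrelease)      # True
print(v > Version("0.5.1"))  # True: a post-release sorts after its base version
```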
{aepp-0.5.1 → aepp-0.5.1.post1}/aepp/cli/__main__.py

@@ -13,12 +13,16 @@ from io import FileIO
 import pandas as pd
 from datetime import datetime
 import urllib.parse
+from typing import Any, Concatenate, ParamSpec, ParamSpecKwargs
+from collections.abc import Callable
+
+P = ParamSpec("P")
 
 # --- 1. The Decorator (The Gatekeeper) ---
-def login_required(f):
+def login_required(f:Callable[Concatenate["ServiceShell", P], None]) -> Callable[Concatenate["ServiceShell", P], None]:
     """Decorator to block commands if not logged in."""
     @wraps(f)
-    def wrapper(self, *args, **kwargs):
+    def wrapper(self:"ServiceShell", *args:P.args, **kwargs:P.kwargs) -> None:
         if not hasattr(self, 'config') or self.config is None:
             print("(!) Access Denied: You must setup config first.")
             return
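The new annotations use `ParamSpec` and `Concatenate` so that type checkers see the wrapper with exactly the wrapped method's parameters, with the first positional parameter pinned to the shell instance. A self-contained sketch of the same pattern with generic names (this is not the package's code):

```python
from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec

P = ParamSpec("P")

class Shell:
    config: object | None = None

def require_config(
    f: Callable[Concatenate[Shell, P], None],
) -> Callable[Concatenate[Shell, P], None]:
    """Block the call when the shell has no configuration yet."""
    @wraps(f)
    def wrapper(self: Shell, *args: P.args, **kwargs: P.kwargs) -> None:
        if self.config is None:
            print("(!) Access Denied: You must setup config first.")
            return
        return f(self, *args, **kwargs)
    return wrapper

class MyShell(Shell):
    @require_config
    def do_hello(self, name: str) -> None:
        print(f"hello {name}")

MyShell().do_hello("world")  # prints the access-denied message: config is None
```

One aside: `ParamSpecKwargs`, used below to annotate `**kwargs` in `__init__`, is normally obtained by writing `P.kwargs` rather than used as a bare annotation; Python accepts it at runtime, though type checkers may flag it.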
@@ -29,26 +33,30 @@ console = Console()
 
 # --- 2. The Interactive Shell ---
 class ServiceShell(cmd.Cmd):
-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs:ParamSpecKwargs) -> None:
         super().__init__()
         self.config = None
         self.connectInstance = True
-        …
-        …
-        …
-        …
+        if kwargs.get("config_file") is not None:
+            config_path = Path(kwargs.get("config_file"))
+            if not config_path.is_absolute():
+                config_path = Path.cwd() / config_path
+        if kwargs.get("config_file") is not None:
             dict_config = json.load(FileIO(config_path))
-            …
-            …
-            …
-            …
-            self.…
+            if kwargs.get("sandbox") is None:
+                self.sandbox = str(dict_config.get("sandbox-name","prod"))
+            else:
+                self.sandbox = str(kwargs.get("sandbox","prod"))
+            self.secret = str(dict_config.get("secret",kwargs.get("secret")))
+            self.org_id = str(dict_config.get("org_id",kwargs.get("org_id")))
+            self.client_id = str(dict_config.get("client_id",kwargs.get("client_id")))
+            self.scopes = str(dict_config.get("scopes",kwargs.get("scopes")))
         else:
-            self.sandbox = kwargs.get("sandbox","prod")
-            self.secret = kwargs.get("secret")
-            self.org_id = kwargs.get("org_id")
-            self.client_id = kwargs.get("client_id")
-            self.scopes = kwargs.get("scopes")
+            self.sandbox = str(kwargs.get("sandbox","prod"))
+            self.secret = str(kwargs.get("secret"))
+            self.org_id = str(kwargs.get("org_id"))
+            self.client_id = str(kwargs.get("client_id"))
+            self.scopes = str(kwargs.get("scopes"))
         self.connectInstance = True
         if self.sandbox is not None and self.secret is not None and self.org_id is not None and self.client_id is not None and self.scopes is not None:
             print("Configuring connection...")

(Removed lines marked `…` were not preserved in the published diff view.)
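In practice the constructor now resolves a relative `config_file` against the current working directory before loading it, and coerces every credential to `str`. A hedged construction sketch (the import path and the file contents are assumptions):

```python
from aepp.cli.__main__ import ServiceShell

# Relative paths are resolved against Path.cwd() by __init__ (see hunk above).
shell = ServiceShell(config_file="aepp_config.json")

# Or pass the credentials directly; each value goes through str().
shell = ServiceShell(
    sandbox="dev",
    org_id="AAA@AdobeOrg",
    client_id="CLIENT_ID",
    secret="SECRET",
    scopes="SCOPES",
)
shell.cmdloop()  # standard cmd.Cmd read-eval loop
```

One side effect of the blanket `str()` coercion worth knowing: `str(None)` is the string `"None"`, so a missing kwarg no longer stays `None` for the `is not None` checks that follow.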
@@ -63,7 +71,7 @@ class ServiceShell(cmd.Cmd):
             self.prompt = f"{self.config.sandbox}> "
             console.print(Panel(f"Connected to [bold green]{self.sandbox}[/bold green]", style="blue"))
 
-    def do_createConfigFile(self, arg):
+    def do_createConfigFile(self, arg:Any) -> None:
         """Create a configuration file for future use"""
         parser = argparse.ArgumentParser(prog='createConfigFile', add_help=True)
         parser.add_argument("-f", "--file_name", help="file name for your config file", default="aepp_config.json")
@@ -76,7 +84,7 @@ class ServiceShell(cmd.Cmd):
 
 
     # # --- Commands ---
-    def do_config(self, arg):
+    def do_config(self, arg:Any) -> None:
         """connect to an AEP instance"""
         parser = argparse.ArgumentParser(prog='config', add_help=True)
         parser.add_argument("-sx", "--sandbox", help="Auto-login sandbox")
@@ -89,18 +97,18 @@ class ServiceShell(cmd.Cmd):
         if args.config_file:
             mypath = Path.cwd()
             dict_config = json.load(FileIO(mypath / Path(args.config_file)))
-            self.sandbox = args.sandbox if args.sandbox else dict_config.get("sandbox-name",args.sandbox)
-            self.secret = dict_config.get("secret",args.secret)
-            self.org_id = dict_config.get("org_id",args.org_id)
-            self.client_id = dict_config.get("client_id",args.client_id)
-            self.scopes = dict_config.get("scopes",args.scopes)
+            self.sandbox = str(args.sandbox) if args.sandbox else str(dict_config.get("sandbox-name",args.sandbox))
+            self.secret = str(dict_config.get("secret",args.secret))
+            self.org_id = str(dict_config.get("org_id",args.org_id))
+            self.client_id = str(dict_config.get("client_id",args.client_id))
+            self.scopes = str(dict_config.get("scopes",args.scopes))
             self.connectInstance = True
         else:
-            if args.sandbox: self.sandbox = args.sandbox
-            if args.secret: self.secret = args.secret
-            if args.org_id: self.org_id = args.org_id
-            if args.scopes: self.scopes = args.scopes
-            if args.client_id: self.client_id = args.client_id
+            if args.sandbox: self.sandbox = str(args.sandbox)
+            if args.secret: self.secret = str(args.secret)
+            if args.org_id: self.org_id = str(args.org_id)
+            if args.scopes: self.scopes = str(args.scopes)
+            if args.client_id: self.client_id = str(args.client_id)
         console.print("Configuring connection...", style="blue")
         self.config = aepp.configure(
             connectInstance=self.connectInstance,
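Note the precedence in the config-file branch above: `dict_config.get(key, fallback)` returns the file's value whenever the key exists, so a CLI flag only wins when the key is absent from the file (except for `--sandbox`, which is checked first). In plain Python:

```python
dict_config = {"secret": "from-file"}
flag = "from-flag"

# Mirrors `self.secret = str(dict_config.get("secret", args.secret))`:
print(str(dict_config.get("secret", flag)))  # from-file: the file wins
print(str({}.get("secret", flag)))           # from-flag: fallback applies
```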
@@ -114,15 +122,15 @@ class ServiceShell(cmd.Cmd):
         self.prompt = f"{self.config.sandbox}> "
         return
 
-    def do_change_sandbox(self, args):
+    def do_change_sandbox(self, args:Any) -> None:
         """Change the current sandbox after configuration"""
         parser = argparse.ArgumentParser(prog='change sandbox', add_help=True)
         parser.add_argument("sandbox", help="sandbox name to switch to")
         args = parser.parse_args(shlex.split(args))
-        self.sandbox = args.sandbox if args.sandbox else console.print(Panel("(!) Please provide a sandbox name using -sx or --sandbox", style="red"))
+        self.sandbox = str(args.sandbox) if args.sandbox else console.print(Panel("(!) Please provide a sandbox name using -sx or --sandbox", style="red"))
         if self.config is not None:
             if args.sandbox:
-                self.config.setSandbox(args.sandbox)
+                self.config.setSandbox(str(args.sandbox))
                 self.prompt = f"{self.config.sandbox}> "
                 console.print(Panel(f"Sandbox changed to: [bold green]{self.config.sandbox}[/bold green]", style="blue"))
             else:
@@ -130,7 +138,7 @@ class ServiceShell(cmd.Cmd):
 
 
     @login_required
-    def do_get_schemas(self, args):
+    def do_get_schemas(self, args:Any) -> None:
         """List all schemas in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_schemas', add_help=True)
         parser.add_argument("-sv", "--save",help="Save schemas to CSV file")
@@ -162,7 +170,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_ups_schemas(self, args):
+    def do_get_ups_schemas(self, args) -> None:
         """List all schemas enabled for Profile in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_schemas_enabled', add_help=True)
         parser.add_argument("-sv", "--save",help="Save enabled schemas to CSV file")
@@ -200,8 +208,9 @@ class ServiceShell(cmd.Cmd):
             console.print(f"(!) Error: {str(e)}", style="red")
         except SystemExit:
             return
+
     @login_required
-    def do_get_ups_fieldgroups(self, args):
+    def do_get_ups_fieldgroups(self, args:Any) -> None:
         """List all field groups enabled for Profile in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_fieldgroups_enabled', add_help=True)
         parser.add_argument("-sv", "--save",help="Save enabled field groups to CSV file")
@@ -241,7 +250,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_profile_schemas(self,args):
+    def do_get_profile_schemas(self,args:Any) -> None:
         """Get the current profile schema"""
         parser = argparse.ArgumentParser(prog='get_schemas_enabled', add_help=True)
         try:
@@ -268,7 +277,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_profile_json(self,args):
+    def do_get_union_profile_json(self,args:Any) -> None:
         """Get the current Profile union schema"""
         parser = argparse.ArgumentParser(prog='get_union_profile', add_help=True)
         try:
@@ -284,7 +293,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_profile_csv(self,args):
+    def do_get_union_profile_csv(self,args:Any) -> None:
         """Get the current Profile union schema"""
         parser = argparse.ArgumentParser(prog='get_union_profile', add_help=True)
         parser.add_argument("-f","--full",default=False,help="Get full schema information with all details",type=bool)
@@ -300,7 +309,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_event_json(self,args):
+    def do_get_union_event_json(self,args:Any) -> None:
         """Get the current Experience Event union schema"""
         parser = argparse.ArgumentParser(prog='get_union_event', add_help=True)
         try:
@@ -316,7 +325,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_event_csv(self,args):
+    def do_get_union_event_csv(self,args:Any) -> None:
         """Get the current Experience Event union schema"""
         parser = argparse.ArgumentParser(prog='get_union_event', add_help=True)
         parser.add_argument("-f","--full",default=False,help="Get full schema information with all details",type=bool)
@@ -332,7 +341,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_event_schemas(self,args):
+    def do_get_event_schemas(self,args:Any) -> None:
         """Get the current Experience Event schemas"""
         parser = argparse.ArgumentParser(prog='get_event_schemas', add_help=True)
         parser.add_argument("-sv", "--save",help="Save event schemas to CSV file")
@@ -364,7 +373,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_event_json(self,args):
+    def do_get_union_event_json(self,args:Any) -> None:
         """Get the current Experience Event union schema"""
         parser = argparse.ArgumentParser(prog='get_union_event', add_help=True)
         try:
@@ -381,7 +390,7 @@ class ServiceShell(cmd.Cmd):
 
 
     @login_required
-    def do_get_schema_xdm(self, arg):
+    def do_get_schema_xdm(self, arg:Any) -> None:
         """Get schema JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_schema_xdm', add_help=True)
         parser.add_argument("schema", help="Schema title, $id or alt:Id to retrieve")
@@ -414,7 +423,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_schema_csv(self, arg):
+    def do_get_schema_csv(self, arg:Any) -> None:
         """Get schema CSV by name or ID"""
         parser = argparse.ArgumentParser(prog='get_schema_csv', add_help=True)
         parser.add_argument("schema", help="Schema $id or alt:Id to retrieve")
@@ -444,7 +453,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_schema_json(self, args):
+    def do_get_schema_json(self, args:Any) -> None:
         """Get schema JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_schema_json', add_help=True)
         parser.add_argument("schema", help="Schema $id or alt:Id to retrieve")
@@ -473,7 +482,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_fieldgroups(self, args):
+    def do_get_fieldgroups(self, args:Any) -> None:
         """List all field groups in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_fieldgroups', add_help=True)
         parser.add_argument("-sv", "--save",help="Save field groups to CSV file")
@@ -503,7 +512,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_fieldgroup_json(self, args):
+    def do_get_fieldgroup_json(self, args:Any) -> None:
         """Get field group JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_fieldgroup_json', add_help=True)
         parser.add_argument("fieldgroup", help="Field Group name, $id or alt:Id to retrieve")
@@ -532,7 +541,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_fieldgroup_csv(self, args):
+    def do_get_fieldgroup_csv(self, args:Any) -> None:
         """Get field group CSV by name or ID"""
         parser = argparse.ArgumentParser(prog='get_fieldgroup_csv', add_help=True)
         parser.add_argument("fieldgroup", help="Field Group name, $id or alt:Id to retrieve")
@@ -560,7 +569,8 @@ class ServiceShell(cmd.Cmd):
         except SystemExit:
             return
 
-    …
+    @login_required
+    def do_get_datatypes(self, args:Any) -> None:
         """List all data types in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_datatypes', add_help=True)
         try:
@@ -585,7 +595,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datatype_csv(self, args):
+    def do_get_datatype_csv(self, args:Any) -> None:
         """Get data type CSV by name or ID"""
         parser = argparse.ArgumentParser(prog='get_datatype_csv', add_help=True)
         parser.add_argument("datatype", help="Data Type name, $id or alt:Id to retrieve")
@@ -614,7 +624,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datatype_json(self, args):
+    def do_get_datatype_json(self, args:Any) -> None:
         """Get data type JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_datatype_json', add_help=True)
         parser.add_argument("datatype", help="Data Type name, $id or alt:Id to retrieve")
@@ -644,7 +654,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_enable_schema_for_ups(self, args):
+    def do_enable_schema_for_ups(self, args:Any) -> None:
         """Enable a schema for Profile"""
         parser = argparse.ArgumentParser(prog='enable_schema_for_ups', add_help=True)
         parser.add_argument("schema_id", help="Schema ID to enable for Profile")
@@ -659,7 +669,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_upload_fieldgroup_definition_csv(self,args):
+    def do_upload_fieldgroup_definition_csv(self,args:Any) -> None:
         """Upload a field group definition from a CSV file"""
         parser = argparse.ArgumentParser(prog='upload_fieldgroup_definition_csv', add_help=True)
         parser.add_argument("csv_path", help="Path to the field group CSV file")
@@ -683,7 +693,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_upload_fieldgroup_definition_xdm(self,args):
+    def do_upload_fieldgroup_definition_xdm(self,args:Any) -> None:
         """Upload a field group definition from a JSON XDM file"""
         parser = argparse.ArgumentParser(prog='upload_fieldgroup_definition_xdm', add_help=True)
         parser.add_argument("xdm_path", help="Path to the field group JSON XDM file")
@@ -708,7 +718,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datasets(self, args):
+    def do_get_datasets(self, args:Any) -> None:
         """List all datasets in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_datasets', add_help=True)
         try:
@@ -739,7 +749,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datasets_infos(self, args):
+    def do_get_datasets_infos(self, args:Any) -> None:
         """List all datasets in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_datasets_infos', add_help=True)
         try:
@@ -771,7 +781,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_createDataset(self, args):
+    def do_createDataset(self, args:Any) -> None:
         """Create a new dataset in the current sandbox"""
         parser = argparse.ArgumentParser(prog='createDataset', add_help=True)
         parser.add_argument("dataset_name", help="Name of the dataset to create")
@@ -787,7 +797,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_enable_dataset_for_ups(self, args):
+    def do_enable_dataset_for_ups(self, args:Any) -> None:
         """Enable a dataset for Profile"""
         parser = argparse.ArgumentParser(prog='enable_dataset_for_ups', add_help=True)
         parser.add_argument("dataset", help="Dataset ID or Dataset Name to enable for Profile")
@@ -805,8 +815,8 @@ class ServiceShell(cmd.Cmd):
         except SystemExit:
             return
 
-    @login_required
-    def do_get_identities(self, args):
+    @login_required
+    def do_get_identities(self, args:Any) -> None:
         """List all identities in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_identities', add_help=True)
         parser.add_argument("-r","--region", help="Region to get identities from: 'ndl2' (default), 'va7', 'aus5', 'can2', 'ind2'", default='ndl2')
@@ -837,7 +847,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_flows(self, args):
+    def do_get_flows(self, args:Any) -> None:
         """List flows in the current sandbox based on parameters provided. By default, list all sources and destinations."""
         parser = argparse.ArgumentParser(prog='get_flows', add_help=True)
         parser.add_argument("-i","--internal_flows",help="Get internal flows", default=False,type=bool)
@@ -1000,7 +1010,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_flow_errors(self,args):
+    def do_get_flow_errors(self,args:Any) -> None:
         """Get errors for a specific flow, saving it in a JSON file for specific timeframe, default last 24 hours."""
         parser = argparse.ArgumentParser(prog='get_flow_errors', add_help=True)
         parser.add_argument("flow_id", help="Flow ID to get errors for")
@@ -1024,7 +1034,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_create_dataset_http_source(self,args):
+    def do_create_dataset_http_source(self,args:Any) -> None:
         """Create an HTTP Source connection for a specific dataset, XDM compatible data only."""
         parser = argparse.ArgumentParser(prog='do_create_dataset_http_source', add_help=True)
         parser.add_argument("dataset", help="Name or ID of the Dataset Source connection to create")
@@ -1050,7 +1060,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_DLZ_credential(self,args):
+    def do_get_DLZ_credential(self,args:Any) -> None:
         """Get Data Lake Zone credential for the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_DLZ_credential', add_help=True)
         parser.add_argument("type",nargs='?',help="Type of credential to retrieve: 'user_drop_zone' or 'dlz_destination'",default="user_drop_zone")
@@ -1066,7 +1076,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_queries(self, args):
+    def do_get_queries(self, args:Any)-> None:
         """List top 1000 queries in the current sandbox for the last 24 hours by default, optionally filtered by dataset ID"""
         parser = argparse.ArgumentParser(prog='get_queries', add_help=True)
         parser.add_argument("-ds","--dataset", help="Dataset ID to filter queries", default=None)
@@ -1128,7 +1138,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_query(self,args):
+    def do_query(self,args:Any) -> None:
         """Execute a SQL query against the current sandbox"""
         parser = argparse.ArgumentParser(prog='query', add_help=True)
         parser.add_argument("sql_query", help="SQL query to execute",type=str)
@@ -1148,15 +1158,15 @@ class ServiceShell(cmd.Cmd):
 
 
     @login_required
-    def do_extractArtefacts(self,args):
-        """extractArtefacts localfolder"""
-        console.print("Extracting artefacts...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtefacts', description='Extract artefacts from AEP')
-        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to', default='./extractions')
-        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
+    def do_extractArtifacts(self,args:Any) -> None:
+        """extractArtifacts localfolder"""
+        console.print("Extracting artifacts...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtifacts', description='Extract artifacts from AEP')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to', default='./extractions')
+        parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
         try:
             args = parser.parse_args(shlex.split(args))
-            aepp.extractSandboxArtefacts(
+            aepp.extractSandboxArtifacts(
                 sandbox=self.config,
                 localFolder=args.localfolder,
                 region=args.region
@@ -1166,20 +1176,20 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_extractArtefact(self,args):
-        """extractArtefacts localfolder"""
-        console.print("Extracting artefact...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtefact', description='Extract artefacts from AEP')
-        parser.add_argument('artefact', help='artefact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
-        parser.add_argument('-at','--artefactType', help='artefact type ')
-        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to',default='extractions')
-        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
+    def do_extractArtifact(self,args:Any) -> None:
+        """extractArtifacts localfolder"""
+        console.print("Extracting artifact...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP')
+        parser.add_argument('artifact', help='artifact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
+        parser.add_argument('-at','--artifactType', help='artifact type ')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to',default='extractions')
+        parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
 
         try:
             args = parser.parse_args(shlex.split(args))
-            aepp.extractSandboxArtefact(
-                artefact=args.artefact,
-                artefactType=args.artefactType,
+            aepp.extractSandboxArtifact(
+                artifact=args.artifact,
+                artifactType=args.artifactType,
                 sandbox=self.config,
                 localFolder=args.localfolder
             )
@@ -1188,16 +1198,16 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_sync(self,args):
-        """extractArtefacts localfolder"""
-        console.print("Syncing artefact...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtefact', description='Extract artefacts from AEP')
-        parser.add_argument('artefact', help='artefact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
-        parser.add_argument('-at','--artefactType', help='artefact type ')
+    def do_sync(self,args:Any) -> None:
+        """extractArtifacts localfolder"""
+        console.print("Syncing artifact...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP')
+        parser.add_argument('artifact', help='artifact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
+        parser.add_argument('-at','--artifactType', help='artifact type ')
         parser.add_argument('-t','--targets', help='target sandboxes')
-        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to',default='extractions')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to',default='extractions')
         parser.add_argument('-b','--baseSandbox', help='Base sandbox for synchronization')
-        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
+        parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
         parser.add_argument('-v','--verbose', help='Enable verbose output',default=True)
         try:
             args = parser.parse_args(shlex.split(args))
@@ -1222,21 +1232,20 @@ class ServiceShell(cmd.Cmd):
             )
             console.print("Starting Sync...", style="blue")
             synchronizor.syncComponent(
-                component=args.artefact,
-                componentType=args.artefactType,
+                component=args.artifact,
+                componentType=args.artifactType,
                 verbose=args.verbose
             )
             console.print("Sync completed!", style="green")
         except SystemExit:
             return
 
-
-    def do_exit(self, args):
+    def do_exit(self, args:Any) -> None:
        """Exit the application"""
        console.print(Panel("Exiting...", style="blue"))
        return True # Stops the loop
 
-    def do_EOF(self, args):
+    def do_EOF(self, args:Any) -> None:
        """Handle Ctrl+D"""
        console.print(Panel("Exiting...", style="blue"))
        return True
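Inside the shell, the renamed commands are ordinary `cmd.Cmd` commands, so they can also be driven programmatically through `onecmd`. A hypothetical session (command names and flags come from the hunks above; the config file and target-sandbox format are assumptions):

```python
shell = ServiceShell(config_file="aepp_config.json")

# Bulk export, renamed from extractArtefacts in 0.5.1:
shell.onecmd("extractArtifacts -lf ./extractions -rg ndl2")

# Single-artifact export:
shell.onecmd('extractArtifact "My Schema" -at schema -lf ./extractions')

# Synchronize that artifact to other sandboxes (target format assumed):
shell.onecmd('sync "My Schema" -at schema -b dev -t "stage,prod"')
```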
{aepp-0.5.1 → aepp-0.5.1.post1}/aepp/deletion.py

@@ -20,11 +20,11 @@ from aepp import connector
 
 class Deletion:
     """
-    This class regroups differet methods and combine some to clean and delete artefact from Adobe Experience Platform.
+    This class regroups differet methods and combine some to clean and delete artifact from Adobe Experience Platform.
     Supported in this class:
-     - Deleting datasets (and associated artefacts)
-     - Deleteting dataflows (and associated artefacts)
-     - Deleting schemas (and associated artefacts)
+     - Deleting datasets (and associated artifacts)
+     - Deleteting dataflows (and associated artifacts)
+     - Deleting schemas (and associated artifacts)
      - Deleting audiences
     """
     loggingEnabled = False
@@ -83,14 +83,14 @@ class Deletion:
     def __repr__self(self):
         return f"Deletion(config={self.config})"
 
-    def deleteDataset(self,datasetId: str,associatedArtefacts:bool=False) -> dict:
+    def deleteDataset(self,datasetId: str,associatedArtifacts:bool=False) -> dict:
         """
-        Delete a dataset and all associated artefacts (dataflows, schemas, data connections).
+        Delete a dataset and all associated artifacts (dataflows, schemas, data connections).
         Arguments:
             datasetId : REQUIRED : The identifier of the dataset to delete.
-            associatedArtefacts : OPTIONAL : If set to True, all associated artefacts (dataflows, schemas) will also be deleted (default False).
-            Note : Deleting associated artefacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
-            In case, it is not possible to delete artefacts, it will be silently ignored and returns in the output dictionary.
+            associatedArtifacts : OPTIONAL : If set to True, all associated artifacts (dataflows, schemas) will also be deleted (default False).
+            Note : Deleting associated artifacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
+            In case, it is not possible to delete artifacts, it will be silently ignored and returns in the output dictionary.
         """
         result = {}
         from aepp import catalog
@@ -100,7 +100,7 @@ class Deletion:
         schemaRef = datasetInfo.get('schemaRef',{}).get('id',None)
         res = cat.deleteDataSet(datasetId=datasetId)
         result['dataset'] = res
-        if associatedArtefacts:
+        if associatedArtifacts:
             # Deleting associated dataflows
             result['flows'] = {'connections':{}, 'flows': {}}
             from aepp import flowservice
@@ -113,21 +113,21 @@ class Deletion:
             flows = flow.getFlows()
             list_flowIds = [f['id'] for f in flows if f.get('sourceConnectionIds',[""])[0] in list_source_dataflowsIds or f.get('targetConnectionIds',[""])[0] in list_target_dataflowsIds]
             for flowId in list_flowIds:
-                res_flow = self.deleteDataFlow(flowId=flowId, associatedArtefacts=associatedArtefacts)
+                res_flow = self.deleteDataFlow(flowId=flowId, associatedArtifacts=associatedArtifacts)
                 result['flows']['flows'][flowId] = res_flow
             # Deleting associated schema
             if schemaRef is not None:
-                result['schema'] = self.deleteSchema(schemaId=schemaRef, associatedArtefacts=associatedArtefacts)
+                result['schema'] = self.deleteSchema(schemaId=schemaRef, associatedArtifacts=associatedArtifacts)
         return result
 
-    def deleteSchema(self,schemaId: str,associatedArtefacts:bool=False) -> dict:
+    def deleteSchema(self,schemaId: str,associatedArtifacts:bool=False) -> dict:
         """
-        Delete a schema and possibly all associated artefacts.
+        Delete a schema and possibly all associated artifacts.
         Arguments:
             schemaId : REQUIRED : The identifier of the schema to delete.
-            associatedArtefacts : OPTIONAL : If set to True, all associated artefacts (fieldGroup, datatype) will also be deleted (default False).
-            Note : Deleting associated artefacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
-            In case, it is not possible to delete artefacts, it will be silently ignored and returns in the output dictionary.
+            associatedArtifacts : OPTIONAL : If set to True, all associated artifacts (fieldGroup, datatype) will also be deleted (default False).
+            Note : Deleting associated artifacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
+            In case, it is not possible to delete artifacts, it will be silently ignored and returns in the output dictionary.
         """
         result = {'fieldGroup': {}, 'schema': {} , 'datatypes':{} }
         from aepp import schema, schemamanager
@@ -135,7 +135,7 @@ class Deletion:
         schemaInfo = schemamanager.SchemaManager(schemaId,config=self.config)
         res = sch.deleteSchema(schemaId=schemaId)
         result['schema'] = res
-        if associatedArtefacts:
+        if associatedArtifacts:
             for fieldgroupId, fieldgroupName in schemaInfo.fieldGroups.items():
                 myFG = schemaInfo.getFieldGroupManager(fieldgroupName)
                 datatypes = myFG.dataTypes
@@ -146,12 +146,12 @@ class Deletion:
                 result['fieldGroupName'][fieldgroupId] = res_fg
         return result
 
-    def deleteDataFlow(self,flowId: str,associatedArtefacts:bool=False) -> dict:
+    def deleteDataFlow(self,flowId: str,associatedArtifacts:bool=False) -> dict:
         """
-        Delete a dataflow and possibly all associated artefacts.
+        Delete a dataflow and possibly all associated artifacts.
         Arguments:
             flowId : REQUIRED : The identifier of the dataflow to delete.
-            associatedArtefacts : OPTIONAL : If set to True, all associated artefacts (source and target) will also be deleted (default False).
+            associatedArtifacts : OPTIONAL : If set to True, all associated artifacts (source and target) will also be deleted (default False).
         Note : The base connection will be identified and returned but not deleted. It can contains other dataflows still actives."""
         result = {'flow': {}, 'targetConnection': {},'sourceConnection':{}, 'baseConnection': {} }
         from aepp import flowservice
@@ -163,7 +163,7 @@ class Deletion:
         result['baseConnection'] = baseConn
         res = flow.deleteFlow(flowId=flowId)
         result['response_flow'] = res
-        if associatedArtefacts:
+        if associatedArtifacts:
             for sourceConnectionId in sourceConnectionIds:
                 res_sc = flow.deleteSourceConnection(connectionId=sourceConnectionId)
                 result["response_sourceConn"] = res_sc
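A sketch of the renamed keyword on the deletion side (the `Deletion` constructor signature is an assumption inferred from its `__repr__`; the method names, the `associatedArtifacts` flag, and the result keys come from the hunks above):

```python
from aepp import deletion

# Assumption: mySandbox is a configured ConnectObject (see aepp.configure).
cleaner = deletion.Deletion(config=mySandbox)

# 0.5.1 spelled this keyword associatedArtefacts; 0.5.1.post1 renames it.
report = cleaner.deleteDataset("<datasetId>", associatedArtifacts=True)
print(report["dataset"])  # response for the dataset deletion itself
print(report["flows"])    # responses for the associated dataflows
print(report["schema"])   # response from the cascaded deleteSchema call
```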
{aepp-0.5.1 → aepp-0.5.1.post1}/aepp/synchronizer.py

@@ -888,7 +888,7 @@ class Synchronizer:
 
     def __syncDataset__(self,baseDataset:dict,verbose:bool=False)-> dict:
         """
-        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artefacts when not already created.
+        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artifacts when not already created.
         Arguments:
             baseDataset : REQUIRED : dictionary with the dataset definition
         """
@@ -942,7 +942,7 @@ class Synchronizer:
 
     def __syncMergePolicy__(self,mergePolicy:dict,verbose:bool=False)->None:
         """
-        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artefacts when not already created.
+        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artifacts when not already created.
         Arguments:
             mergePolicy : REQUIRED : The merge policy dictionary to sync
         """
aepp-0.5.1/aepp/__version__.py
DELETED

@@ -1 +0,0 @@
-__version__ = "0.5.1"