aepp 0.5.0.post8__tar.gz → 0.5.1.post1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aepp-0.5.0.post8/aepp.egg-info → aepp-0.5.1.post1}/PKG-INFO +1 -1
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/__init__.py +24 -24
- aepp-0.5.1.post1/aepp/__version__.py +1 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/cli/__main__.py +124 -103
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/deletion.py +22 -22
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/synchronizer.py +9 -9
- {aepp-0.5.0.post8 → aepp-0.5.1.post1/aepp.egg-info}/PKG-INFO +1 -1
- aepp-0.5.0.post8/aepp/__version__.py +0 -1
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/LICENSE +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/MANIFEST.in +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/README.md +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/accesscontrol.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/catalog.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/classmanager.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/cli/__init__.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/config.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/configs.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/connector.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/customerprofile.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/dataaccess.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/dataprep.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/datasets.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/datatypemanager.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/destination.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/destinationinstanceservice.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/edge.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/exportDatasetToDataLandingZone.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/fieldgroupmanager.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/flowservice.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/hygiene.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/identity.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/ingestion.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/observability.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/policy.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/privacyservice.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/queryservice.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/sandboxes.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/schema.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/schemamanager.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/segmentation.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/sensei.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/som.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/tags.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/utils.py +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp.egg-info/SOURCES.txt +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp.egg-info/dependency_links.txt +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp.egg-info/entry_points.txt +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp.egg-info/requires.txt +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp.egg-info/top_level.txt +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/pyproject.toml +0 -0
- {aepp-0.5.0.post8 → aepp-0.5.1.post1}/setup.cfg +0 -0
{aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/__init__.py

```diff
@@ -165,7 +165,7 @@ def __titleSafe__(text: str) -> str:
     return text
 
 
-def extractSandboxArtefacts(
+def extractSandboxArtifacts(
     sandbox: 'ConnectObject' = None,
     localFolder: Union[str, Path] = None,
     region: str = "nld2",
@@ -310,20 +310,20 @@ def extractSandboxArtefacts(
         with open(f"{audiencePath / safe_name}.json",'w') as f:
             json.dump(el,f,indent=2)
 
-def extractSandboxArtefact(
+def extractSandboxArtifact(
     sandbox: 'ConnectObject' = None,
     localFolder: Union[str, Path] = None,
-    artefact: str = None,
-    artefactType: str = None,
+    artifact: str = None,
+    artifactType: str = None,
     region: str = "nld2",
     ):
     """
-    Export a single artefact and its dependencies from the sandbox.
+    Export a single artifact and its dependencies from the sandbox.
    Arguments:
        sandbox: REQUIRED: the instance of a ConnectObject that contains the sandbox information and connection.
        localFolder: OPTIONAL: the local folder where to extract the sandbox. If not provided, it will use the current working directory and name the folder the name of the sandbox.
-        artefact: REQUIRED: the id or the name of the artefact to export.
-        artefactType: REQUIRED: the type of artefact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity','mergepolicy',audience'
+        artifact: REQUIRED: the id or the name of the artifact to export.
+        artifactType: REQUIRED: the type of artifact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity','mergepolicy',audience'
        region: OPTIONAL: the region of the sandbox (default: nld2). This is used to fetch the correct API endpoints for the identities.
            Possible values: "va7","aus5", "can2", "ind2"
    """
@@ -348,24 +348,24 @@ def extractSandboxArtefact(
     from aepp import schema, catalog, identity
     sch = schema.Schema(config=sandbox)
     cat = catalog.Catalog(config=sandbox)
-    if artefactType == 'class':
-        __extractClass__(artefact,completePath,sandbox)
-    elif artefactType == 'schema':
-        __extractSchema__(artefact,completePath,sandbox,region)
-    elif artefactType == 'fieldgroup':
-        __extractFieldGroup__(artefact,completePath,sandbox)
-    elif artefactType == 'datatype':
-        __extractDataType__(artefact,completePath,sandbox)
-    elif artefactType == 'dataset':
-        __extractDataset__(artefact,completePath,sandbox,region)
-    elif artefactType == 'identity':
-        __extractIdentity__(artefact,region,completePath,sandbox)
-    elif artefactType == 'mergepolicy':
-        __extractMergePolicy__(artefact,completePath,sandbox)
-    elif artefactType == 'audience':
-        __extractAudience__(artefact,completePath,sandbox)
+    if artifactType == 'class':
+        __extractClass__(artifact,completePath,sandbox)
+    elif artifactType == 'schema':
+        __extractSchema__(artifact,completePath,sandbox,region)
+    elif artifactType == 'fieldgroup':
+        __extractFieldGroup__(artifact,completePath,sandbox)
+    elif artifactType == 'datatype':
+        __extractDataType__(artifact,completePath,sandbox)
+    elif artifactType == 'dataset':
+        __extractDataset__(artifact,completePath,sandbox,region)
+    elif artifactType == 'identity':
+        __extractIdentity__(artifact,region,completePath,sandbox)
+    elif artifactType == 'mergepolicy':
+        __extractMergePolicy__(artifact,completePath,sandbox)
+    elif artifactType == 'audience':
+        __extractAudience__(artifact,completePath,sandbox)
     else:
-        raise ValueError("artefactType not recognized")
+        raise ValueError("artifactType not recognized")
 
 def __extractClass__(classEl: str,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None):
     classPath = Path(folder) / 'class'
```
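The release renames the sandbox-export helpers from the British spelling (`extractSandboxArtefacts` / `extractSandboxArtefact`) to `extractSandboxArtifacts` / `extractSandboxArtifact`, along with the `artifact` / `artifactType` parameters, so callers of the old names will break. A minimal compatibility sketch callers could keep on their side during the migration (the aliases are hypothetical; aepp itself does not ship them):

```python
# Hypothetical shim for code written against aepp <= 0.5.0.post8: fall back to
# the new "artifact" spelling when the old "artefact" names are absent.
import aepp

extractSandboxArtefacts = getattr(aepp, "extractSandboxArtefacts", aepp.extractSandboxArtifacts)
extractSandboxArtefact = getattr(aepp, "extractSandboxArtefact", aepp.extractSandboxArtifact)
```

Note that the keyword arguments were renamed too, so calls passing `artefact=` / `artefactType=` need updating regardless of any alias.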
aepp-0.5.1.post1/aepp/__version__.py (new file)

```diff
@@ -0,0 +1 @@
+__version__ = "0.5.1-1"
```
{aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/cli/__main__.py

```diff
@@ -13,12 +13,16 @@ from io import FileIO
 import pandas as pd
 from datetime import datetime
 import urllib.parse
+from typing import Any, Concatenate, ParamSpec, ParamSpecKwargs
+from collections.abc import Callable
+
+P = ParamSpec("P")
 
 # --- 1. The Decorator (The Gatekeeper) ---
-def login_required(f):
+def login_required(f:Callable[Concatenate["ServiceShell", P], None]) -> Callable[Concatenate["ServiceShell", P], None]:
     """Decorator to block commands if not logged in."""
     @wraps(f)
-    def wrapper(self, *args, **kwargs):
+    def wrapper(self:"ServiceShell", *args:P.args, **kwargs:P.kwargs) -> None:
         if not hasattr(self, 'config') or self.config is None:
             print("(!) Access Denied: You must setup config first.")
             return
```
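The retyped decorator uses the PEP 612 pattern: `ParamSpec` captures the wrapped command's parameter list and `Concatenate` prepends the explicit `self`, so type checkers still see each `do_*` signature through the wrapper. A standalone sketch of the same pattern on a toy class (not the real `ServiceShell`), requiring Python 3.10+:

```python
from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec

P = ParamSpec("P")

class Shell:
    config = None  # stand-in for the configured connection object

def login_required(f: Callable[Concatenate[Shell, P], None]) -> Callable[Concatenate[Shell, P], None]:
    @wraps(f)
    def wrapper(self: Shell, *args: P.args, **kwargs: P.kwargs) -> None:
        if self.config is None:  # gate every command on a configured session
            print("(!) Access Denied: You must setup config first.")
            return
        return f(self, *args, **kwargs)  # parameters stay visible to type checkers
    return wrapper
```

Annotating a bare `**kwargs` with `ParamSpecKwargs`, as `__init__` does in the next hunk, is unusual; `P.kwargs` is normally only valid alongside a matching `*args: P.args`.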
```diff
@@ -29,26 +33,30 @@ console = Console()
 
 # --- 2. The Interactive Shell ---
 class ServiceShell(cmd.Cmd):
-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs:ParamSpecKwargs) -> None:
         super().__init__()
         self.config = None
         self.connectInstance = True
-
-
-
-
+        if kwargs.get("config_file") is not None:
+            config_path = Path(kwargs.get("config_file"))
+            if not config_path.is_absolute():
+                config_path = Path.cwd() / config_path
+        if kwargs.get("config_file") is not None:
             dict_config = json.load(FileIO(config_path))
-
-
-
-
-            self.
+            if kwargs.get("sandbox") is None:
+                self.sandbox = str(dict_config.get("sandbox-name","prod"))
+            else:
+                self.sandbox = str(kwargs.get("sandbox","prod"))
+            self.secret = str(dict_config.get("secret",kwargs.get("secret")))
+            self.org_id = str(dict_config.get("org_id",kwargs.get("org_id")))
+            self.client_id = str(dict_config.get("client_id",kwargs.get("client_id")))
+            self.scopes = str(dict_config.get("scopes",kwargs.get("scopes")))
         else:
-            self.sandbox = kwargs.get("sandbox","prod")
-            self.secret = kwargs.get("secret")
-            self.org_id = kwargs.get("org_id")
-            self.client_id = kwargs.get("client_id")
-            self.scopes = kwargs.get("scopes")
+            self.sandbox = str(kwargs.get("sandbox","prod"))
+            self.secret = str(kwargs.get("secret"))
+            self.org_id = str(kwargs.get("org_id"))
+            self.client_id = str(kwargs.get("client_id"))
+            self.scopes = str(kwargs.get("scopes"))
         self.connectInstance = True
         if self.sandbox is not None and self.secret is not None and self.org_id is not None and self.client_id is not None and self.scopes is not None:
             print("Configuring connection...")
```

(The bare `-` lines above were already elided in the source view; their original content is not recoverable.)
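From the `dict_config.get(...)` lookups above, a config file in roughly this shape should satisfy the constructor; the values are placeholders, and any extra keys that `aepp.createConfigFile` writes are not shown here:

```python
import json

# Placeholder credentials; the key names mirror the lookups in __init__ above.
sample_config = {
    "org_id": "ABC123@AdobeOrg",
    "client_id": "myClientId",
    "secret": "myClientSecret",
    "sandbox-name": "prod",
    "scopes": "openid,AdobeID,read_organizations",
}
with open("aepp_config.json", "w", encoding="utf-8") as f:
    json.dump(sample_config, f, indent=2)
```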
```diff
@@ -63,8 +71,20 @@ class ServiceShell(cmd.Cmd):
             self.prompt = f"{self.config.sandbox}> "
             console.print(Panel(f"Connected to [bold green]{self.sandbox}[/bold green]", style="blue"))
 
+    def do_createConfigFile(self, arg:Any) -> None:
+        """Create a configuration file for future use"""
+        parser = argparse.ArgumentParser(prog='createConfigFile', add_help=True)
+        parser.add_argument("-f", "--file_name", help="file name for your config file", default="aepp_config.json")
+        args = parser.parse_args(shlex.split(arg))
+        filename = args.file_name
+        aepp.createConfigFile(destination=filename)
+        filename_json = filename + ".json" if not filename.endswith(".json") else filename
+        console.print(f"Configuration file created at {Path.cwd() / Path(filename_json)}", style="green")
+        return
+
+
     # # --- Commands ---
-    def do_config(self, arg):
+    def do_config(self, arg:Any) -> None:
         """connect to an AEP instance"""
         parser = argparse.ArgumentParser(prog='config', add_help=True)
         parser.add_argument("-sx", "--sandbox", help="Auto-login sandbox")
@@ -77,18 +97,18 @@ class ServiceShell(cmd.Cmd):
             if args.config_file:
                 mypath = Path.cwd()
                 dict_config = json.load(FileIO(mypath / Path(args.config_file)))
-                self.sandbox = args.sandbox if args.sandbox else dict_config.get("sandbox-name",args.sandbox)
-                self.secret = dict_config.get("secret",args.secret)
-                self.org_id = dict_config.get("org_id",args.org_id)
-                self.client_id = dict_config.get("client_id",args.client_id)
-                self.scopes = dict_config.get("scopes",args.scopes)
+                self.sandbox = str(args.sandbox) if args.sandbox else str(dict_config.get("sandbox-name",args.sandbox))
+                self.secret = str(dict_config.get("secret",args.secret))
+                self.org_id = str(dict_config.get("org_id",args.org_id))
+                self.client_id = str(dict_config.get("client_id",args.client_id))
+                self.scopes = str(dict_config.get("scopes",args.scopes))
                 self.connectInstance = True
             else:
-                if args.sandbox: self.sandbox = args.sandbox
-                if args.secret: self.secret = args.secret
-                if args.org_id: self.org_id = args.org_id
-                if args.scopes: self.scopes = args.scopes
-                if args.client_id: self.client_id = args.client_id
+                if args.sandbox: self.sandbox = str(args.sandbox)
+                if args.secret: self.secret = str(args.secret)
+                if args.org_id: self.org_id = str(args.org_id)
+                if args.scopes: self.scopes = str(args.scopes)
+                if args.client_id: self.client_id = str(args.client_id)
             console.print("Configuring connection...", style="blue")
             self.config = aepp.configure(
                 connectInstance=self.connectInstance,
@@ -102,15 +122,15 @@ class ServiceShell(cmd.Cmd):
             self.prompt = f"{self.config.sandbox}> "
         return
 
-    def do_change_sandbox(self, args):
+    def do_change_sandbox(self, args:Any) -> None:
         """Change the current sandbox after configuration"""
         parser = argparse.ArgumentParser(prog='change sandbox', add_help=True)
         parser.add_argument("sandbox", help="sandbox name to switch to")
         args = parser.parse_args(shlex.split(args))
-        self.sandbox = args.sandbox if args.sandbox else console.print(Panel("(!) Please provide a sandbox name using -sx or --sandbox", style="red"))
+        self.sandbox = str(args.sandbox) if args.sandbox else console.print(Panel("(!) Please provide a sandbox name using -sx or --sandbox", style="red"))
         if self.config is not None:
             if args.sandbox:
-                self.config.setSandbox(args.sandbox)
+                self.config.setSandbox(str(args.sandbox))
                 self.prompt = f"{self.config.sandbox}> "
                 console.print(Panel(f"Sandbox changed to: [bold green]{self.config.sandbox}[/bold green]", style="blue"))
             else:
@@ -118,7 +138,7 @@ class ServiceShell(cmd.Cmd):
 
 
     @login_required
-    def do_get_schemas(self, args):
+    def do_get_schemas(self, args:Any) -> None:
         """List all schemas in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_schemas', add_help=True)
         parser.add_argument("-sv", "--save",help="Save schemas to CSV file")
@@ -150,7 +170,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_ups_schemas(self, args):
+    def do_get_ups_schemas(self, args) -> None:
         """List all schemas enabled for Profile in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_schemas_enabled', add_help=True)
         parser.add_argument("-sv", "--save",help="Save enabled schemas to CSV file")
@@ -188,8 +208,9 @@ class ServiceShell(cmd.Cmd):
                 console.print(f"(!) Error: {str(e)}", style="red")
         except SystemExit:
             return
+
     @login_required
-    def do_get_ups_fieldgroups(self, args):
+    def do_get_ups_fieldgroups(self, args:Any) -> None:
         """List all field groups enabled for Profile in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_fieldgroups_enabled', add_help=True)
         parser.add_argument("-sv", "--save",help="Save enabled field groups to CSV file")
@@ -229,7 +250,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_profile_schemas(self,args):
+    def do_get_profile_schemas(self,args:Any) -> None:
         """Get the current profile schema"""
         parser = argparse.ArgumentParser(prog='get_schemas_enabled', add_help=True)
         try:
@@ -256,7 +277,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_profile_json(self,args):
+    def do_get_union_profile_json(self,args:Any) -> None:
         """Get the current Profile union schema"""
         parser = argparse.ArgumentParser(prog='get_union_profile', add_help=True)
         try:
@@ -272,7 +293,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_profile_csv(self,args):
+    def do_get_union_profile_csv(self,args:Any) -> None:
         """Get the current Profile union schema"""
         parser = argparse.ArgumentParser(prog='get_union_profile', add_help=True)
         parser.add_argument("-f","--full",default=False,help="Get full schema information with all details",type=bool)
@@ -288,7 +309,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_event_json(self,args):
+    def do_get_union_event_json(self,args:Any) -> None:
         """Get the current Experience Event union schema"""
         parser = argparse.ArgumentParser(prog='get_union_event', add_help=True)
         try:
@@ -304,7 +325,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_event_csv(self,args):
+    def do_get_union_event_csv(self,args:Any) -> None:
         """Get the current Experience Event union schema"""
         parser = argparse.ArgumentParser(prog='get_union_event', add_help=True)
         parser.add_argument("-f","--full",default=False,help="Get full schema information with all details",type=bool)
@@ -320,7 +341,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_event_schemas(self,args):
+    def do_get_event_schemas(self,args:Any) -> None:
         """Get the current Experience Event schemas"""
         parser = argparse.ArgumentParser(prog='get_event_schemas', add_help=True)
         parser.add_argument("-sv", "--save",help="Save event schemas to CSV file")
@@ -352,7 +373,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_union_event_json(self,args):
+    def do_get_union_event_json(self,args:Any) -> None:
         """Get the current Experience Event union schema"""
         parser = argparse.ArgumentParser(prog='get_union_event', add_help=True)
         try:
@@ -369,7 +390,7 @@ class ServiceShell(cmd.Cmd):
 
 
     @login_required
-    def do_get_schema_xdm(self, arg):
+    def do_get_schema_xdm(self, arg:Any) -> None:
         """Get schema JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_schema_xdm', add_help=True)
         parser.add_argument("schema", help="Schema title, $id or alt:Id to retrieve")
@@ -402,7 +423,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_schema_csv(self, arg):
+    def do_get_schema_csv(self, arg:Any) -> None:
         """Get schema CSV by name or ID"""
         parser = argparse.ArgumentParser(prog='get_schema_csv', add_help=True)
         parser.add_argument("schema", help="Schema $id or alt:Id to retrieve")
@@ -432,7 +453,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_schema_json(self, args):
+    def do_get_schema_json(self, args:Any) -> None:
         """Get schema JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_schema_json', add_help=True)
         parser.add_argument("schema", help="Schema $id or alt:Id to retrieve")
@@ -461,7 +482,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_fieldgroups(self, args):
+    def do_get_fieldgroups(self, args:Any) -> None:
         """List all field groups in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_fieldgroups', add_help=True)
         parser.add_argument("-sv", "--save",help="Save field groups to CSV file")
@@ -491,7 +512,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_fieldgroup_json(self, args):
+    def do_get_fieldgroup_json(self, args:Any) -> None:
         """Get field group JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_fieldgroup_json', add_help=True)
         parser.add_argument("fieldgroup", help="Field Group name, $id or alt:Id to retrieve")
@@ -520,7 +541,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_fieldgroup_csv(self, args):
+    def do_get_fieldgroup_csv(self, args:Any) -> None:
         """Get field group CSV by name or ID"""
         parser = argparse.ArgumentParser(prog='get_fieldgroup_csv', add_help=True)
         parser.add_argument("fieldgroup", help="Field Group name, $id or alt:Id to retrieve")
@@ -548,7 +569,8 @@ class ServiceShell(cmd.Cmd):
         except SystemExit:
             return
 
-    def do_get_datatypes(self, args):
+    @login_required
+    def do_get_datatypes(self, args:Any) -> None:
         """List all data types in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_datatypes', add_help=True)
         try:
@@ -573,7 +595,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datatype_csv(self, args):
+    def do_get_datatype_csv(self, args:Any) -> None:
         """Get data type CSV by name or ID"""
         parser = argparse.ArgumentParser(prog='get_datatype_csv', add_help=True)
         parser.add_argument("datatype", help="Data Type name, $id or alt:Id to retrieve")
@@ -602,7 +624,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datatype_json(self, args):
+    def do_get_datatype_json(self, args:Any) -> None:
         """Get data type JSON by name or ID"""
         parser = argparse.ArgumentParser(prog='get_datatype_json', add_help=True)
         parser.add_argument("datatype", help="Data Type name, $id or alt:Id to retrieve")
@@ -632,7 +654,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_enable_schema_for_ups(self, args):
+    def do_enable_schema_for_ups(self, args:Any) -> None:
         """Enable a schema for Profile"""
         parser = argparse.ArgumentParser(prog='enable_schema_for_ups', add_help=True)
         parser.add_argument("schema_id", help="Schema ID to enable for Profile")
@@ -647,7 +669,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_upload_fieldgroup_definition_csv(self,args):
+    def do_upload_fieldgroup_definition_csv(self,args:Any) -> None:
         """Upload a field group definition from a CSV file"""
         parser = argparse.ArgumentParser(prog='upload_fieldgroup_definition_csv', add_help=True)
         parser.add_argument("csv_path", help="Path to the field group CSV file")
@@ -671,7 +693,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_upload_fieldgroup_definition_xdm(self,args):
+    def do_upload_fieldgroup_definition_xdm(self,args:Any) -> None:
         """Upload a field group definition from a JSON XDM file"""
         parser = argparse.ArgumentParser(prog='upload_fieldgroup_definition_xdm', add_help=True)
         parser.add_argument("xdm_path", help="Path to the field group JSON XDM file")
@@ -696,7 +718,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datasets(self, args):
+    def do_get_datasets(self, args:Any) -> None:
         """List all datasets in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_datasets', add_help=True)
         try:
@@ -727,7 +749,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_datasets_infos(self, args):
+    def do_get_datasets_infos(self, args:Any) -> None:
         """List all datasets in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_datasets_infos', add_help=True)
         try:
@@ -759,7 +781,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_createDataset(self, args):
+    def do_createDataset(self, args:Any) -> None:
         """Create a new dataset in the current sandbox"""
         parser = argparse.ArgumentParser(prog='createDataset', add_help=True)
         parser.add_argument("dataset_name", help="Name of the dataset to create")
@@ -775,7 +797,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_enable_dataset_for_ups(self, args):
+    def do_enable_dataset_for_ups(self, args:Any) -> None:
         """Enable a dataset for Profile"""
         parser = argparse.ArgumentParser(prog='enable_dataset_for_ups', add_help=True)
         parser.add_argument("dataset", help="Dataset ID or Dataset Name to enable for Profile")
@@ -793,8 +815,8 @@ class ServiceShell(cmd.Cmd):
         except SystemExit:
             return
 
-    @login_required
-    def do_get_identities(self, args):
+    @login_required
+    def do_get_identities(self, args:Any) -> None:
         """List all identities in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_identities', add_help=True)
         parser.add_argument("-r","--region", help="Region to get identities from: 'ndl2' (default), 'va7', 'aus5', 'can2', 'ind2'", default='ndl2')
@@ -825,7 +847,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_flows(self, args):
+    def do_get_flows(self, args:Any) -> None:
         """List flows in the current sandbox based on parameters provided. By default, list all sources and destinations."""
         parser = argparse.ArgumentParser(prog='get_flows', add_help=True)
         parser.add_argument("-i","--internal_flows",help="Get internal flows", default=False,type=bool)
@@ -964,19 +986,19 @@ class ServiceShell(cmd.Cmd):
                 successful_runs = fl.get("Successful Runs", 0)
                 failed_runs = fl.get("Failed Runs", 0)
                 partial_success = fl.get('Partial Success Runs',0)
-                if partial_success>0:
-
-
-
-
-
+                if partial_success>0 and failed_runs==0:
+                    colorStart = "[orange1]"
+                    colorEnd = "[/orange1]"
+                    row_data[0] = f"{colorStart}{fl.get('id','N/A')}{colorEnd}"
+                    row_data[1] = f"{colorStart}{fl.get('name','N/A')}{colorEnd}"
+                    row_data[2] = f"{colorStart}{fl.get('type','N/A')}{colorEnd}"
                 success_rate = (successful_runs / total_runs * 100) if total_runs > 0 else 0
                 failure_rate = (failed_runs / total_runs * 100) if total_runs > 0 else 0
                 row_data.extend([
                     f"{colorStart}{str(total_runs)}{colorEnd}",
                     f"{colorStart}{str(successful_runs)}{colorEnd}",
                     f"{colorStart}{str(failed_runs)}{colorEnd}",
-                    f"{
+                    f"{colorStart}{str(partial_success)}{colorEnd}",
                     f"{colorStart}{success_rate:.0f}%{colorEnd}",
                     f"{colorStart}{failure_rate:.0f}%{colorEnd}"
                 ])
@@ -988,7 +1010,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_flow_errors(self,args):
+    def do_get_flow_errors(self,args:Any) -> None:
         """Get errors for a specific flow, saving it in a JSON file for specific timeframe, default last 24 hours."""
         parser = argparse.ArgumentParser(prog='get_flow_errors', add_help=True)
         parser.add_argument("flow_id", help="Flow ID to get errors for")
@@ -1012,7 +1034,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_create_dataset_http_source(self,args):
+    def do_create_dataset_http_source(self,args:Any) -> None:
         """Create an HTTP Source connection for a specific dataset, XDM compatible data only."""
         parser = argparse.ArgumentParser(prog='do_create_dataset_http_source', add_help=True)
         parser.add_argument("dataset", help="Name or ID of the Dataset Source connection to create")
@@ -1038,7 +1060,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_DLZ_credential(self,args):
+    def do_get_DLZ_credential(self,args:Any) -> None:
         """Get Data Lake Zone credential for the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_DLZ_credential', add_help=True)
         parser.add_argument("type",nargs='?',help="Type of credential to retrieve: 'user_drop_zone' or 'dlz_destination'",default="user_drop_zone")
@@ -1054,7 +1076,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_get_queries(self, args):
+    def do_get_queries(self, args:Any)-> None:
         """List top 1000 queries in the current sandbox for the last 24 hours by default, optionally filtered by dataset ID"""
         parser = argparse.ArgumentParser(prog='get_queries', add_help=True)
         parser.add_argument("-ds","--dataset", help="Dataset ID to filter queries", default=None)
@@ -1116,7 +1138,7 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_query(self,args):
+    def do_query(self,args:Any) -> None:
         """Execute a SQL query against the current sandbox"""
         parser = argparse.ArgumentParser(prog='query', add_help=True)
         parser.add_argument("sql_query", help="SQL query to execute",type=str)
@@ -1136,15 +1158,15 @@ class ServiceShell(cmd.Cmd):
 
 
     @login_required
-    def do_extractArtefacts(self,args):
-        """extractArtefacts localfolder"""
-        console.print("Extracting artefacts...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtefacts', description='Extract artefacts from AEP')
-        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to', default='./extractions')
-        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
+    def do_extractArtifacts(self,args:Any) -> None:
+        """extractArtifacts localfolder"""
+        console.print("Extracting artifacts...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtifacts', description='Extract artifacts from AEP')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to', default='./extractions')
+        parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
         try:
             args = parser.parse_args(shlex.split(args))
-            aepp.extractSandboxArtefacts(
+            aepp.extractSandboxArtifacts(
                 sandbox=self.config,
                 localFolder=args.localfolder,
                 region=args.region
@@ -1154,20 +1176,20 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_extractArtefact(self,args):
-        """extractArtefacts localfolder"""
-        console.print("Extracting artefact...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtefact', description='Extract artefacts from AEP')
-        parser.add_argument('artefact', help='artefact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
-        parser.add_argument('-at','--artefactType', help='artefact type ')
-        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to',default='extractions')
-        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
+    def do_extractArtifact(self,args:Any) -> None:
+        """extractArtifacts localfolder"""
+        console.print("Extracting artifact...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP')
+        parser.add_argument('artifact', help='artifact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
+        parser.add_argument('-at','--artifactType', help='artifact type ')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to',default='extractions')
+        parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
 
         try:
             args = parser.parse_args(shlex.split(args))
-            aepp.extractSandboxArtefact(
-                artefact=args.artefact,
-                artefactType=args.artefactType,
+            aepp.extractSandboxArtifact(
+                artifact=args.artifact,
+                artifactType=args.artifactType,
                 sandbox=self.config,
                 localFolder=args.localfolder
             )
```
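Both renamed CLI commands are thin wrappers over the module-level helpers shown earlier in `aepp/__init__.py`. A hedged sketch of the equivalent direct calls (credentials and the artifact name are placeholders):

```python
import aepp

# Placeholder credentials; configure(connectInstance=True) returns the
# ConnectObject the shell keeps as self.config.
conn = aepp.configure(
    org_id="ABC123@AdobeOrg",
    client_id="myClientId",
    secret="myClientSecret",
    scopes="openid,AdobeID,read_organizations",
    sandbox="dev",
    connectInstance=True,
)
# Full-sandbox export, mirroring the `extractArtifacts` command:
aepp.extractSandboxArtifacts(sandbox=conn, localFolder="./extractions", region="nld2")
# Single-artifact export, mirroring `extractArtifact`:
aepp.extractSandboxArtifact(
    sandbox=conn,
    localFolder="extractions",
    artifact="MySchema",       # name or $id
    artifactType="schema",
    region="nld2",
)
```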
```diff
@@ -1176,16 +1198,16 @@ class ServiceShell(cmd.Cmd):
             return
 
     @login_required
-    def do_sync(self,args):
-        """extractArtefacts localfolder"""
-        console.print("Syncing artefact...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtefact', description='Extract artefacts from AEP')
-        parser.add_argument('artefact', help='artefact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
-        parser.add_argument('-at','--artefactType', help='artefact type ')
+    def do_sync(self,args:Any) -> None:
+        """extractArtifacts localfolder"""
+        console.print("Syncing artifact...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP')
+        parser.add_argument('artifact', help='artifact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
+        parser.add_argument('-at','--artifactType', help='artifact type ')
         parser.add_argument('-t','--targets', help='target sandboxes')
-        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to',default='extractions')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to',default='extractions')
         parser.add_argument('-b','--baseSandbox', help='Base sandbox for synchronization')
-        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
+        parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
         parser.add_argument('-v','--verbose', help='Enable verbose output',default=True)
         try:
             args = parser.parse_args(shlex.split(args))
@@ -1210,21 +1232,20 @@ class ServiceShell(cmd.Cmd):
             )
             console.print("Starting Sync...", style="blue")
             synchronizor.syncComponent(
-                component=args.artefact,
-                componentType=args.artefactType,
+                component=args.artifact,
+                componentType=args.artifactType,
                 verbose=args.verbose
             )
             console.print("Sync completed!", style="green")
         except SystemExit:
             return
 
-
-    def do_exit(self, args):
+    def do_exit(self, args:Any) -> None:
         """Exit the application"""
         console.print(Panel("Exiting...", style="blue"))
         return True # Stops the loop
 
-    def do_EOF(self, args):
+    def do_EOF(self, args:Any) -> None:
         """Handle Ctrl+D"""
         console.print(Panel("Exiting...", style="blue"))
         return True
```
{aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/deletion.py

```diff
@@ -20,11 +20,11 @@ from aepp import connector
 
 class Deletion:
     """
-    This class regroups differet methods and combine some to clean and delete artefact from Adobe Experience Platform.
+    This class regroups differet methods and combine some to clean and delete artifact from Adobe Experience Platform.
     Supported in this class:
-    - Deleting datasets (and associated artefacts)
-    - Deleteting dataflows (and associated artefacts)
-    - Deleting schemas (and associated artefacts)
+    - Deleting datasets (and associated artifacts)
+    - Deleteting dataflows (and associated artifacts)
+    - Deleting schemas (and associated artifacts)
     - Deleting audiences
     """
     loggingEnabled = False
@@ -83,14 +83,14 @@ class Deletion:
     def __repr__self(self):
         return f"Deletion(config={self.config})"
 
-    def deleteDataset(self,datasetId: str,associatedArtefacts:bool=False) -> dict:
+    def deleteDataset(self,datasetId: str,associatedArtifacts:bool=False) -> dict:
         """
-        Delete a dataset and all associated artefacts (dataflows, schemas, data connections).
+        Delete a dataset and all associated artifacts (dataflows, schemas, data connections).
         Arguments:
             datasetId : REQUIRED : The identifier of the dataset to delete.
-            associatedArtefacts : OPTIONAL : If set to True, all associated artefacts (dataflows, schemas) will also be deleted (default False).
-        Note : Deleting associated artefacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
-        In case, it is not possible to delete artefacts, it will be silently ignored and returns in the output dictionary.
+            associatedArtifacts : OPTIONAL : If set to True, all associated artifacts (dataflows, schemas) will also be deleted (default False).
+        Note : Deleting associated artifacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
+        In case, it is not possible to delete artifacts, it will be silently ignored and returns in the output dictionary.
         """
         result = {}
         from aepp import catalog
```
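A short usage sketch for the renamed keyword; the connection and dataset ID are placeholders, and the `Deletion(config=...)` constructor is assumed from the class's `__repr__` above. The keys of the returned report match the `result` entries in the hunks that follow:

```python
import aepp
from aepp import deletion

conn = aepp.configure(org_id="...", client_id="...", secret="...",
                      scopes="...", sandbox="dev", connectInstance=True)  # placeholders
deleter = deletion.Deletion(config=conn)
# associatedArtifacts=True cascades to the dataset's dataflows and its schema.
report = deleter.deleteDataset(datasetId="<datasetId>", associatedArtifacts=True)
print(report["dataset"], report["flows"], report["schema"])
```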
```diff
@@ -100,7 +100,7 @@ class Deletion:
         schemaRef = datasetInfo.get('schemaRef',{}).get('id',None)
         res = cat.deleteDataSet(datasetId=datasetId)
         result['dataset'] = res
-        if associatedArtefacts:
+        if associatedArtifacts:
             # Deleting associated dataflows
             result['flows'] = {'connections':{}, 'flows': {}}
             from aepp import flowservice
@@ -113,21 +113,21 @@ class Deletion:
             flows = flow.getFlows()
             list_flowIds = [f['id'] for f in flows if f.get('sourceConnectionIds',[""])[0] in list_source_dataflowsIds or f.get('targetConnectionIds',[""])[0] in list_target_dataflowsIds]
             for flowId in list_flowIds:
-                res_flow = self.deleteDataFlow(flowId=flowId, associatedArtefacts=associatedArtefacts)
+                res_flow = self.deleteDataFlow(flowId=flowId, associatedArtifacts=associatedArtifacts)
                 result['flows']['flows'][flowId] = res_flow
             # Deleting associated schema
             if schemaRef is not None:
-                result['schema'] = self.deleteSchema(schemaId=schemaRef, associatedArtefacts=associatedArtefacts)
+                result['schema'] = self.deleteSchema(schemaId=schemaRef, associatedArtifacts=associatedArtifacts)
         return result
 
-    def deleteSchema(self,schemaId: str,associatedArtefacts:bool=False) -> dict:
+    def deleteSchema(self,schemaId: str,associatedArtifacts:bool=False) -> dict:
         """
-        Delete a schema and possibly all associated artefacts.
+        Delete a schema and possibly all associated artifacts.
         Arguments:
             schemaId : REQUIRED : The identifier of the schema to delete.
-            associatedArtefacts : OPTIONAL : If set to True, all associated artefacts (fieldGroup, datatype) will also be deleted (default False).
-        Note : Deleting associated artefacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
-        In case, it is not possible to delete artefacts, it will be silently ignored and returns in the output dictionary.
+            associatedArtifacts : OPTIONAL : If set to True, all associated artifacts (fieldGroup, datatype) will also be deleted (default False).
+        Note : Deleting associated artifacts option will be pass down to other methods called within this method. So Field Groups, Data Type could be impacted.
+        In case, it is not possible to delete artifacts, it will be silently ignored and returns in the output dictionary.
         """
         result = {'fieldGroup': {}, 'schema': {} , 'datatypes':{} }
         from aepp import schema, schemamanager
@@ -135,7 +135,7 @@ class Deletion:
         schemaInfo = schemamanager.SchemaManager(schemaId,config=self.config)
         res = sch.deleteSchema(schemaId=schemaId)
         result['schema'] = res
-        if associatedArtefacts:
+        if associatedArtifacts:
             for fieldgroupId, fieldgroupName in schemaInfo.fieldGroups.items():
                 myFG = schemaInfo.getFieldGroupManager(fieldgroupName)
                 datatypes = myFG.dataTypes
@@ -146,12 +146,12 @@ class Deletion:
                 result['fieldGroupName'][fieldgroupId] = res_fg
         return result
 
-    def deleteDataFlow(self,flowId: str,associatedArtefacts:bool=False) -> dict:
+    def deleteDataFlow(self,flowId: str,associatedArtifacts:bool=False) -> dict:
         """
-        Delete a dataflow and possibly all associated artefacts.
+        Delete a dataflow and possibly all associated artifacts.
         Arguments:
             flowId : REQUIRED : The identifier of the dataflow to delete.
-            associatedArtefacts : OPTIONAL : If set to True, all associated artefacts (source and target) will also be deleted (default False).
+            associatedArtifacts : OPTIONAL : If set to True, all associated artifacts (source and target) will also be deleted (default False).
         Note : The base connection will be identified and returned but not deleted. It can contains other dataflows still actives."""
         result = {'flow': {}, 'targetConnection': {},'sourceConnection':{}, 'baseConnection': {} }
         from aepp import flowservice
@@ -163,7 +163,7 @@ class Deletion:
         result['baseConnection'] = baseConn
         res = flow.deleteFlow(flowId=flowId)
         result['response_flow'] = res
-        if associatedArtefacts:
+        if associatedArtifacts:
             for sourceConnectionId in sourceConnectionIds:
                 res_sc = flow.deleteSourceConnection(connectionId=sourceConnectionId)
                 result["response_sourceConn"] = res_sc
```
{aepp-0.5.0.post8 → aepp-0.5.1.post1}/aepp/synchronizer.py

```diff
@@ -719,7 +719,7 @@ class Synchronizer:
                 match descType:
                     case "xdm:descriptorIdentity":
                         target_identitiesDecs = [desc for desc in target_descriptors if desc['@type'] == 'xdm:descriptorIdentity']
-                        baseIdentityNS = baseDescriptor['xdm:namespace']
+                        baseIdentityNS = baseDescriptor['xdm:namespace'].lower()
                         if self.baseConfig is not None and self.localfolder is None:
                             identityConn = identity.Identity(config=self.baseConfig,region=self.region)
                             baseIdentities = identityConn.getIdentities()
@@ -728,8 +728,8 @@ class Synchronizer:
                             for file in self.identityFolder.glob('*.json'):
                                 id_file = json.load(FileIO(file))
                                 baseIdentities.append(id_file)
-                        if baseIdentityNS not in [el['xdm:namespace'] for el in target_identitiesDecs]: ## identity descriptor does not exists in target schema
-                            def_identity = [el for el in baseIdentities if el['code'] == baseIdentityNS][0]
+                        if baseIdentityNS not in [el['xdm:namespace'].lower() for el in target_identitiesDecs]: ## identity descriptor does not exists in target schema
+                            def_identity = [el for el in baseIdentities if el['code'].lower() == baseIdentityNS][0]
                             self.__syncIdentity__(def_identity,verbose=verbose)
                             new_desc = targetSchemaManager.createDescriptorOperation(descType=descType,
                                 completePath=baseDescriptor['xdm:sourceProperty'],
@@ -738,7 +738,7 @@ class Synchronizer:
                             )
                             res = targetSchemaManager.createDescriptor(new_desc)
                         else:
-                            res = [el for el in target_identitiesDecs if el['xdm:namespace'] == baseIdentityNS][0]
+                            res = [el for el in target_identitiesDecs if el['xdm:namespace'].lower() == baseIdentityNS][0]
                         list_descriptors.append(res)
                     case "xdm:descriptorOneToOne": ## lookup definition
                         target_OneToOne = [desc for desc in target_descriptors if desc['@type'] == 'xdm:descriptorOneToOne']
@@ -866,15 +866,15 @@ class Synchronizer:
         """
         if not isinstance(identityDefiniton,dict):
             raise TypeError("the identityDefinition must be a dictionary")
-        code_base_identity = identityDefiniton['code']
+        code_base_identity = identityDefiniton['code'].lower()
         self.dict_baseComponents['identities'][code_base_identity] = identityDefiniton
         for target in self.dict_targetsConfig.keys():
             targetIdentity = identity.Identity(config=self.dict_targetsConfig[target],region=self.region)
             t_identities = targetIdentity.getIdentities()
-            if code_base_identity in [el['code'] for el in t_identities]:## identity already exists in target
+            if code_base_identity in [el['code'].lower() for el in t_identities]:## identity already exists in target
                 if verbose:
                     print(f"identity '{code_base_identity}' already exists in target {target}, saving it")
-                self.dict_targetComponents[target]['identities'][code_base_identity] = [el for el in t_identities if el['code'] == code_base_identity][0]
+                self.dict_targetComponents[target]['identities'][code_base_identity] = [el for el in t_identities if el['code'].lower() == code_base_identity][0]
             else:
                 if verbose:
                     print(f"identity '{code_base_identity}' does not exist in target {target}, creating it")
```
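The added `.lower()` calls make every identity-namespace comparison case-insensitive on both sides, so a base-sandbox `CRMID` now matches a target `crmId` instead of triggering a duplicate create. A toy illustration of the normalized lookup (sample codes are made up):

```python
def find_identity(code: str, identities: list[dict]) -> dict | None:
    """Return the first identity whose namespace code matches, ignoring case."""
    wanted = code.lower()
    return next((el for el in identities if el["code"].lower() == wanted), None)

targets = [{"code": "crmId"}, {"code": "ECID"}]
assert find_identity("CRMID", targets) == {"code": "crmId"}
assert find_identity("AdCloud", targets) is None
```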
```diff
@@ -888,7 +888,7 @@ class Synchronizer:
 
     def __syncDataset__(self,baseDataset:dict,verbose:bool=False)-> dict:
         """
-        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artefacts when not already created.
+        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artifacts when not already created.
         Arguments:
             baseDataset : REQUIRED : dictionary with the dataset definition
         """
@@ -942,7 +942,7 @@ class Synchronizer:
 
     def __syncMergePolicy__(self,mergePolicy:dict,verbose:bool=False)->None:
         """
-        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artefacts when not already created.
+        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artifacts when not already created.
         Arguments:
             mergePolicy : REQUIRED : The merge policy dictionary to sync
         """
```
aepp-0.5.0.post8/aepp/__version__.py (deleted)

```diff
@@ -1 +0,0 @@
-__version__ = "0.5.0-8"
```
All other files listed above (+0 -0) are unchanged between the two versions.