aepp 0.4.3__py3-none-any.whl → 0.4.3.post2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aepp/__version__.py +1 -1
- aepp/cli/__init__.py +0 -0
- aepp/cli/__main__.py +1239 -0
- {aepp-0.4.3.dist-info → aepp-0.4.3.post2.dist-info}/METADATA +1 -6
- {aepp-0.4.3.dist-info → aepp-0.4.3.post2.dist-info}/RECORD +9 -7
- {aepp-0.4.3.dist-info → aepp-0.4.3.post2.dist-info}/WHEEL +0 -0
- {aepp-0.4.3.dist-info → aepp-0.4.3.post2.dist-info}/entry_points.txt +0 -0
- {aepp-0.4.3.dist-info → aepp-0.4.3.post2.dist-info}/licenses/LICENSE +0 -0
- {aepp-0.4.3.dist-info → aepp-0.4.3.post2.dist-info}/top_level.txt +0 -0
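Note: because the new CLI ships as `aepp/cli/__main__.py`, the interactive shell can be started with `python -m aepp.cli` (standard `-m` semantics); how startup arguments are forwarded depends on code beyond the excerpt shown below, so the available flags are best read off the shell's own `config` command.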
aepp/__version__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.4.3"
+__version__ = "0.4.3-2"
aepp/cli/__init__.py ADDED
Empty file (package marker for the new aepp.cli package).
aepp/cli/__main__.py ADDED
@@ -0,0 +1,1239 @@
import aepp
from aepp import synchronizer, schema, schemamanager, fieldgroupmanager, datatypemanager, identity, queryservice, catalog, flowservice
import argparse, cmd, shlex, json
from functools import wraps
from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from rich.markdown import Markdown
from pathlib import Path
from io import FileIO
import pandas as pd
from datetime import datetime
import urllib.parse

# --- 1. The Decorator (The Gatekeeper) ---
def login_required(f):
    """Decorator to block commands if not logged in."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        if not hasattr(self, 'config') or self.config is None:
            print("(!) Access Denied: You must set up the config first.")
            return
        return f(self, *args, **kwargs)
    return wrapper

console = Console()

# --- 2. The Interactive Shell ---
class ServiceShell(cmd.Cmd):
    def __init__(self, **kwargs):
        super().__init__()
        self.config = None
        self.connectInstance = True
        if kwargs.get("config_file"):
            mypath = Path.cwd()
            dict_config = json.load(FileIO(mypath / Path(kwargs.get("config_file"))))
            self.sandbox = kwargs.get("sandbox", dict_config.get("sandbox-name", "prod"))
            self.secret = dict_config.get("secret", kwargs.get("secret"))
            self.org_id = dict_config.get("org_id", kwargs.get("org_id"))
            self.client_id = dict_config.get("client_id", kwargs.get("client_id"))
            self.scopes = dict_config.get("scopes", kwargs.get("scopes"))
            self.connectInstance = True
        else:
            self.sandbox = kwargs.get("sandbox", "prod")
            self.secret = kwargs.get("secret")
            self.org_id = kwargs.get("org_id")
            self.client_id = kwargs.get("client_id")
            self.scopes = kwargs.get("scopes")
            self.connectInstance = True
        if self.sandbox is not None and self.secret is not None and self.org_id is not None and self.client_id is not None:
            print("Auto-configuring connection...")
            self.config = aepp.configure(
                connectInstance=self.connectInstance,
                sandbox=self.sandbox,
                secret=self.secret,
                org_id=self.org_id,
                client_id=self.client_id,
                scopes=self.scopes
            )
            self.prompt = f"{self.config.sandbox}> "
            console.print(Panel(f"Connected to [bold green]{self.sandbox}[/bold green]", style="blue"))
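A minimal sketch of the gatekeeper at work (it uses only the standard `cmd.Cmd` dispatch API; the command names are the ones defined below, and the config path is illustrative):

    shell = ServiceShell()                    # no credentials passed, so shell.config stays None
    shell.onecmd("get_schemas")               # blocked by @login_required: prints the access-denied line
    shell.onecmd("config -cf config.json")    # sets shell.config; later commands pass the guard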
    # --- Commands ---
    def do_config(self, arg):
        """connect to an AEP instance"""
        parser = argparse.ArgumentParser(prog='config', add_help=True)
        parser.add_argument("-sx", "--sandbox", help="Auto-login sandbox")
        parser.add_argument("-s", "--secret", help="Secret")
        parser.add_argument("-o", "--org_id", help="Auto-login org ID")
        parser.add_argument("-sc", "--scopes", help="Scopes")
        parser.add_argument("-cid", "--client_id", help="Auto-login client ID")
        parser.add_argument("-cf", "--config_file", help="Path to config file", default=None)
        args = parser.parse_args(shlex.split(arg))
        if args.config_file:
            mypath = Path.cwd()
            dict_config = json.load(FileIO(mypath / Path(args.config_file)))
            self.sandbox = args.sandbox if args.sandbox else dict_config.get("sandbox-name", args.sandbox)
            self.secret = dict_config.get("secret", args.secret)
            self.org_id = dict_config.get("org_id", args.org_id)
            self.client_id = dict_config.get("client_id", args.client_id)
            self.scopes = dict_config.get("scopes", args.scopes)
            self.connectInstance = True
        else:
            if args.sandbox: self.sandbox = args.sandbox
            if args.secret: self.secret = args.secret
            if args.org_id: self.org_id = args.org_id
            if args.scopes: self.scopes = args.scopes
            if args.client_id: self.client_id = args.client_id
        console.print("Configuring connection...", style="blue")
        self.config = aepp.configure(
            connectInstance=self.connectInstance,
            sandbox=self.sandbox,
            secret=self.secret,
            org_id=self.org_id,
            client_id=self.client_id,
            scopes=self.scopes
        )
        console.print(Panel(f"Connected to [bold green]{self.sandbox}[/bold green]", style="blue"))
        self.prompt = f"{self.config.sandbox}> "
        return
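For reference, `-cf/--config_file` (and the constructor's `config_file` kwarg) expects a JSON file holding exactly the keys the code reads above — `sandbox-name`, `secret`, `org_id`, `client_id`, `scopes`. A minimal sketch with placeholder values:

    {
        "sandbox-name": "prod",
        "org_id": "<ORG_ID>",
        "client_id": "<CLIENT_ID>",
        "secret": "<SECRET>",
        "scopes": "<SCOPES>"
    }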
    def do_change_sandbox(self, args):
        """Change the current sandbox after configuration"""
        parser = argparse.ArgumentParser(prog='change_sandbox', add_help=True)
        parser.add_argument("sandbox", help="sandbox name to switch to")
        args = parser.parse_args(shlex.split(args))
        self.sandbox = args.sandbox
        if self.config is not None:
            self.config.setSandbox(args.sandbox)
            self.prompt = f"{self.config.sandbox}> "
            console.print(Panel(f"Sandbox changed to: {self.config.sandbox}", style="blue"))
        else:
            console.print(Panel("(!) You must configure the connection first using the 'config' command.", style="red"))
    @login_required
    def do_get_schemas(self, args):
        """List all schemas in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_schemas', add_help=True)
        parser.add_argument("-sv", "--save", help="Save schemas to CSV file")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            schemas = aepp_schema.getSchemas()
            if len(schemas) > 0:
                if args.save:
                    df_schemas = pd.DataFrame(schemas)
                    df_schemas.to_csv(f"{self.config.sandbox}_schemas.csv", index=False)
                    console.print(f"Schemas exported to {self.config.sandbox}_schemas.csv", style="green")
                table = Table(title=f"Schemas in Sandbox: {self.config.sandbox}")
                table.add_column("ID", style="cyan")
                table.add_column("Name", style="magenta")
                table.add_column("Version", style="green")
                for sch in schemas:
                    table.add_row(
                        sch.get("meta:altId", "N/A"),
                        sch.get("title", "N/A"),
                        str(sch.get("version", "N/A")),
                    )
                console.print(table)
            else:
                console.print("(!) No schemas found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
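A typical exchange (the prompt carries the sandbox name; `-sv` is declared as a value-taking option, so any non-empty value such as `yes` turns the export on):

    prod> get_schemas            # table only
    prod> get_schemas -sv yes    # table plus prod_schemas.csv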
    @login_required
    def do_get_ups_schemas(self, args):
        """List all schemas enabled for Profile in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_ups_schemas', add_help=True)
        parser.add_argument("-sv", "--save", help="Save enabled schemas to CSV file")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            union_schemas = aepp_schema.getUnions()
            schemas = aepp_schema.getSchemas()
            enabled_schemas = []
            for union in union_schemas:
                for member in union.get("meta:extends", []):
                    if 'schema' in member:
                        enabled_schemas.append(member)
            list_enabled_schemas = [sc for sc in schemas if sc.get("$id") in enabled_schemas]
            if len(list_enabled_schemas) > 0:
                if args.save:
                    df_schemas = pd.DataFrame(list_enabled_schemas)
                    df_schemas.to_csv(f"{self.config.sandbox}_enabled_schemas.csv", index=False)
                    console.print(f"Enabled Schemas exported to {self.config.sandbox}_enabled_schemas.csv", style="green")
                table = Table(title=f"Enabled Schemas in Sandbox: {self.config.sandbox}")
                table.add_column("ID", style="cyan")
                table.add_column("Name", style="magenta")
                table.add_column("Version", style="green")
                for sch in list_enabled_schemas:
                    table.add_row(
                        sch.get("meta:altId", "N/A"),
                        sch.get("title", "N/A"),
                        str(sch.get("version", "N/A")),
                    )
                console.print(table)
            else:
                console.print("(!) No enabled schemas found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return

    @login_required
    def do_get_ups_fieldgroups(self, args):
        """List all field groups enabled for Profile in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_ups_fieldgroups', add_help=True)
        parser.add_argument("-sv", "--save", help="Save enabled field groups to CSV file")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            union_schemas = aepp_schema.getUnions()
            fgs = aepp_schema.getFieldGroups()
            enabled_fgs = []
            for union in union_schemas:
                for member in union.get("meta:extends", []):
                    if 'mixins' in member:
                        enabled_fgs.append(member)
            list_enabled_fgs = [f for f in fgs if f.get("$id") in enabled_fgs]
            if len(list_enabled_fgs) > 0:
                if args.save:
                    df_fgs = pd.DataFrame(list_enabled_fgs)
                    df_fgs.to_csv(f"{self.config.sandbox}_enabled_field_groups.csv", index=False)
                    console.print(f"Enabled Field Groups exported to {self.config.sandbox}_enabled_field_groups.csv", style="green")
                table = Table(title=f"Enabled Field Groups in Sandbox: {self.config.sandbox}")
                table.add_column("ID", style="cyan")
                table.add_column("Name", style="magenta")
                table.add_column("Version", style="green")
                for sch in list_enabled_fgs:
                    table.add_row(
                        sch.get("meta:altId", "N/A"),
                        sch.get("title", "N/A"),
                        str(sch.get("version", "N/A")),
                    )
                console.print(table)
            else:
                console.print("(!) No enabled field groups found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_profile_schemas(self, args):
        """List all Profile-class schemas in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_profile_schemas', add_help=True)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            profile_schemas = aepp_schema.getSchemas(classFilter="https://ns.adobe.com/xdm/context/profile")
            if profile_schemas:
                table = Table(title=f"Profile Schemas in Sandbox: {self.config.sandbox}")
                table.add_column("ID", style="cyan")
                table.add_column("Name", style="magenta")
                table.add_column("Version", style="green")
                for sch in profile_schemas:
                    table.add_row(
                        sch.get("meta:altId", "N/A"),
                        sch.get("title", "N/A"),
                        str(sch.get("version", "N/A")),
                    )
                console.print(table)
            else:
                console.print("(!) No profile schemas found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_union_profile_json(self, args):
        """Export the current Profile union schema to JSON"""
        parser = argparse.ArgumentParser(prog='get_union_profile_json', add_help=True)
        try:
            args = parser.parse_args(shlex.split(args))
            profile_union = schemamanager.SchemaManager('https://ns.adobe.com/xdm/context/profile__union', config=self.config)
            data = profile_union.to_dict()
            with open(f"{self.config.sandbox}_profile_union_schema.json", 'w') as f:
                json.dump(data, f, indent=4)
            console.print(f"Profile Union Schema exported to {self.config.sandbox}_profile_union_schema.json", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return

    @login_required
    def do_get_union_profile_csv(self, args):
        """Export the current Profile union schema to CSV"""
        parser = argparse.ArgumentParser(prog='get_union_profile_csv', add_help=True)
        parser.add_argument("-f", "--full", default=False, help="Get full schema information with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            profile_union = schemamanager.SchemaManager('https://ns.adobe.com/xdm/context/profile__union', config=self.config)
            df = profile_union.to_dataframe(full=args.full)
            df.to_csv(f"{self.config.sandbox}_profile_union_schema.csv", index=False)
            console.print(f"Profile Union Schema exported to {self.config.sandbox}_profile_union_schema.csv", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_union_event_json(self, args):
        """Export the current Experience Event union schema to JSON"""
        parser = argparse.ArgumentParser(prog='get_union_event_json', add_help=True)
        try:
            args = parser.parse_args(shlex.split(args))
            event_union = schemamanager.SchemaManager('https://ns.adobe.com/xdm/context/experienceevent__union', config=self.config)
            data = event_union.to_dict()
            with open(f"{self.config.sandbox}_event_union_schema.json", 'w') as f:
                json.dump(data, f, indent=4)
            console.print(f"Event Union Schema exported to {self.config.sandbox}_event_union_schema.json", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return

    @login_required
    def do_get_union_event_csv(self, args):
        """Export the current Experience Event union schema to CSV"""
        parser = argparse.ArgumentParser(prog='get_union_event_csv', add_help=True)
        parser.add_argument("-f", "--full", default=False, help="Get full schema information with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            event_union = schemamanager.SchemaManager('https://ns.adobe.com/xdm/context/experienceevent__union', config=self.config)
            df = event_union.to_dataframe(full=args.full)
            df.to_csv(f"{self.config.sandbox}_event_union_schema.csv", index=False)
            console.print(f"Event Union Schema exported to {self.config.sandbox}_event_union_schema.csv", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return

    @login_required
    def do_get_event_schemas(self, args):
        """List all Experience Event schemas in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_event_schemas', add_help=True)
        parser.add_argument("-sv", "--save", help="Save event schemas to CSV file")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            event_schemas = aepp_schema.getSchemas(classFilter="https://ns.adobe.com/xdm/context/experienceevent")
            if args.save:
                df_schemas = pd.DataFrame(event_schemas)
                df_schemas.to_csv(f"{self.config.sandbox}_event_schemas.csv", index=False)
                console.print(f"Event Schemas exported to {self.config.sandbox}_event_schemas.csv", style="green")
            if event_schemas:
                table = Table(title=f"Event Schemas in Sandbox: {self.config.sandbox}")
                table.add_column("ID", style="cyan")
                table.add_column("Name", style="magenta")
                table.add_column("Version", style="green")
                for sch in event_schemas:
                    table.add_row(
                        sch.get("meta:altId", "N/A"),
                        sch.get("title", "N/A"),
                        str(sch.get("version", "N/A")),
                    )
                console.print(table)
            else:
                console.print("(!) No event schemas found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_schema_xdm(self, arg):
        """Get schema JSON (XDM representation) by name or ID"""
        parser = argparse.ArgumentParser(prog='get_schema_xdm', add_help=True)
        parser.add_argument("schema", help="Schema title, $id or alt:Id to retrieve")
        parser.add_argument("-f", "--full", default=False, help="Get full schema with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(arg))
            aepp_schema = schema.Schema(config=self.config)
            schemas = aepp_schema.getSchemas()
            ## check if the schema title is found
            if args.schema in aepp_schema.data.schemas_altId.keys():
                schema_json = aepp_schema.getSchema(
                    schemaId=aepp_schema.data.schemas_altId[args.schema],
                )
            else:
                schema_json = aepp_schema.getSchema(
                    schemaId=args.schema
                )
            if 'title' in schema_json.keys():
                filename = f"{schema_json['title']}_xdm.json"
                with open(filename, 'w') as f:
                    json.dump(schema_json, f, indent=4)
                console.print(f"Schema '{args.schema}' saved to {filename}.", style="green")
            else:
                console.print(f"(!) Schema '{args.schema}' not found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_schema_csv(self, arg):
        """Get schema CSV by name or ID"""
        parser = argparse.ArgumentParser(prog='get_schema_csv', add_help=True)
        parser.add_argument("schema", help="Schema $id or alt:Id to retrieve")
        parser.add_argument("-f", "--full", default=False, help="Get full schema information with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(arg))
            aepp_schema = schema.Schema(config=self.config)
            schemas = aepp_schema.getSchemas()
            ## check if the schema title is found
            if args.schema in aepp_schema.data.schemas_altId.keys():
                my_schema_manager = schemamanager.SchemaManager(
                    schema=aepp_schema.data.schemas_altId[args.schema],
                    config=self.config
                )
            else:
                my_schema_manager = schemamanager.SchemaManager(
                    schema=args.schema,
                    config=self.config
                )
            df = my_schema_manager.to_dataframe(full=args.full)
            df.to_csv(f"{my_schema_manager.title}_schema.csv", index=False)
            console.print(f"Schema exported to {my_schema_manager.title}_schema.csv", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_schema_json(self, args):
        """Get schema JSON by name or ID"""
        parser = argparse.ArgumentParser(prog='get_schema_json', add_help=True)
        parser.add_argument("schema", help="Schema $id or alt:Id to retrieve")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            schemas = aepp_schema.getSchemas()
            ## check if the schema title is found
            if args.schema in aepp_schema.data.schemas_altId.keys():
                my_schema_manager = schemamanager.SchemaManager(
                    schema=aepp_schema.data.schemas_altId[args.schema],
                    config=self.config
                )
            else:
                my_schema_manager = schemamanager.SchemaManager(
                    schema=args.schema,
                    config=self.config
                )
            data = my_schema_manager.to_dict()
            with open(f"{my_schema_manager.title}.json", 'w') as f:
                json.dump(data, f, indent=4)
            console.print(f"Schema exported to {my_schema_manager.title}.json", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_fieldgroups(self, args):
        """List all field groups in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_fieldgroups', add_help=True)
        parser.add_argument("-sv", "--save", help="Save field groups to CSV file")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            fieldgroups = aepp_schema.getFieldGroups()
            if args.save:
                df_fgs = pd.DataFrame(fieldgroups)
                df_fgs.to_csv(f"{self.config.sandbox}_fieldgroups.csv", index=False)
                console.print(f"Field Groups exported to {self.config.sandbox}_fieldgroups.csv", style="green")
            if fieldgroups:
                table = Table(title=f"Field Groups in Sandbox: {self.config.sandbox}")
                table.add_column("altId", style="cyan")
                table.add_column("Title", style="magenta")
                for fg in fieldgroups:
                    table.add_row(
                        fg.get("meta:altId", "N/A"),
                        fg.get("title", "N/A"),
                    )
                console.print(table)
            else:
                console.print("(!) No field groups found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_fieldgroup_json(self, args):
        """Get field group JSON by name or ID"""
        parser = argparse.ArgumentParser(prog='get_fieldgroup_json', add_help=True)
        parser.add_argument("fieldgroup", help="Field Group name, $id or alt:Id to retrieve")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            fieldgroups = aepp_schema.getFieldGroups()
            ## check if the field group title is found
            if args.fieldgroup in aepp_schema.data.fieldGroups_altId.keys():
                my_fieldgroup_manager = fieldgroupmanager.FieldGroupManager(
                    fieldgroup=aepp_schema.data.fieldGroups_altId[args.fieldgroup],
                    config=self.config
                )
            else:
                my_fieldgroup_manager = fieldgroupmanager.FieldGroupManager(
                    fieldgroup=args.fieldgroup,
                    config=self.config
                )
            data = my_fieldgroup_manager.to_dict()
            with open(f"{my_fieldgroup_manager.title}_fieldgroup.json", 'w') as f:
                json.dump(data, f, indent=4)
            console.print(f"Field Group exported to {my_fieldgroup_manager.title}_fieldgroup.json", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_fieldgroup_csv(self, args):
        """Get field group CSV by name or ID"""
        parser = argparse.ArgumentParser(prog='get_fieldgroup_csv', add_help=True)
        parser.add_argument("fieldgroup", help="Field Group name, $id or alt:Id to retrieve")
        parser.add_argument("-f", "--full", default=False, help="Get full field group information with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            fieldgroups = aepp_schema.getFieldGroups()
            ## check if the field group title is found
            if args.fieldgroup in aepp_schema.data.fieldGroups_altId.keys():
                my_fieldgroup_manager = fieldgroupmanager.FieldGroupManager(
                    fieldgroup=aepp_schema.data.fieldGroups_altId[args.fieldgroup],
                    config=self.config
                )
            else:
                my_fieldgroup_manager = fieldgroupmanager.FieldGroupManager(
                    fieldgroup=args.fieldgroup,
                    config=self.config
                )
            df = my_fieldgroup_manager.to_dataframe(full=args.full)
            df.to_csv(f"{my_fieldgroup_manager.title}_fieldgroup.csv", index=False)
            console.print(f"Field Group exported to {my_fieldgroup_manager.title}_fieldgroup.csv", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_datatypes(self, args):
        """List all data types in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_datatypes', add_help=True)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            datatypes = aepp_schema.getDataTypes()
            if datatypes:
                table = Table(title=f"Data Types in Sandbox: {self.config.sandbox}")
                table.add_column("altId", style="cyan")
                table.add_column("Title", style="magenta")
                for dt in datatypes:
                    table.add_row(
                        dt.get("meta:altId", "N/A"),
                        dt.get("title", "N/A"),
                    )
                console.print(table)
            else:
                console.print("(!) No data types found.", style="red")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_datatype_csv(self, args):
        """Get data type CSV by name or ID"""
        parser = argparse.ArgumentParser(prog='get_datatype_csv', add_help=True)
        parser.add_argument("datatype", help="Data Type name, $id or alt:Id to retrieve")
        parser.add_argument("-f", "--full", default=False, help="Get full data type information with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            datatypes = aepp_schema.getDataTypes()
            ## check if the data type title is found
            if args.datatype in aepp_schema.data.dataTypes_altId.keys():
                my_datatype_manager = datatypemanager.DataTypeManager(
                    datatype=aepp_schema.data.dataTypes_altId[args.datatype],
                    config=self.config
                )
            else:
                my_datatype_manager = datatypemanager.DataTypeManager(
                    datatype=args.datatype,
                    config=self.config
                )
            df = my_datatype_manager.to_dataframe(full=args.full)
            df.to_csv(f"{my_datatype_manager.title}_datatype.csv", index=False)
            console.print(f"Data Type exported to {my_datatype_manager.title}_datatype.csv", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_datatype_json(self, args):
        """Get data type JSON by name or ID"""
        parser = argparse.ArgumentParser(prog='get_datatype_json', add_help=True)
        parser.add_argument("datatype", help="Data Type name, $id or alt:Id to retrieve")
        parser.add_argument("-f", "--full", default=False, help="Get full data type information with all details", type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            datatypes = aepp_schema.getDataTypes()
            ## check if the data type title is found
            if args.datatype in aepp_schema.data.dataTypes_altId.keys():
                my_datatype_manager = datatypemanager.DataTypeManager(
                    datatype=aepp_schema.data.dataTypes_altId[args.datatype],
                    config=self.config
                )
            else:
                my_datatype_manager = datatypemanager.DataTypeManager(
                    datatype=args.datatype,
                    config=self.config
                )
            data = my_datatype_manager.to_dict()
            with open(f"{my_datatype_manager.title}_datatype.json", 'w') as f:
                json.dump(data, f, indent=4)
            console.print(f"Data Type exported to {my_datatype_manager.title}_datatype.json", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_enable_schema_for_ups(self, args):
        """Enable a schema for Profile"""
        parser = argparse.ArgumentParser(prog='enable_schema_for_ups', add_help=True)
        parser.add_argument("schema_id", help="Schema ID to enable for Profile")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_schema = schema.Schema(config=self.config)
            result = aepp_schema.enableSchemaForUPS(schemaId=args.schema_id)
            console.print(f"Schema '{args.schema_id}' enabled for Profile.", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_upload_fieldgroup_definition_csv(self, args):
        """Upload a field group definition from a CSV file"""
        parser = argparse.ArgumentParser(prog='upload_fieldgroup_definition_csv', add_help=True)
        parser.add_argument("csv_path", help="Path to the field group CSV file")
        parser.add_argument("-ts", "--test", help="Test upload without uploading it to AEP", default=False, type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            myfg = fieldgroupmanager.FieldGroupManager(config=self.config)
            myfg.importFieldGroupDefinition(fieldgroup=args.csv_path)
            if args.test:
                data = myfg.to_dict()
                with open(f"test_{myfg.title}_fieldgroup.json", 'w') as f:
                    json.dump(data, f, indent=4)
                console.print(f"Field Group definition test exported to test_{myfg.title}_fieldgroup.json", style="green")
                console.print_json(data=data)
                return
            res = myfg.createFieldGroup()
            console.print(f"Field Group uploaded with ID: {res.get('meta:altId')}", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_upload_fieldgroup_definition_xdm(self, args):
        """Upload a field group definition from a JSON XDM file"""
        parser = argparse.ArgumentParser(prog='upload_fieldgroup_definition_xdm', add_help=True)
        parser.add_argument("xdm_path", help="Path to the field group JSON XDM file")
        parser.add_argument("-ts", "--test", help="Test upload without uploading it to AEP", default=False, type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            with open(args.xdm_path, 'r') as f:
                xdm_data = json.load(f)
            myfg = fieldgroupmanager.FieldGroupManager(xdm_data, config=self.config)
            if args.test:
                data = myfg.to_dict()
                with open(f"test_{myfg.title}_fieldgroup.json", 'w') as f:
                    json.dump(data, f, indent=4)
                console.print(f"Field Group definition test exported to test_{myfg.title}_fieldgroup.json", style="green")
                console.print_json(data=data)
                return
            res = myfg.createFieldGroup()
            console.print(f"Field Group uploaded with ID: {res.get('meta:altId')}", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_datasets(self, args):
        """List all datasets in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_datasets', add_help=True)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_cat = catalog.Catalog(config=self.config)
            datasets = aepp_cat.getDataSets(output='list')
            df_datasets = pd.DataFrame(datasets)
            df_datasets.to_csv(f"{self.config.sandbox}_datasets.csv", index=False)
            console.print(f"Datasets exported to {self.config.sandbox}_datasets.csv", style="green")
            table = Table(title=f"Datasets in Sandbox: {self.config.sandbox}")
            table.add_column("ID", style="white")
            table.add_column("Name", style="white", no_wrap=True)
            table.add_column("Created At", style="yellow")
            table.add_column("Data Ingested", style="magenta")
            table.add_column("Data Type", style="red")
            for ds in datasets:
                table.add_row(
                    ds.get("id", "N/A"),
                    ds.get("name", "N/A"),
                    datetime.fromtimestamp(ds.get("created", 1000)/1000).isoformat().split('T')[0],
                    str(ds.get("dataIngested", False)),
                    ds.get("classification", {}).get("dataBehavior", "unknown")
                )
            console.print(table)
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_datasets_infos(self, args):
        """List all datasets in the current sandbox with row counts and storage size"""
        parser = argparse.ArgumentParser(prog='get_datasets_infos', add_help=True)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_cat = catalog.Catalog(config=self.config)
            datasets = aepp_cat.getDataSets()
            aepp_cat.data.infos.to_csv(f"{aepp_cat.sandbox}_datasets_infos.csv", index=False)
            console.print(f"Datasets infos exported to {aepp_cat.sandbox}_datasets_infos.csv", style="green")
            table = Table(title=f"Datasets in Sandbox: {self.config.sandbox}")
            table.add_column("ID", style="white")
            table.add_column("Name", style="white", no_wrap=True)
            table.add_column("Datalake_rows", style="blue")
            table.add_column("Datalake_storage", style="blue")
            table.add_column("UPS_rows", style="magenta")
            table.add_column("UPS_storage", style="magenta")
            for _, ds in aepp_cat.data.infos.iterrows():
                table.add_row(
                    ds.get("id", "N/A"),
                    ds.get("name", "N/A"),
                    str(ds.get("datalake_rows", "N/A")),
                    str(ds.get("datalake_storageSize", "N/A")),
                    str(ds.get("ups_rows", "N/A")),
                    str(ds.get("ups_storageSize", "N/A"))
                )
            console.print(table)
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_createDataset(self, args):
        """Create a new dataset in the current sandbox"""
        parser = argparse.ArgumentParser(prog='createDataset', add_help=True)
        parser.add_argument("dataset_name", help="Name of the dataset to create")
        parser.add_argument("schema_id", help="Schema ID to associate with the dataset")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_cat = catalog.Catalog(config=self.config)
            dataset_id = aepp_cat.createDataSet(dataset_name=args.dataset_name, schemaId=args.schema_id)
            console.print(f"Dataset '{args.dataset_name}' created with ID: {dataset_id[0]}", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
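Illustrative invocation (both arguments are placeholders; the second must be the schema reference that `createDataSet` expects):

    prod> createDataset my_new_dataset <schemaId>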
    @login_required
    def do_enable_dataset_for_ups(self, args):
        """Enable a dataset for Profile"""
        parser = argparse.ArgumentParser(prog='enable_dataset_for_ups', add_help=True)
        parser.add_argument("dataset", help="Dataset ID or Dataset Name to enable for Profile")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_cat = catalog.Catalog(config=self.config)
            datasets = aepp_cat.getDataSets(output='list')
            datasetId = None
            for ds in datasets:
                if ds.get("name", "") == args.dataset or ds.get("id", "") == args.dataset:
                    datasetId = ds.get("id")
                    break
            if datasetId is None:
                console.print(f"(!) Dataset '{args.dataset}' not found.", style="red")
                return
            result = aepp_cat.enableDatasetProfile(datasetId=datasetId)
            console.print(f"Dataset '{datasetId}' enabled for Profile.", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_identities(self, args):
        """List all identities in the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_identities', add_help=True)
        parser.add_argument("-r", "--region", help="Region to get identities from: 'ndl2' (default), 'va7', 'aus5', 'can2', 'ind2'", default='ndl2')
        parser.add_argument("-co", "--custom_only", help="Get only custom identities", default=False, type=bool)
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_identity = identity.Identity(config=self.config, region=args.region)
            identities = aepp_identity.getIdentities(only_custom=args.custom_only)
            df_identities = pd.DataFrame(identities)
            df_identities.to_csv(f"{self.config.sandbox}_identities.csv", index=False)
            console.print(f"Identities exported to {self.config.sandbox}_identities.csv", style="green")
            table = Table(title=f"Identities in Sandbox: {self.config.sandbox}")
            table.add_column("Code", style="cyan")
            table.add_column("Name", style="magenta")
            table.add_column("id", style="white")
            table.add_column("namespaceType", style="green")
            for _, iden in df_identities.iterrows():
                table.add_row(
                    iden.get("code", "N/A"),
                    iden.get("name", "N/A"),
                    str(iden.get("id", "N/A")),
                    iden.get("namespaceType", "N/A"),
                )
            console.print(table)
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
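Illustrative invocations (the region codes are the ones listed in the option's help text):

    prod> get_identities                   # all namespaces, ndl2 region
    prod> get_identities -r va7 -co yes    # custom namespaces only, va7 region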
    @login_required
    def do_get_flows(self, args):
        """List flows in the current sandbox based on the parameters provided. By default, list all sources and destinations."""
        parser = argparse.ArgumentParser(prog='get_flows', add_help=True)
        parser.add_argument("-i", "--internal_flows", help="Get internal flows", default=False, type=bool)
        parser.add_argument("-adv", "--advanced", help="Get advanced information about runs", default=False, type=bool)
        parser.add_argument("-ao", "--active_only", help="Get only active flows during that time period", default=True, type=bool)
        parser.add_argument("-mn", "--minutes", help="Timeframe in minutes to check for errors, default 0", default=0, type=int)
        parser.add_argument("-H", "--hours", help="Timeframe in hours to check for errors, default 0", default=0, type=int)
        parser.add_argument("-d", "--days", help="Timeframe in days to check for errors, default 0", default=0, type=int)
        try:
            args = parser.parse_args(shlex.split(args))
            timetotal_minutes = args.minutes + (args.hours * 60) + (args.days * 1440)
            if timetotal_minutes == 0:
                timetotal_minutes = 1440  # default to last 24 hours
            timereference = int(datetime.now().timestamp()*1000) - (timetotal_minutes * 60 * 1000)
            aepp_flow = flowservice.FlowService(config=self.config)
            flows = aepp_flow.getFlows(n_results="inf")
            runs = None
            if args.active_only:
                runs = aepp_flow.getRuns(n_results="inf", prop=[f'metrics.durationSummary.startedAtUTC>{timereference}'])
                active_flow_ids = list(set([run.get("flowId") for run in runs]))
            source_flows = aepp_flow.getFlows(onlySources=True)
            destinations_flows = aepp_flow.getFlows(onlyDestinations=True)
            list_source_ids = [f.get("id") for f in source_flows]
            list_destination_ids = [f.get("id") for f in destinations_flows]
            if args.internal_flows:
                list_flows = flows
            else:
                list_flows = source_flows + destinations_flows
            if args.active_only:
                list_flows = [fl for fl in list_flows if fl.get("id") in active_flow_ids]
            if args.advanced:
                if runs is None:
                    runs = aepp_flow.getRuns(n_results="inf", prop=[f'metrics.durationSummary.startedAtUTC>{timereference}'])
                runs_by_flow = {}
                for run in runs:
                    flow_id = run.get("flowId")
                    if flow_id not in runs_by_flow:
                        runs_by_flow[flow_id] = {
                            "total_runs": 0,
                            "failed_runs": 0,
                            "success_runs": 0,
                        }
                    runs_by_flow[flow_id]["total_runs"] += 1
                    status = run.get("metrics", {}).get("statusSummary", {}).get("status", "unknown")
                    if status == "failed":
                        runs_by_flow[flow_id]["failed_runs"] += 1
                    elif status == "success":
                        runs_by_flow[flow_id]["success_runs"] += 1
            report_flows = []
            for fl in list_flows:
                obj = {
                    "id": fl.get("id", "N/A"),
                    "name": fl.get("name", "N/A"),
                    "created": fl.get("createdAt", 1000),
                    "flowSpec": fl.get("flowSpec", {}).get('id', 'N/A'),
                    "sourceConnectionId": fl.get("sourceConnectionIds", ["N/A"])[0],
                    "targetConnectionId": fl.get("targetConnectionIds", ["N/A"])[0],
                    "connectionSpec": fl.get("inheritedAttributes", {}).get('sourceConnections', [{}])[0].get('connectionSpec', {}).get('id'),
                    "type": fl.get("inheritedAttributes", {}).get('properties', 'N/A'),
                }
                if obj.get("id") in list_source_ids:
                    obj["type"] = "Source"
                elif obj.get("id") in list_destination_ids:
                    obj["type"] = "Destination"
                else:
                    obj["type"] = "Internal"
                if fl.get('transformations') and len(fl.get('transformations')) > 0:
                    obj["Transformation"] = True
                else:
                    obj["Transformation"] = False
                if args.advanced:
                    run_info = runs_by_flow.get(obj.get("id"), {"total_runs": 0, "failed_runs": 0, "success_runs": 0})
                    obj["Total Runs"] = run_info.get("total_runs", 0)
                    obj["Failed Runs"] = run_info.get("failed_runs", 0)
                    obj["Successful Runs"] = run_info.get("success_runs", 0)
                report_flows.append(obj)
            df_flows = pd.DataFrame(list_flows)
            filename = f"{self.config.sandbox}_flows_{timereference/1000}"
            if args.advanced:
                filename = f"{filename}_advanced"
            if args.active_only == False:
                filename = f"{filename}_all"
            if args.internal_flows:
                filename = f"{filename}_internal"
            df_flows.to_csv(f"{filename}.csv", index=False)
            console.print(f"Flows exported to {filename}.csv", style="green")
            table = Table(title=f"Flows in Sandbox: {self.config.sandbox}")
            table.add_column("ID", style="cyan")
            table.add_column("Name", style="magenta")
            table.add_column("Created", style="white")
            table.add_column("Type", style="white")
            table.add_column("Transformation", style="white")
            if args.advanced == False:
                table.add_column("Flow Spec", style="white")
                table.add_column("Source Conn ID", style="white")
                table.add_column("Target Conn ID", style="white")
            if args.advanced:
                table.add_column("Total Runs", style="blue")
                table.add_column("Failed Runs", style="red")
                table.add_column("Successful Runs", style="green")
                table.add_column("Success Rate", style="green")
                table.add_column("Failure Rate", style="red")
            for fl in report_flows:
                if args.advanced:
                    if fl.get("Failed Runs", 0) > 0:
                        colorStart = "[red]"
                        colorEnd = "[/red]"
                    else:
                        colorStart = "[green]"
                        colorEnd = "[/green]"
                else:
                    colorStart = ""
                    colorEnd = ""
                row_data = [
                    f"{colorStart}{fl.get('id','N/A')}{colorEnd}",
                    f"{colorStart}{fl.get('name','N/A')}{colorEnd}",
                    f"{colorStart}{datetime.fromtimestamp(fl.get('created',1000)/1000).isoformat().split('T')[0]}{colorEnd}",
                    f"{colorStart}{fl.get('type','N/A')}{colorEnd}",
                    f"{colorStart}{str(fl.get('Transformation', False))}{colorEnd}",
                ]
                if args.advanced == False:
                    row_data.extend([
                        f"{colorStart}{fl.get('flowSpec','N/A')}{colorEnd}",
                        f"{colorStart}{fl.get('sourceConnectionId','N/A')}{colorEnd}",
                        f"{colorStart}{fl.get('targetConnectionId','N/A')}{colorEnd}",
                    ])
                if args.advanced:
                    total_runs = fl.get("Total Runs", 0)
                    failed_runs = fl.get("Failed Runs", 0)
                    successful_runs = fl.get("Successful Runs", 0)
                    success_rate = (successful_runs / total_runs * 100) if total_runs > 0 else 0
                    failure_rate = (failed_runs / total_runs * 100) if total_runs > 0 else 0
                    row_data.extend([
                        f"{colorStart}{str(total_runs)}{colorEnd}",
                        f"{colorStart}{str(failed_runs)}{colorEnd}",
                        f"{colorStart}{str(successful_runs)}{colorEnd}",
                        f"{colorStart}{success_rate:.0f}%{colorEnd}",
                        f"{colorStart}{failure_rate:.0f}%{colorEnd}"
                    ])
                table.add_row(*row_data)
            console.print(table)
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
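The look-back window above is minutes + hours*60 + days*1440, falling back to 1440 minutes (24 hours) when all three are zero; `timereference` is that cutoff as epoch milliseconds. An illustrative call:

    prod> get_flows -d 7 -adv yes    # flows active in the last 7 days, with run statistics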
    @login_required
    def do_get_flow_errors(self, args):
        """Get the errors for a specific flow, saved to a JSON file, over a specific timeframe (default: last 24 hours)."""
        parser = argparse.ArgumentParser(prog='get_flow_errors', add_help=True)
        parser.add_argument("flow_id", help="Flow ID to get errors for")
        parser.add_argument("-mn", "--minutes", help="Timeframe in minutes to check for errors, default 0", default=0, type=int)
        parser.add_argument("-H", "--hours", help="Timeframe in hours to check for errors, default 0", default=0, type=int)
        parser.add_argument("-d", "--days", help="Timeframe in days to check for errors, default 0", default=0, type=int)
        try:
            args = parser.parse_args(shlex.split(args))
            timetotal_minutes = args.minutes + (args.hours * 60) + (args.days * 1440)
            if timetotal_minutes == 0:
                timetotal_minutes = 1440  # default to last 24 hours
            aepp_flow = flowservice.FlowService(config=self.config)
            timereference = int(datetime.now().timestamp()*1000) - (timetotal_minutes * 60 * 1000)
            failed_runs = aepp_flow.getRuns(prop=['metrics.statusSummary.status==failed', f'flowId=={args.flow_id}', f'metrics.durationSummary.startedAtUTC>{timereference}'], n_results="inf")
            with open(f"flow_{args.flow_id}_errors.json", 'w') as f:
                json.dump(failed_runs, f, indent=4)
            console.print(f"Flow errors exported to flow_{args.flow_id}_errors.json", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
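Illustrative invocation (<flowId> is a placeholder):

    prod> get_flow_errors <flowId> -H 12    # failed runs from the last 12 hours -> flow_<flowId>_errors.json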
    @login_required
    def do_create_dataset_http_source(self, args):
        """Create an HTTP Source connection for a specific dataset (XDM-compatible data only)."""
        parser = argparse.ArgumentParser(prog='create_dataset_http_source', add_help=True)
        parser.add_argument("dataset", help="Name or ID of the dataset to attach the HTTP Source connection to")
        try:
            args = parser.parse_args(shlex.split(args))
            aepp_cat = catalog.Catalog(config=self.config)
            datasets = aepp_cat.getDataSets(output='list')
            if args.dataset in [ds.get("name", "") for ds in datasets]:
                for ds in datasets:
                    if ds.get("name", "") == args.dataset:
                        datasetId = ds.get("id")
            else:
                datasetId = args.dataset
            flw = flowservice.FlowService(config=self.config)
            res = flw.createFlowStreaming(datasetId=datasetId)
            console.print(f"HTTP Source connection created with Flow ID: {res.get('flow',{}).get('id')}", style="green")
            source_id = res.get('source_connection_id', {}).get('id')
            sourceConnection = flw.getSourceConnection(sourceConnectionId=source_id)
            console.print(f"Endpoint URL: {sourceConnection.get('params',{}).get('inletUrl')}", style="green")
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
    @login_required
    def do_get_DLZ_credential(self, args):
        """Get the Data Landing Zone credential for the current sandbox"""
        parser = argparse.ArgumentParser(prog='get_DLZ_credential', add_help=True)
        parser.add_argument("type", nargs='?', help="Type of credential to retrieve: 'user_drop_zone' or 'dlz_destination'", default="user_drop_zone")
        try:
            args = parser.parse_args(shlex.split(args))
            flw = flowservice.FlowService(config=self.config)
            cred = flw.getLandingZoneCredential(dlz_type=args.type)
            console.print(f"Data Landing Zone Credential for sandbox '{self.config.sandbox}':", style="green")
            console.print_json(data=cred)
        except Exception as e:
            console.print(f"(!) Error: {str(e)}", style="red")
        except SystemExit:
            return
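Illustrative invocations (the two accepted values come from the argument's help text):

    prod> get_DLZ_credential                    # defaults to user_drop_zone
    prod> get_DLZ_credential dlz_destination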
+    @login_required
+    def do_get_queries(self, args):
+        """List up to the top 1000 queries in the current sandbox (last 24 hours by default), optionally filtered by dataset ID or state"""
+        parser = argparse.ArgumentParser(prog='get_queries', add_help=True)
+        parser.add_argument("-ds","--dataset", help="Dataset ID to filter queries", default=None)
+        parser.add_argument("-st","--state", help="State to filter queries (running, completed, failed)", default=None)
+        parser.add_argument("-H","--hours", help="Timeframe in hours to look back, default 0", default=0, type=int)
+        parser.add_argument("-d","--days", help="Timeframe in days to look back, default 0", default=0, type=int)
+        parser.add_argument("-mn","--minutes", help="Timeframe in minutes to look back, default 0", default=0, type=int)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            timetotal_minutes = args.minutes + (args.hours * 60) + (args.days * 1440)
+            if timetotal_minutes == 0:
+                timetotal_minutes = 1440  # default to the last 24 hours
+            time_reference = int(datetime.now().timestamp()) - (timetotal_minutes * 60)
+            # build the cutoff in UTC so the trailing 'Z' designator is accurate
+            time_reference_z = datetime.utcfromtimestamp(time_reference).isoformat() + 'Z'
+            filters = [f'created>={time_reference_z}']
+            if args.dataset:
+                filters.append(f'referenced_datasets=={args.dataset}')
+            if args.state:
+                # assumes the Query Service property filter accepts a state clause
+                filters.append(f'state=={args.state}')
+            params = {'property': urllib.parse.quote(','.join(filters)), 'orderBy': '-created'}
+            aepp_query = queryservice.QueryService(config=self.config)
+            queries = aepp_query.getQueries(property=params['property'], orderby=params['orderBy'])
+            # keep only user-triggered queries, then build flat rows for the CSV export
+            list_queries = [q for q in queries if q.get('client') in ("Adobe Query Service UI", 'Generic PostgreSQL')]
+            rows = []
+            for q in list_queries:
+                rows.append({
+                    "id": q.get("id","N/A"),
+                    "created": q.get("created"),
+                    "client": q.get("client","N/A"),
+                    "elapsedTime": q.get("elapsedTime","N/A"),
+                    "userId": q.get("userId","N/A"),
+                })
+            df_queries = pd.DataFrame(rows)
+            df_queries.to_csv(f"{self.config.sandbox}_queries.csv", index=False)
+            console.print(f"Queries exported to {self.config.sandbox}_queries.csv", style="green")
+            table = Table(title=f"Queries in Sandbox: {self.config.sandbox}")
+            table.add_column("ID", style="cyan")
+            table.add_column("Created", style="yellow")
+            table.add_column("Client", style="white")
+            table.add_column("Elapsed Time (ms)", style="white")
+            for q in list_queries:
+                table.add_row(
+                    str(q.get("id","N/A")),
+                    str(q.get("created","N/A")),
+                    str(q.get("client","N/A")),
+                    str(q.get("elapsedTime","N/A"))
+                )
+            console.print(table)
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
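
The lookback window above is plain epoch arithmetic; the same computation as a standalone sketch (the argument values are illustrative):

    from datetime import datetime, timezone

    minutes, hours, days = 0, 0, 2                       # e.g. `get_queries -d 2`
    total_minutes = minutes + hours * 60 + days * 1440
    if total_minutes == 0:
        total_minutes = 1440                             # default: last 24 hours
    cutoff = int(datetime.now(timezone.utc).timestamp()) - total_minutes * 60
    cutoff_z = datetime.fromtimestamp(cutoff, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    print(f"created>={cutoff_z}")                        # the property filter the command sends
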
+    @login_required
+    def do_query(self, args):
+        """Execute a SQL query against the current sandbox"""
+        parser = argparse.ArgumentParser(prog='query', add_help=True)
+        parser.add_argument("sql_query", help="SQL query to execute", type=str)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            aepp_query = queryservice.QueryService(config=self.config)
+            conn = aepp_query.connection()
+            iqs2 = queryservice.InteractiveQuery2(conn)
+            result: pd.DataFrame = iqs2.query(sql=args.sql_query)
+            # compute the filename once so the message matches the file actually written
+            filename = f"query_result_{int(datetime.now().timestamp())}.csv"
+            result.to_csv(filename, index=False)
+            console.print(f"Query result exported to {filename}", style="green")
+            console.print(result)
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+
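
Outside the shell, the same round trip is three calls; a minimal sketch, again assuming the importConfigFile loader and a placeholder config path:

    import aepp
    import pandas as pd
    from aepp import queryservice

    config = aepp.importConfigFile("config.json", connectInstance=True)
    qs = queryservice.QueryService(config=config)
    conn = qs.connection()                         # connection details for interactive queries
    iqs = queryservice.InteractiveQuery2(conn)
    df: pd.DataFrame = iqs.query(sql="SELECT 1")   # returns a pandas DataFrame, as above
    df.to_csv("query_result.csv", index=False)
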
+    @login_required
+    def do_extractArtefacts(self, args):
+        """Extract all artefacts from the current sandbox to a local folder"""
+        console.print("Extracting artefacts...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtefacts', description='Extract artefacts from AEP')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to', default='./extractions')
+        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"', default='ndl2')
+        try:
+            args = parser.parse_args(shlex.split(args))
+            aepp.extractSandboxArtefacts(
+                sandbox=self.config,
+                localFolder=args.localfolder,
+                region=args.region
+            )
+            console.print(Panel("Extraction completed!", style="green"))
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
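
The command is a thin wrapper; the equivalent direct call, as a sketch with a placeholder config file and the assumed importConfigFile loader:

    import aepp

    config = aepp.importConfigFile("config.json", connectInstance=True)
    aepp.extractSandboxArtefacts(sandbox=config, localFolder="./extractions", region="ndl2")
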
+    @login_required
+    def do_extractArtefact(self, args):
+        """Extract a single artefact from the current sandbox to a local folder"""
+        console.print("Extracting artefact...", style="blue")
+        parser = argparse.ArgumentParser(prog='extractArtefact', description='Extract a single artefact from AEP')
+        parser.add_argument('artefact', help='artefact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
+        parser.add_argument('-at','--artefactType', help='artefact type')
+        parser.add_argument('-lf','--localfolder', help='Local folder to extract artefacts to', default='extractions')
+        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"', default='ndl2')
+
+        try:
+            args = parser.parse_args(shlex.split(args))
+            # note: --region is parsed for symmetry with extractArtefacts but is not
+            # forwarded to extractSandboxArtefact here
+            aepp.extractSandboxArtefact(
+                artefact=args.artefact,
+                artefactType=args.artefactType,
+                sandbox=self.config,
+                localFolder=args.localfolder
+            )
+            console.print("Extraction completed!", style="green")
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+    @login_required
+    def do_sync(self, args):
+        """Synchronize an artefact from a base sandbox or a local folder to one or more target sandboxes"""
+        console.print("Syncing artefact...", style="blue")
+        parser = argparse.ArgumentParser(prog='sync', description='Synchronize artefacts between sandboxes')
+        parser.add_argument('artefact', help='artefact to sync (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
+        parser.add_argument('-at','--artefactType', help='artefact type')
+        parser.add_argument('-t','--targets', help='comma-separated target sandboxes', required=True)
+        parser.add_argument('-lf','--localfolder', help='Local folder to read artefacts from', default='extractions')
+        parser.add_argument('-b','--baseSandbox', help='Base sandbox for synchronization')
+        parser.add_argument('-rg','--region', help='Region to extract artefacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"', default='ndl2')
+        parser.add_argument('-v','--verbose', help='Enable verbose output', default=True)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            if ',' in args.targets:
+                args.targets = args.targets.split(',')
+            else:
+                args.targets = [args.targets]
+            console.print("Initializing Synchronizer...", style="blue")
+            if args.baseSandbox:
+                syncer = synchronizer.Synchronizer(
+                    config=self.config,
+                    targets=args.targets,
+                    region=args.region,
+                    baseSandbox=args.baseSandbox,
+                )
+            else:
+                # no base sandbox given: read the artefacts from the local folder
+                syncer = synchronizer.Synchronizer(
+                    config=self.config,
+                    targets=args.targets,
+                    region=args.region,
+                    localFolder=args.localfolder,
+                )
+            console.print("Starting Sync...", style="blue")
+            syncer.syncComponent(
+                component=args.artefact,
+                componentType=args.artefactType,
+                verbose=args.verbose
+            )
+            console.print("Sync completed!", style="green")
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+
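
The sync command maps onto the Synchronizer class one-to-one; a sketch with placeholder sandbox and schema names, assuming the same importConfigFile loader:

    import aepp
    from aepp import synchronizer

    config = aepp.importConfigFile("config.json", connectInstance=True)
    syncer = synchronizer.Synchronizer(
        config=config,
        targets=["dev-sandbox"],   # placeholder target sandbox name(s)
        region="ndl2",
        baseSandbox="prod",        # or pass localFolder="extractions" instead
    )
    syncer.syncComponent(component="MySchema", componentType="schema", verbose=True)
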
+    def do_exit(self, args):
+        """Exit the application"""
+        console.print(Panel("Exiting...", style="blue"))
+        return True  # Stops the loop
+
+    def do_EOF(self, args):
+        """Handle Ctrl+D"""
+        console.print(Panel("Exiting...", style="blue"))
+        return True
+
+# --- 3. The Entry Point ---#
+
+def main():
+    # ARGPARSE: Handles the initial setup flags
+    parser = argparse.ArgumentParser(description="Interactive Client Tool", add_help=True)
+
+    # Optional: allow passing credentials via flags to skip the interactive login step
+    parser.add_argument("-sx", "--sandbox", help="Auto-login sandbox")
+    parser.add_argument("-s", "--secret", help="Secret")
+    parser.add_argument("-o", "--org_id", help="Auto-login org ID")
+    parser.add_argument("-sc", "--scopes", help="Scopes")
+    parser.add_argument("-cid", "--client_id", help="Auto-login client ID")
+    parser.add_argument("-cf", "--config_file", help="Path to config file", default=None)
+    args = parser.parse_args()
+    # Initialize the shell
+    shell = ServiceShell(**vars(args))
+    try:
+        shell.cmdloop()
+    except KeyboardInterrupt:
+        console.print(Panel("\nForce closing...", style="red"))
+
+if __name__ == "__main__":
+    main()
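
Because the shell ships as aepp/cli/__main__.py, it can always be started with `python -m aepp.cli` plus whichever flags apply, independent of the console-script name registered in entry_points.txt. It can also be driven programmatically; a minimal sketch, assuming config.json holds org_id, client_id, secret and sandbox-name so the shell auto-configures:

    from aepp.cli.__main__ import ServiceShell

    shell = ServiceShell(config_file="config.json")    # placeholder config file
    shell.onecmd("get_DLZ_credential user_drop_zone")  # cmd.Cmd runs a single command
    shell.onecmd('query "SELECT 1"')
    shell.onecmd("exit")
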
@@ -1,9 +1,7 @@
 Metadata-Version: 2.4
 Name: aepp
-Version: 0.4.3
+Version: 0.4.3.post2
 Summary: Package to manage AEP API endpoint and some helper functions
-Home-page: https://github.com/adobe/aepp
-Author: Julien Piccini
 Author-email: Julien Piccini <piccini.julien@gmail.com>
 License: Apache-2.0
 Project-URL: Homepage, https://github.com/adobe/aepp
@@ -24,10 +22,7 @@ Requires-Dist: tenacity
 Requires-Dist: deprecation
 Requires-Dist: datamodel-code-generator
 Requires-Dist: rich
-Dynamic: author
-Dynamic: home-page
 Dynamic: license-file
-Dynamic: requires-python
 
 # Adobe Experience Platform API made for humans
 
@@ -1,5 +1,5 @@
 aepp/__init__.py,sha256=tKlipgknDl84iALUNgR9mktILF3gSk1GgMUw_Gg-HXE,27912
-aepp/__version__.py,sha256
+aepp/__version__.py,sha256=Yekzu7BH82bvWze_UzjQ9aMGW0Xdp4efqOXr4a_Vd-E,23
 aepp/accesscontrol.py,sha256=PB3FcrO4bvDjdNxjHx7p_20hp4ahBXewoOSxuTGMXC8,17423
 aepp/catalog.py,sha256=hK9m3SAP0fhgkYqu14Tcfq14qBhw54tLCOF0mH31b1M,68237
 aepp/classmanager.py,sha256=CTYGkg5ygB8HtRia6DfT9WLBqXJOVg7pSM9jBB25Bqw,64707
@@ -34,9 +34,11 @@ aepp/som.py,sha256=XNm_Lu2wt2kpSSpldLptuER2eludFXeO9fI6i3iNCzo,34175
 aepp/synchronizer.py,sha256=nkZ3dn335JmwzzO3PqYkMhU7ZfiOHGKObfL1yZrnHLY,77932
 aepp/tags.py,sha256=t2qBallTcWR4IOXcDBmrPpqjbSay1z3E2bcRijzVm1s,17641
 aepp/utils.py,sha256=tG-YVXylm38-bynqfp5N_Mzyo7mhlZj-dLo7wLoO4tM,1200
-aepp
-aepp
-aepp-0.4.3.dist-info/
-aepp-0.4.3.dist-info/
-aepp-0.4.3.dist-info/
-aepp-0.4.3.dist-info/
+aepp/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aepp/cli/__main__.py,sha256=tyiwolY7vJ4B6Q5h7YInKZdyv1VInJkP4GyA0t7AECY,65366
+aepp-0.4.3.post2.dist-info/licenses/LICENSE,sha256=HjYTlfne3BbS5gNHzNqJ5COCiTQLUdf87QkzRyFbE4Y,10337
+aepp-0.4.3.post2.dist-info/METADATA,sha256=SpV2tLvoEv5sT4bO5Hb7KglSyiZG8-W6caOaHAFUtf4,5317
+aepp-0.4.3.post2.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+aepp-0.4.3.post2.dist-info/entry_points.txt,sha256=e7HAumUTymoUiCuVRzFlcchennUBLcjxvuiimySF98Y,48
+aepp-0.4.3.post2.dist-info/top_level.txt,sha256=dtZJI8SzhWVgZRl68PHKZX_fD6amvDiFR-lqD9FSJvE,5
+aepp-0.4.3.post2.dist-info/RECORD,,
File without changes
File without changes
File without changes
File without changes