aepp 0.5.2.post1__py3-none-any.whl → 0.5.2.post3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
aepp/__init__.py CHANGED
@@ -159,9 +159,8 @@ def __titleSafe__(text: str) -> str:
     Arguments:
         text : REQUIRED : the text to be converted
     """
-    invalid_chars = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', ' ']
-    for char in invalid_chars:
-        text = text.replace(char, '_')
+    valid_chars = "[^a-zA-Z0-9_\n\\.]"
+    text = re.sub(valid_chars, "_", text)
     return text

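Note: the filename sanitizer now whitelists characters instead of blacklisting a fixed set, so any character outside `[a-zA-Z0-9_\n\.]` becomes an underscore; the shipped helper relies on `re` being imported in `aepp/__init__.py`. A minimal sketch of the new behavior (the function name here is illustrative; the shipped helper is `__titleSafe__`):

```python
import re

def title_safe(text: str) -> str:
    # Whitelist from 0.5.2.post3: anything that is not a letter, digit,
    # underscore, newline, or dot becomes an underscore. Characters the old
    # blacklist missed (e.g. '&' or accented letters) are now replaced too.
    return re.sub(r"[^a-zA-Z0-9_\n\.]", "_", text)

print(title_safe("My Audience: 2024/Q1"))  # My_Audience__2024_Q1
```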
@@ -187,7 +186,7 @@ def extractSandboxArtifacts(
         completePath = mypath / f'{sandbox.sandbox}'
     else:
         completePath = Path(localFolder)
-    from aepp import schema, catalog, identity,customerprofile, segmentation
+    from aepp import schema, catalog, identity,customerprofile, segmentation, tags
     sch = schema.Schema(config=sandbox)
     cat = catalog.Catalog(config=sandbox)
     ide = identity.Identity(config=sandbox,region=region)
@@ -223,6 +222,16 @@ def extractSandboxArtifacts(
     mergePolicyPath.mkdir(exist_ok=True)
     audiencePath = completePath / 'audience'
     audiencePath.mkdir(exist_ok=True)
+    tagPath = completePath / 'tag'
+    tagPath.mkdir(exist_ok=True)
+    ## handling tags
+    tag_manager = tags.Tags(config=sandbox)
+    all_tags = tag_manager.getTags()
+    dict_id_name = {tag['id']:tag['name'] for tag in all_tags}
+    for tag in all_tags:
+        safe_name = __titleSafe__(tag.get('name','unknown'))
+        with open(f"{tagPath / safe_name}.json",'w') as f:
+            json.dump(tag,f,indent=2)
     myclasses = sch.getClasses()
     classesGlobal = sch.getClassesGlobal()
     behaviors = sch.getBehaviors()
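Note: `extractSandboxArtifacts` now snapshots the sandbox's tags alongside the other artifacts: each tag is written to `<folder>/<sandbox>/tag/<safe_name>.json`, and an id-to-name lookup (`dict_id_name`) is kept in memory to resolve tag references on datasets and audiences further down. A standalone sketch of the same pattern, assuming, as the diff implies, that `getTags()` returns dicts with `id` and `name` keys:

```python
import json
import re
from pathlib import Path

def dump_tags(all_tags: list, tag_path: Path) -> dict:
    """Write one JSON file per tag and return an id -> name lookup."""
    tag_path.mkdir(parents=True, exist_ok=True)
    dict_id_name = {tag['id']: tag['name'] for tag in all_tags}
    for tag in all_tags:
        # Same whitelist sanitizer as __titleSafe__ above.
        safe_name = re.sub(r"[^a-zA-Z0-9_\n\.]", "_", tag.get('name', 'unknown'))
        with open(tag_path / f"{safe_name}.json", 'w') as f:
            json.dump(tag, f, indent=2)
    return dict_id_name
```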
@@ -290,6 +299,9 @@ def extractSandboxArtifacts(
     datasets = cat.getDataSets()
     for key,value in datasets.items():
         value['id'] = key
+        if len(value.get('unifiedTags',[])) > 0:
+            tag_names = [dict_id_name.get(tag_id) for tag_id in value.get('unifiedTags',[])]
+            value['unifiedTags'] = tag_names
         with open(f"{datasetPath / value.get('tags',{}).get('adobe/pqs/table',[key])[0]}.json",'w') as f:
             json.dump(value,f,indent=2)
     identities = ide.getIdentities()
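Note: when a dataset carries `unifiedTags` (a list of tag ids), the ids are swapped for their names via the lookup before the dataset definition is written to disk. Since `dict.get` is used without a fallback, an id missing from the lookup comes back as `None` rather than raising; a sketch with hypothetical ids:

```python
# Hypothetical lookup built from getTags(), as above.
dict_id_name = {"tag-123": "marketing", "tag-456": "pii"}

value = {"name": "web_events", "unifiedTags": ["tag-123", "tag-999"]}
if len(value.get('unifiedTags', [])) > 0:
    # An unknown id ("tag-999") maps to None instead of raising.
    value['unifiedTags'] = [dict_id_name.get(t) for t in value['unifiedTags']]

print(value['unifiedTags'])  # ['marketing', None]
```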
@@ -307,6 +319,9 @@ def extractSandboxArtifacts(
     audiences = mysegmentation.getAudiences()
     for el in audiences:
         safe_name = __titleSafe__(el.get('name','unknown'))
+        if len(el.get('tags',[])) > 0:
+            tag_names = [dict_id_name.get(tag_id) for tag_id in el.get('tags',[])]
+            el['tags'] = tag_names
         with open(f"{audiencePath / safe_name}.json",'w') as f:
             json.dump(el,f,indent=2)

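The same id-to-name substitution is applied to each audience's `tags` list before its JSON file is written, so the dataset sketch above applies unchanged.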
@@ -345,9 +360,17 @@ def extractSandboxArtifact(
     with open(f'{completePath}/config.json','w') as f:
         json.dump(globalConfig,f,indent=2)

-    from aepp import schema, catalog, identity
+    from aepp import schema, catalog, tags
     sch = schema.Schema(config=sandbox)
     cat = catalog.Catalog(config=sandbox)
+    ### taking care of tas
+    tagPath = completePath / 'tag'
+    tagPath.mkdir(exist_ok=True)
+    tag_manager = tags.Tags(config=sandbox)
+    all_tags = tag_manager.getTags()
+    dict_tag_id_name = {tag['id']:tag['name'] for tag in all_tags}
+    with open(f'{tagPath}/tags.json','w') as f:
+        json.dump(all_tags,f,indent=2)
     if artifactType == 'class':
         __extractClass__(artifact,completePath,sandbox)
     elif artifactType == 'schema':
@@ -357,16 +380,16 @@ def extractSandboxArtifact(
     elif artifactType == 'datatype':
         __extractDataType__(artifact,completePath,sandbox)
     elif artifactType == 'dataset':
-        __extractDataset__(artifact,completePath,sandbox,region)
+        __extractDataset__(artifact,completePath,sandbox,region,dict_tag_id_name)
     elif artifactType == 'identity':
         __extractIdentity__(artifact,region,completePath,sandbox)
     elif artifactType == 'mergepolicy':
-        __extractMergePolicy__(artifact,completePath,sandbox)
+        __extractMergePolicy__(artifact,completePath,sandbox,dict_tag_id_name=dict_tag_id_name)
     elif artifactType == 'audience':
-        __extractAudience__(artifact,completePath,sandbox)
+        __extractAudience__(artifact,completePath,sandbox,dict_tag_id_name)
     else:
         raise ValueError("artifactType not recognized")
-
+
 def __extractClass__(classEl: str,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None):
     classPath = Path(folder) / 'class'
     classPath.mkdir(exist_ok=True)
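Note: the single-artifact extractor builds the same id-to-name lookup once, dumps the full tag list to `tag/tags.json`, and threads `dict_tag_id_name` into the dataset, merge-policy, and audience helpers. Each helper also gains `**kwargs`, so call sites that do not know about the new parameter keep working. A minimal sketch of that signature pattern, with a hypothetical helper name:

```python
def resolve_audience_tags(audience: dict, dict_tag_id_name: dict = None, **kwargs) -> dict:
    # The lookup is optional: resolution only happens when the caller supplies
    # it, and **kwargs lets older call sites pass extra arguments harmlessly.
    if dict_tag_id_name is not None and len(audience.get('tags', [])) > 0:
        audience['tags'] = [dict_tag_id_name.get(t) for t in audience['tags']]
    return audience
```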
@@ -511,7 +534,7 @@ def __extractIdentity__(identityStr: str,region:str=None,folder: Union[str, Path
     with open(f"{identityPath / file_name}.json",'w') as f:
         json.dump(myIdentity,f,indent=2)

-def __extractDataset__(dataset: str,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None, region:str=None):
+def __extractDataset__(dataset: str,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None, region:str=None,dict_tag_id_name: dict = None,**kwargs):
     from aepp import catalog
     cat = catalog.Catalog(config=sandbox)
     datasets = cat.getDataSets()
@@ -520,6 +543,9 @@ def __extractDataset__(dataset: str,folder: Union[str, Path] = None,sandbox: 'Co
         if key == dataset or value.get('tags',{}).get('adobe/pqs/table',[''])[0] == dataset or value.get('name','') == dataset:
             myDataset = value
             myDataset['id'] = key
+            if dict_tag_id_name is not None and len(myDataset.get('unifiedTags',[])) > 0:
+                tag_names = [dict_tag_id_name.get(tag_id) for tag_id in myDataset.get('unifiedTags',[])]
+                myDataset['unifiedTags'] = tag_names
     if myDataset is None:
         raise ValueError("Dataset not found")
     datasetPath = Path(folder) / 'dataset'
@@ -531,7 +557,7 @@ def __extractDataset__(dataset: str,folder: Union[str, Path] = None,sandbox: 'Co
     if schema is not None:
         __extractSchema__(schema,folder,sandbox,region)

-def __extractMergePolicy__(mergePolicy: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None,region:str=None):
+def __extractMergePolicy__(mergePolicy: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None,region:str=None,dict_tag_id_name: dict = None,**kwargs):
     from aepp import customerprofile
     ups = customerprofile.Profile(config=sandbox)
     mymergePolicies = ups.getMergePolicies()
@@ -539,13 +565,13 @@ def __extractMergePolicy__(mergePolicy: str = None,folder:Union[str, Path]=None,
     if mymergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
         list_ds = mymergePolicy['attributeMerge'].get('order',[])
         for ds in list_ds:
-            __extractDataset__(ds,folder,sandbox,region)
+            __extractDataset__(ds,folder,sandbox,region,dict_tag_id_name=dict_tag_id_name)
     mergePolicyPath = Path(folder) / 'mergePolicy'
     mergePolicyPath.mkdir(exist_ok=True)
     with open(f"{mergePolicyPath / mymergePolicy.get('id','unknown')}.json",'w') as f:
         json.dump(mymergePolicy,f,indent=2)

-def __extractAudience__(audienceName: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None):
+def __extractAudience__(audienceName: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None,dict_tag_id_name: dict = None,**kwargs):
     from aepp import segmentation
     mysegmentation = segmentation.Segmentation(config=sandbox)
     audiences = mysegmentation.getAudiences()
@@ -553,5 +579,8 @@ def __extractAudience__(audienceName: str = None,folder:Union[str, Path]=None, s
     audiencePath = Path(folder) / 'audience'
     audiencePath.mkdir(exist_ok=True)
     safe_name = __titleSafe__(myaudience.get('name','unknown'))
+    if len(myaudience.get('tags',[])) > 0 and dict_tag_id_name is not None:
+        tag_names = [dict_tag_id_name.get(tag_id) for tag_id in myaudience.get('tags',[])]
+        myaudience['tags'] = tag_names
     with open(f"{audiencePath / safe_name}.json",'w') as f:
         json.dump(myaudience,f,indent=2)
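A hedged usage sketch of the updated single-artifact extractor: `artifact` and `sandbox` match the call sites in this diff, while `artifactType`, `localFolder`, and `region` are assumed from the bulk extractor's parameters; obtaining a `ConnectObject` via `importConfigFile` follows the usual aepp setup.

```python
import aepp

# Assumed setup: importConfigFile with connectInstance=True returns a ConnectObject.
sandbox = aepp.importConfigFile("myconfig.json", connectInstance=True)
aepp.extractSandboxArtifact(
    artifact="my_audience",      # hypothetical audience name or id
    artifactType="audience",
    sandbox=sandbox,
    localFolder="extractions",
    region="ndl2",
)
# In 0.5.2.post3 this run also writes tag/tags.json and swaps tag ids for
# names in the extracted audience definition.
```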
aepp/__version__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.5.2-1"
+__version__ = "0.5.2-3"
aepp/catalog.py CHANGED
@@ -489,6 +489,7 @@ class Catalog:
                       identityEnabled:bool=False,
                       upsert:bool=False,
                       tags:dict=None,
+                      unifiedTags:list[str]=None,
                       systemLabels:list[str]=None,
                       **kwargs)-> dict:
         """
@@ -503,6 +504,7 @@ class Catalog:
             upsert : OPTIONAL : If the dataset to be created with profile enbaled and Upsert capability.
             tags : OPTIONAL : set of attribute to add as tags.
             systemLabels : OPTIONAL : A list of string to attribute system based label on creation.
+            unifiedTags : OPTIONAL : a list of tags to be added to the dataset
         possible kwargs
             requestDataSource : Set to true if you want Catalog to create a dataSource on your behalf; otherwise, pass a dataSourceId in the body.
         """
@@ -533,6 +535,8 @@ class Catalog:
             data['tags']["unifiedIdentity"] = ["enabled:true"]
         if upsert:
             data['tags']['unifiedProfile'] = ["enabled: true","isUpsert: true"]
+        if unifiedTags is not None and type(unifiedTags) == list:
+            data["unifiedTags"] = unifiedTags
         if tags is not None and type(tags) == dict:
            for key in tags:
                data['tags'][key] = tags[key]
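Note: `createDataSet` now accepts a `unifiedTags` list and, when given one, copies it verbatim into the request body as `data['unifiedTags']`. A hedged usage sketch; only `unifiedTags`, `tags`, `upsert`, `identityEnabled`, and `systemLabels` appear in this diff, so the other argument names are assumptions:

```python
from aepp import catalog

cat = catalog.Catalog(config=sandbox)      # 'sandbox' is a ConnectObject, as above
res = cat.createDataSet(
    name="web_events",                     # hypothetical; leading parameters
                                           # are not shown in this diff
    unifiedTags=["tag-123", "tag-456"],    # hypothetical tag ids, copied
                                           # verbatim into data['unifiedTags']
    tags={"myOrg/source": ["cli"]},        # the classic tags dict still works
)
```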
aepp/cli/__main__.py CHANGED
@@ -1,7 +1,7 @@
-from ast import arg
 from matplotlib.pyplot import table
 import aepp
 from aepp import synchronizer, schema, schemamanager, fieldgroupmanager, datatypemanager, identity, queryservice,catalog,flowservice,sandboxes, segmentation
+from aepp.cli.upsfieldsanalyzer import UpsFieldsAnalyzer
 import argparse, cmd, shlex, json
 from functools import wraps
 from rich.console import Console
@@ -37,6 +37,7 @@ class ServiceShell(cmd.Cmd):
         super().__init__()
         self.config = None
         self.connectInstance = True
+        self.ups_profile_analyzer:UpsFieldsAnalyzer|None = None
         if kwargs.get("config_file") is not None:
             config_path = Path(kwargs.get("config_file"))
             if not config_path.is_absolute():
@@ -69,7 +70,7 @@ class ServiceShell(cmd.Cmd):
         )
         self.prompt = f"{self.config.sandbox}> "
         console.print(Panel(f"Connected to [bold green]{self.sandbox}[/bold green]", style="blue"))
-
+
     def do_createConfigFile(self, arg:Any) -> None:
         """Create a configuration file for future use"""
         parser = argparse.ArgumentParser(prog='createConfigFile', add_help=True)
@@ -124,15 +125,20 @@ class ServiceShell(cmd.Cmd):
         """Change the current sandbox after configuration"""
         parser = argparse.ArgumentParser(prog='change sandbox', add_help=True)
         parser.add_argument("sandbox", help="sandbox name to switch to")
-        args = parser.parse_args(shlex.split(args))
-        self.sandbox = str(args.sandbox) if args.sandbox else console.print(Panel("(!) Please provide a sandbox name using -sx or --sandbox", style="red"))
-        if self.config is not None:
-            if args.sandbox:
-                self.config.setSandbox(str(args.sandbox))
-                self.prompt = f"{self.config.sandbox}> "
-                console.print(Panel(f"Sandbox changed to: [bold green]{self.config.sandbox}[/bold green]", style="blue"))
-        else:
-            console.print(Panel("(!) You must configure the connection first using the 'config' command.", style="red"))
+        try:
+            args = parser.parse_args(shlex.split(args))
+            self.sandbox = str(args.sandbox) if args.sandbox else console.print(Panel("(!) Please provide a sandbox name using -sx or --sandbox", style="red"))
+            if self.config is not None:
+                if args.sandbox:
+                    self.config.setSandbox(str(args.sandbox))
+                    self.prompt = f"{self.config.sandbox}> "
+                    console.print(Panel(f"Sandbox changed to: [bold green]{self.config.sandbox}[/bold green]", style="blue"))
+            else:
+                console.print(Panel("(!) You must configure the connection first using the 'config' command.", style="red"))
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return

     @login_required
     def do_get_sandboxes(self, args:Any) -> None:
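Note: `do_sandbox` now wraps argument parsing like the other commands. Listing `except Exception` before `except SystemExit` is safe because argparse signals usage errors with `SystemExit`, which subclasses `BaseException` rather than `Exception`, so the first handler never swallows it. A quick illustration:

```python
import argparse

parser = argparse.ArgumentParser(prog='sandbox')
parser.add_argument("sandbox")
try:
    parser.parse_args([])          # missing positional -> argparse raises SystemExit(2)
except Exception as e:             # never sees SystemExit: it is a BaseException
    print(f"(!) Error: {e}")
except SystemExit:
    print("usage error handled")   # this branch runs
```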
@@ -169,7 +175,112 @@ class ServiceShell(cmd.Cmd):
             console.print(f"(!) Error: {str(e)}", style="red")
         except SystemExit:
             return
-
+
+    @login_required
+    def do_get_profile_paths_info(self,args:Any)->None:
+        """Get usage information for all Profile paths"""
+        parser = argparse.ArgumentParser(prog='get_profile_paths_info', add_help=True)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            if self.ups_profile_analyzer is None:
+                console.print("Initializing Profile UPS Fields Analyzer. This will take few minutes...", style="blue")
+                self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config)
+            else:
+                if self.config.sandbox != self.ups_profile_analyzer.sandbox:
+                    console.print("Re-initializing Profile UPS Fields Analyzer for the new sandbox. This will take few minutes...", style="blue")
+                    self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config)
+            console.print("Analyzing all Profile paths information. This will take few minutes...", style="blue")
+            df_analysis:pd.DataFrame = self.ups_profile_analyzer.analyzePaths(output='df')
+            if df_analysis is not None:
+                console.print(df_analysis)
+                df_analysis.to_csv(f"profile_all_paths_info.csv", index=False)
+                console.print(f"Profile all paths information data exported to profile_all_paths_info.csv", style="green")
+            else:
+                console.print("(!) No profile paths information data found.", style="red")
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+    @login_required
+    def do_get_profile_path_info(self, args:Any) -> None:
+        """Get path information on Profile"""
+        parser = argparse.ArgumentParser(prog='get_profile_path_info', add_help=True)
+        parser.add_argument("path", help="Dot notation of the path to analyze in Profile Storage", default=None,type=str)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            if self.ups_profile_analyzer is None:
+                console.print("Initializing Profile UPS Fields Analyzer. This will take few minutes...", style="blue")
+                self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config)
+            else:
+                if self.config.sandbox != self.ups_profile_analyzer.sandbox:
+                    console.print("Re-initializing Profile UPS Fields Analyzer for the new sandbox. This will take few minutes...", style="blue")
+                    self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config)
+            analysis = self.ups_profile_analyzer.analyzePath(args.path)
+            if analysis:
+                console.print_json(data=analysis)
+                with open(f"profile_path_info_{args.path.replace('/','_')}.json", 'w') as f:
+                    json.dump(analysis, f, indent=4)
+                console.print(f"Profile path information data exported to profile_path_info_{args.path.replace('/','_')}.json", style="green")
+            else:
+                console.print("(!) No profile path information data found.", style="red")
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+    @login_required
+    def do_get_event_paths_info(self,args:Any)->None:
+        """Get information for all Experience Event paths"""
+        parser = argparse.ArgumentParser(prog='get_event_paths_info', add_help=True)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            if self.ups_profile_analyzer is None:
+                console.print("Initializing Event UPS Fields Analyzer. This will take few minutes...", style="blue")
+                self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config,union='https://ns.adobe.com/xdm/context/experienceevent__union')
+            else:
+                if self.config.sandbox != self.ups_profile_analyzer.sandbox:
+                    console.print("Re-initializing Event UPS Fields Analyzer for the new sandbox. This will take few minutes...", style="blue")
+                    self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config,union='https://ns.adobe.com/xdm/context/experienceevent__union')
+            console.print("Analyzing all Event paths information. This will take few minutes...", style="blue")
+            df_analysis:pd.DataFrame = self.ups_profile_analyzer.analyzePaths(output='df')
+            if df_analysis is not None:
+                console.print(df_analysis)
+                df_analysis.to_csv(f"event_all_paths_info.csv", index=False)
+                console.print(f"Event all paths information data exported to event_all_paths_info.csv", style="green")
+            else:
+                console.print("(!) No event paths information data found.", style="red")
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+    @login_required
+    def do_get_event_path_info(self, args:Any) -> None:
+        """Get path information on Experience Event"""
+        parser = argparse.ArgumentParser(prog='get_event_path_info', add_help=True)
+        parser.add_argument("path", help="Dot notation of the path to analyze in Experience Event Storage", default=None,type=str)
+        try:
+            args = parser.parse_args(shlex.split(args))
+            if self.ups_profile_analyzer is None:
+                console.print("Initializing Event UPS Fields Analyzer. This will take few minutes...", style="blue")
+                self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config,union='https://ns.adobe.com/xdm/context/experienceevent__union')
+            else:
+                if self.config.sandbox != self.ups_profile_analyzer.sandbox:
+                    console.print("Re-initializing Event UPS Fields Analyzer for the new sandbox. This will take few minutes...", style="blue")
+                    self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config,union='https://ns.adobe.com/xdm/context/experienceevent__union')
+            analysis = self.ups_profile_analyzer.analyzePath(args.path)
+            if analysis:
+                console.print_json(data=analysis)
+                with open(f"event_path_info_{args.path.replace('/','_')}.json", 'w') as f:
+                    json.dump(analysis, f, indent=4)
+                console.print(f"Event path information data exported to event_path_info_{args.path.replace('/','_')}.json", style="green")
+            else:
+                console.print("(!) No event path information data found.", style="red")
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return

     @login_required
     def do_get_schemas(self, args:Any) -> None:
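Note: all four new commands share the same lifecycle: a `UpsFieldsAnalyzer` is built lazily on first use, cached on the shell, and rebuilt whenever the active sandbox no longer matches the cached analyzer's `sandbox` attribute (the Experience Event variants pass the event union schema via the `union` parameter). A condensed sketch of that caching logic as a hypothetical helper; the shipped code inlines the check in each command:

```python
EVENT_UNION = 'https://ns.adobe.com/xdm/context/experienceevent__union'

def _get_analyzer(self, union: str = None) -> 'UpsFieldsAnalyzer':
    """Return the cached analyzer, rebuilding it when the sandbox changed.

    Note: like the shipped commands, this only compares sandboxes, not
    which union the cached analyzer was originally built for.
    """
    if (self.ups_profile_analyzer is None
            or self.config.sandbox != self.ups_profile_analyzer.sandbox):
        kwargs = {'union': union} if union else {}
        self.ups_profile_analyzer = UpsFieldsAnalyzer(config=self.config, **kwargs)
    return self.ups_profile_analyzer
```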
@@ -783,8 +894,9 @@ class ServiceShell(cmd.Cmd):
             return

     @login_required
-    def do_get_datasets_tableName(self, args:Any) -> None:
-        parser = argparse.ArgumentParser(prog='get_datasets', add_help=True)
+    def do_get_datasets_tableNames(self, args:Any) -> None:
+        """List all datasets with their table names in the current sandbox"""
+        parser = argparse.ArgumentParser(prog='get_datasets_tableNames', add_help=True)
         try:
             args = parser.parse_args(shlex.split(args))
             aepp_cat = catalog.Catalog(config=self.config)
@@ -956,10 +1068,11 @@ class ServiceShell(cmd.Cmd):
     def do_get_identities(self, args:Any) -> None:
         """List all identities in the current sandbox"""
         parser = argparse.ArgumentParser(prog='get_identities', add_help=True)
-        parser.add_argument("-r","--region", help="Region to get identities from: 'ndl2' (default), 'va7', 'aus5', 'can2', 'ind2'", default='ndl2')
+        parser.add_argument("-r","--region", help="Region to get identities from: 'ndl2' (default), 'va7', 'aus5', 'can2', 'ind2'", default='ndl2',type=str)
         parser.add_argument("-co","--custom_only",help="Get only custom identities", default=False,type=bool)
         try:
             args = parser.parse_args(shlex.split(args))
+            region = args.region if args.region else 'ndl2'
             aepp_identity = identity.Identity(config=self.config,region=args.region)
             identities = aepp_identity.getIdentities(only_custom=args.custom_only)
             df_identites = pd.DataFrame(identities)
@@ -991,6 +1104,23 @@ class ServiceShell(cmd.Cmd):
             args = parser.parse_args(shlex.split(args))
             aepp_audience = segmentation.Segmentation(config=self.config)
             audiences = aepp_audience.getAudiences()
+            flw = flowservice.FlowService(config=self.config)
+            destinations = flw.getFlows(onlyDestinations=True)
+            segments_shared = []
+            for tmpFlow in destinations:
+                if len(tmpFlow['transformations'])>0:
+                    tmpSegmentShared = tmpFlow['transformations'][0].get('params',{}).get('segmentSelectors',{}).get('selectors',[])
+                    for s in tmpSegmentShared:
+                        s['flowId'] = tmpFlow['id']
+                    segments_shared += tmpSegmentShared
+            segment_shared_dict = {seg.get('value',{}).get('id'):{
+                "exportMode" : seg.get('value',{}).get('exportMode'),
+                "scheduleFrequency": seg.get('value',{}).get("schedule",{}).get('frequency',''),
+                "flowId" : seg["flowId"]
+                } for seg in segments_shared}
+            for aud in audiences:
+                aud['usedInFlow'] = True if segment_shared_dict.get(aud.get("id","N/A"),{}) != {} else False
+                aud['sharedInfo'] = segment_shared_dict.get(aud.get("id","N/A"),{})
             df_audiences = pd.DataFrame(audiences)
             df_audiences.to_csv(f"{self.config.sandbox}_audiences.csv",index=False)
             console.print(f"Audiences exported to {self.config.sandbox}_audiences.csv", style="green")
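Note: `do_get_audiences` now cross-references destination flows: every segment selector found in a flow's first transformation is indexed by audience id, and each audience gains a `usedInFlow` flag plus a `sharedInfo` dict with the export mode, schedule frequency, and owning flow id. A sketch of the selector shape this code assumes, inferred from the lookups above rather than from an official Flow Service contract:

```python
# Hypothetical selector, with the flowId stamped on by the loop above.
selector = {
    "value": {
        "id": "aud-123",                       # audience id
        "exportMode": "DAILY_FULL_EXPORT",     # hypothetical value
        "schedule": {"frequency": "DAILY"},    # hypothetical value
    },
    "flowId": "flow-789",
}
shared = {
    selector["value"]["id"]: {
        "exportMode": selector["value"].get("exportMode"),
        "scheduleFrequency": selector["value"].get("schedule", {}).get("frequency", ""),
        "flowId": selector["flowId"],
    }
}
print(shared["aud-123"]["flowId"])  # flow-789
```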
@@ -999,14 +1129,44 @@ class ServiceShell(cmd.Cmd):
             table.add_column("Name", style="magenta")
             table.add_column("Evaluation", style="yellow")
             table.add_column("Total Profiles", style="green")
-            table.add_column("Evaluation Date", style="white")
+            table.add_column("In Flow", style="white")
             for aud in audiences:
                 table.add_row(
                     aud.get("id","N/A"),
                     aud.get("name","N/A"),
-                    '[red3]Batch[/red3]' if aud.get("evaluationInfo",{}).get("batch",{}).get('enabled') else '[chartreuse1]Streaming[/chartreuse1]' if aud.get("evaluationInfo",{}).get("continuous",{}).get('enabled') else '[blue_violet]Edge[/blue_violet]' if aud.get("evaluationInfo",{}).get("synchronous",{}).get('enabled') else 'N/A',
+                    '[bright_blue]Batch[/bright_blue]' if aud.get("evaluationInfo",{}).get("batch",{}).get('enabled') else '[chartreuse1]Streaming[/chartreuse1]' if aud.get("evaluationInfo",{}).get("continuous",{}).get('enabled') else '[purple]Edge[/purple]' if aud.get("evaluationInfo",{}).get("synchronous",{}).get('enabled') else 'N/A',
                     str(aud.get('metrics',{}).get('data',{}).get('totalProfiles','N/A')),
-                    datetime.fromtimestamp(aud.get('metrics',{}).get('updateEpoch',0)).isoformat(),
+                    '[green3]True[/green3]' if aud.get("usedInFlow",False) else '[red3]False[/red3]',
+                )
+            console.print(table)
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")
+        except SystemExit:
+            return
+
+    @login_required
+    def do_get_tags(self,args)->None:
+        """
+        Provide the list of tags defined in the current organization
+        """
+        parser = argparse.ArgumentParser(prog='get_tags', add_help=True)
+        try:
+            from aepp import tags
+            args = parser.parse_args(shlex.split(args))
+            aepp_tag = tags.Tags(config=self.config)
+            tags = aepp_tag.getTags()
+            df_tags = pd.DataFrame(tags)
+            df_tags.to_csv(f"tags.csv",index=False)
+            console.print(f"Tags exported to tags.csv", style="green")
+            table = Table(title=f"Tags in Organization: {self.config.org_id}")
+            table.add_column("ID", style="cyan")
+            table.add_column("Name", style="magenta")
+            table.add_column("Category Name", style="white")
+            for _, tg in df_tags.iterrows():
+                table.add_row(
+                    str(tg.get("id","N/A")),
+                    tg.get("name","N/A"),
+                    tg.get("tagCategoryName","N/A"),
                 )
             console.print(table)
         except Exception as e:
@@ -1316,7 +1476,7 @@ class ServiceShell(cmd.Cmd):
             conn = aepp_query.connection()
             iqs2 = queryservice.InteractiveQuery2(conn)
             result:pd.DataFrame = iqs2.query(sql=args.sql_query)
-            result.to_csv(f"query_result_{int(datetime.now().timestamp())}.csv", index=False)
+            result.sample(5).to_csv(f"query_result_{int(datetime.now().timestamp())}.csv", index=False)
             console.print(f"Query result exported to query_result_{int(datetime.now().timestamp())}.csv", style="green")
             console.print(result)
         except Exception as e:
@@ -1328,11 +1488,11 @@ class ServiceShell(cmd.Cmd):
     @login_required
     def do_extractArtifacts(self,args:Any) -> None:
         """extractArtifacts localfolder"""
-        console.print("Extracting artifacts...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtifacts', description='Extract artifacts from AEP')
+        parser = argparse.ArgumentParser(prog='extractArtifacts', description='Extract artifacts from AEP',add_help=True)
         parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to', default='./extractions')
         parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
         try:
+            console.print("Extracting artifacts...", style="blue")
             args = parser.parse_args(shlex.split(args))
             aepp.extractSandboxArtifacts(
                 sandbox=self.config,
@@ -1342,18 +1502,19 @@ class ServiceShell(cmd.Cmd):
             console.print(Panel("Extraction completed!", style="green"))
         except SystemExit:
             return
+        except Exception as e:
+            console.print(f"(!) Error: {str(e)}", style="red")

     @login_required
     def do_extractArtifact(self,args:Any) -> None:
         """extractArtifacts localfolder"""
-        console.print("Extracting artifact...", style="blue")
-        parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP')
+        parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP',add_help=True)
         parser.add_argument('artifact', help='artifact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
         parser.add_argument('-at','--artifactType', help='artifact type ')
         parser.add_argument('-lf','--localfolder', help='Local folder to extract artifacts to',default='extractions')
         parser.add_argument('-rg','--region', help='Region to extract artifacts from: "ndl2" (default), "va7", "aus5", "can2", "ind2"',default='ndl2')
-
         try:
+            console.print("Extracting artifact...", style="blue")
             args = parser.parse_args(shlex.split(args))
             aepp.extractSandboxArtifact(
                 artifact=args.artifact,
  @login_required
1369
1532
  def do_sync(self,args:Any) -> None:
1370
1533
  """extractArtifacts localfolder"""
1371
- console.print("Syncing artifact...", style="blue")
1372
- parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP')
1534
+ parser = argparse.ArgumentParser(prog='extractArtifact', description='Extract artifacts from AEP',add_help=True)
1373
1535
  parser.add_argument('artifact', help='artifact to extract (name or id): "schema","fieldgroup","datatype","descriptor","dataset","identity","mergepolicy","audience"')
1374
1536
  parser.add_argument('-at','--artifactType', help='artifact type ',type=str)
1375
1537
  parser.add_argument('-t','--targets', help='target sandboxes',nargs='+',type=str)
@@ -1380,18 +1542,22 @@ class ServiceShell(cmd.Cmd):
1380
1542
  try:
1381
1543
  args = parser.parse_args(shlex.split(args))
1382
1544
  console.print("Initializing Synchronizor...", style="blue")
1545
+ if args.region:
1546
+ region=args.region
1547
+ else:
1548
+ region='ndl2'
1383
1549
  if args.baseSandbox:
1384
1550
  synchronizor = synchronizer.Synchronizer(
1385
1551
  config=self.config,
1386
1552
  targets=args.targets,
1387
- region=args.region,
1553
+ region=region,
1388
1554
  baseSandbox=args.baseSandbox,
1389
1555
  )
1390
1556
  elif args.localfolder:
1391
1557
  synchronizor = synchronizer.Synchronizer(
1392
1558
  config=self.config,
1393
1559
  targets=args.targets,
1394
- region=args.region,
1560
+ region=region,
1395
1561
  localFolder=args.localfolder,
1396
1562
  )
1397
1563
  console.print("Starting Sync...", style="blue")