aepp 0.4.1.post2__py3-none-any.whl → 0.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aepp/__init__.py +53 -5
- aepp/__version__.py +1 -1
- aepp/synchronizer.py +268 -27
- {aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/METADATA +1 -1
- {aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/RECORD +8 -8
- {aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/WHEEL +0 -0
- {aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/licenses/LICENSE +0 -0
- {aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/top_level.txt +0 -0
aepp/__init__.py
CHANGED
@@ -153,14 +153,15 @@ def extractSandboxArtefacts(
         completePath = mypath / f'{sandbox.sandbox}'
     else:
         completePath = Path(localFolder)
-    from aepp import schema, catalog, identity
+    from aepp import schema, catalog, identity,customerprofile, segmentation
     sch = schema.Schema(config=sandbox)
     cat = catalog.Catalog(config=sandbox)
     ide = identity.Identity(config=sandbox,region=region)
     completePath.mkdir(exist_ok=True)
     globalConfig = {
         "imsOrgId":sandbox.org_id,
-        "tenantId":f"_{sch.getTenantId()}"
+        "tenantId":f"_{sch.getTenantId()}",
+        "sandbox":sandbox.sandbox
     }
     with open(f'{completePath}/config.json','w') as f:
         json.dump(globalConfig,f,indent=2)
@@ -184,6 +185,10 @@ def extractSandboxArtefacts(
     identityPath.mkdir(exist_ok=True)
     datasetPath = completePath / 'dataset'
     datasetPath.mkdir(exist_ok=True)
+    mergePolicyPath = completePath / 'mergePolicy'
+    mergePolicyPath.mkdir(exist_ok=True)
+    audiencePath = completePath / 'audience'
+    audiencePath.mkdir(exist_ok=True)
     myclasses = sch.getClasses()
     classesGlobal = sch.getClassesGlobal()
     behaviors = sch.getBehaviors()
@@ -257,6 +262,19 @@ def extractSandboxArtefacts(
     for el in identities:
         with open(f"{identityPath / el['code']}.json",'w') as f:
             json.dump(el,f,indent=2)
+    ## merge policies
+    ups = customerprofile.Profile(config=sandbox)
+    mymergePolicies = ups.getMergePolicies()
+    for el in mymergePolicies:
+        with open(f"{mergePolicyPath / el.get('id','unknown')}.json",'w') as f:
+            json.dump(el,f,indent=2)
+    ## audiences
+    mysegmentation = segmentation.Segmentation(config=sandbox)
+    audiences = mysegmentation.getAudiences()
+    for el in audiences:
+        safe_name = __titleSafe__(el.get('name','unknown'))
+        with open(f"{audiencePath / safe_name}.json",'w') as f:
+            json.dump(el,f,indent=2)
 
 def extractSandboxArtefact(
     sandbox: 'ConnectObject' = None,
@@ -271,7 +289,7 @@ def extractSandboxArtefact(
     sandbox: REQUIRED: the instance of a ConnectObject that contains the sandbox information and connection.
     localFolder: OPTIONAL: the local folder where to extract the sandbox. If not provided, it will use the current working directory and name the folder the name of the sandbox.
     artefact: REQUIRED: the id or the name of the artefact to export.
-    artefactType: REQUIRED: the type of artefact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity'
+    artefactType: REQUIRED: the type of artefact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity','mergepolicy'
    region: OPTIONAL: the region of the sandbox (default: nld2). This is used to fetch the correct API endpoints for the identities.
        Possible values: "va7","aus5", "can2", "ind2"
    """
@@ -287,7 +305,8 @@ def extractSandboxArtefact(
    sch = schema.Schema(config=sandbox)
    globalConfig = {
        "imsOrgId":sandbox.org_id,
-        "tenantId":f"_{sch.getTenantId()}"
+        "tenantId":f"_{sch.getTenantId()}",
+        "sandbox":sandbox.sandbox
    }
    with open(f'{completePath}/config.json','w') as f:
        json.dump(globalConfig,f,indent=2)
@@ -307,6 +326,10 @@ def extractSandboxArtefact(
        __extractDataset__(artefact,completePath,sandbox,region)
    elif artefactType == 'identity':
        __extractIdentity__(artefact,region,completePath,sandbox)
+    elif artefactType == 'mergepolicy':
+        __extractMergePolicy__(artefact,completePath,sandbox)
+    elif artefactType == 'audience':
+        __extractAudience__(artefact,completePath,sandbox)
    else:
        raise ValueError("artefactType not recognized")
 
@@ -472,4 +495,29 @@ def __extractDataset__(dataset: str,folder: Union[str, Path] = None,sandbox: 'Co
        json.dump(myDataset,f,indent=2)
    schema = myDataset.get('schemaRef',{}).get('id',None)
    if schema is not None:
-        __extractSchema__(schema,folder,sandbox,region)
+        __extractSchema__(schema,folder,sandbox,region)
+
+def __extractMergePolicy__(mergePolicy: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None,region:str=None):
+    from aepp import customerprofile
+    ups = customerprofile.Profile(config=sandbox)
+    mymergePolicies = ups.getMergePolicies()
+    mymergePolicy = [el for el in mymergePolicies if el.get('id','') == mergePolicy or el.get('name','') == mergePolicy][0]
+    if mymergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
+        list_ds = mymergePolicy['attributeMerge'].get('order',[])
+        for ds in list_ds:
+            __extractDataset__(ds,folder,sandbox,region)
+    mergePolicyPath = Path(folder) / 'mergePolicy'
+    mergePolicyPath.mkdir(exist_ok=True)
+    with open(f"{mergePolicyPath / mymergePolicy.get('id','unknown')}.json",'w') as f:
+        json.dump(mymergePolicy,f,indent=2)
+
+def __extractAudience__(audienceName: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None):
+    from aepp import segmentation
+    mysegmentation = segmentation.Segmentation(config=sandbox)
+    audiences = mysegmentation.getAudiences()
+    myaudience = [el for el in audiences if el.get('name','') == audienceName or el.get('id','') == audienceName][0]
+    audiencePath = Path(folder) / 'audience'
+    audiencePath.mkdir(exist_ok=True)
+    safe_name = __titleSafe__(myaudience.get('name','unknown'))
+    with open(f"{audiencePath / safe_name}.json",'w') as f:
+        json.dump(myaudience,f,indent=2)
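For orientation, a minimal sketch of how the extraction additions in 0.4.2 could be exercised. The config file name, sandbox name, and artefact names are hypothetical placeholders; only extractSandboxArtefacts, extractSandboxArtefact, and the new 'mergepolicy' and 'audience' artefact types come from the diff above.

import aepp

# Hypothetical credentials file and sandbox name.
mysandbox = aepp.importConfigFile('myconfig.json', sandbox='dev', connectInstance=True)

# Full export: 0.4.2 now also writes mergePolicy/*.json and audience/*.json
# next to the existing schema, class, dataset and identity folders,
# and records the sandbox name in config.json.
aepp.extractSandboxArtefacts(sandbox=mysandbox, region='nld2')

# Single-artefact export with the new types (id or name accepted).
aepp.extractSandboxArtefact(sandbox=mysandbox, artefact='My Merge Policy', artefactType='mergepolicy')
aepp.extractSandboxArtefact(sandbox=mysandbox, artefact='My Audience', artefactType='audience')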
aepp/__version__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.4.1.post2"
+__version__ = "0.4.2"
aepp/synchronizer.py
CHANGED
@@ -10,7 +10,7 @@
 
 import json
 import aepp
-from aepp import schema, schemamanager, fieldgroupmanager, datatypemanager,classmanager,identity,catalog
+from aepp import schema, schemamanager, fieldgroupmanager, datatypemanager,classmanager,identity,catalog,customerprofile,segmentation
 from copy import deepcopy
 from typing import Union
 from pathlib import Path
@@ -65,21 +65,83 @@ class Synchronizer:
         self.identityFolder = self.localfolder / 'identity'
         self.datasetFolder = self.localfolder / 'dataset'
         self.descriptorFolder = self.localfolder / 'descriptor'
+        self.mergePolicyFolder = self.localfolder / 'mergepolicy'
+        self.audienceFolder = self.localfolder / 'audience'
         if baseSandbox is not None:
             self.baseSandbox = baseSandbox
+        else:
+            with open(self.localfolder / 'config.json','r') as f:
+                local_config = json.load(f)
+            self.baseSandbox = local_config.get('sandbox',None)
         self.dict_targetsConfig = {target: aepp.configure(org_id=config_object['org_id'],client_id=config_object['client_id'],scopes=config_object['scopes'],secret=config_object['secret'],sandbox=target,connectInstance=True) for target in targets}
         self.region = region
-        self.
+        self.dict_baseComponents = {'schema':{},'class':{},'fieldgroup':{},'datatype':{},'datasets':{},'identities':{},"schemaDescriptors":{},'mergePolicy':{},'audience':{}}
+        self.dict_targetComponents = {target:{'schema':{},'class':{},'fieldgroup':{},'datatype':{},'datasets':{},'identities':{},"schemaDescriptors":{},'mergePolicy':{},'audience':{}} for target in targets}
 
-    def
+    def getSyncFieldGroupManager(self,fieldgroup:str,sandbox:str=None)-> dict:
+        """
+        Get a field group Manager from the synchronizer.
+        It searches through the component cache to see if the FieldGroupManager for the target sandbox is already instantiated.
+        If not, it generate an error.
+        Arguments:
+            fieldgroup : REQUIRED : Either $id, or name or alt:Id of the field group to get
+            sandbox : REQUIRED : name of the sandbox to get the field group from
+        """
+        if sandbox is None:
+            raise ValueError("a sandbox name must be provided")
+        if sandbox == self.baseSandbox:
+            if fieldgroup in self.dict_baseComponents['fieldgroup'].keys():
+                return self.dict_baseComponents['fieldgroup'][fieldgroup]
+            elif fieldgroup in [self.dict_baseComponents['fieldgroup'][fg].id for fg in self.dict_baseComponents['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_baseComponents['fieldgroup'].keys() if self.dict_baseComponents['fieldgroup'][fg].id == fieldgroup][0]
+                return self.dict_baseComponents['fieldgroup'][fg_key]
+            elif fieldgroup in [self.dict_baseComponents['fieldgroup'][fg].altId for fg in self.dict_baseComponents['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_baseComponents['fieldgroup'].keys() if self.dict_baseComponents['fieldgroup'][fg].altId == fieldgroup][0]
+                return self.dict_baseComponents['fieldgroup'][fg_key]
+            else:
+                raise ValueError(f"the field group '{fieldgroup}' has not been synchronized to the sandbox '{sandbox}'")
+        else:
+            if fieldgroup in self.dict_targetComponents[sandbox]['fieldgroup'].keys():
+                return self.dict_targetComponents[sandbox]['fieldgroup'][fieldgroup]
+            elif fieldgroup in [self.dict_targetComponents[sandbox]['fieldgroup'][fg].id for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys() if self.dict_targetComponents[sandbox]['fieldgroup'][fg].id == fieldgroup][0]
+                return self.dict_targetComponents[sandbox]['fieldgroup'][fg_key]
+            elif fieldgroup in [self.dict_targetComponents[sandbox]['fieldgroup'][fg].altId for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys() if self.dict_targetComponents[sandbox]['fieldgroup'][fg].altId == fieldgroup][0]
+                return self.dict_targetComponents[sandbox]['fieldgroup'][fg_key]
+            else:
+                raise ValueError(f"the field group '{fieldgroup}' has not been synchronized to the sandbox '{sandbox}'")
+
+    def getDatasetName(self,datasetId:str,sandbox:str=None)-> str:
+        """
+        Get a dataset name from the synchronizer base on the ID of the dataset.
+        Arguments:
+            datasetId : REQUIRED : id of the dataset to get
+            sandbox : REQUIRED : name of the sandbox to get the dataset from
+        """
+        if sandbox is None:
+            raise ValueError("a sandbox name must be provided")
+        if sandbox == self.baseSandbox:
+            if datasetId in [item.get('id') for key,item in self.dict_baseComponents['datasets'].items()]:
+                return [key for key,item in self.dict_baseComponents['datasets'].items() if item.get('id') == datasetId][0]
+            else:
+                raise ValueError(f"the dataset '{datasetId}' has not been synchronized to the sandbox '{sandbox}'")
+        else:
+            if datasetId in [item.get('id') for key,item in self.dict_targetComponents[sandbox]['datasets'].items()]:
+                return [key for key,item in self.dict_targetComponents[sandbox]['datasets'].items() if item.get('id') == datasetId][0]
+            else:
+                raise ValueError(f"the dataset '{datasetId}' has not been synchronized to the sandbox '{sandbox}'")
+
+    def syncComponent(self,component:Union[str,dict],componentType:str=None,force:bool=False,verbose:bool=False)-> dict:
         """
         Synchronize a component to the target sandbox.
         The component could be a string (name or id of the component in the base sandbox) or a dictionary with the definition of the component.
         If the component is a string, you have to have provided a base sandbox in the constructor.
         Arguments:
             component : REQUIRED : name or id of the component or a dictionary with the component definition
-            componentType : OPTIONAL : type of the component (e.g. "schema", "fieldgroup", "datatypes", "class", "identity", "dataset"). Required if a string is passed.
-
+            componentType : OPTIONAL : type of the component (e.g. "schema", "fieldgroup", "datatypes", "class", "identity", "dataset", "mergepolicy", "audience"). Required if a string is passed.
+                It is not required but if the type cannot be inferred from the component, it will raise an error.
+            force : OPTIONAL : if True, it will force the synchronization of the component even if it already exists in the target sandbox. Works for Schema, FieldGroup, DataType and Class.
             verbose : OPTIONAL : if True, it will print the details of the synchronization process
         """
         if type(component) == str:
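The two lookup helpers above read from the new dict_baseComponents / dict_targetComponents caches and raise a ValueError when the requested component was never synchronized. A hedged usage sketch, assuming `sync` is an already-constructed Synchronizer whose syncComponent() calls have populated the caches for a 'stage' target (the names and the dataset id are placeholders):

# Field groups can be looked up by title, $id or altId.
fgm = sync.getSyncFieldGroupManager('My Field Group', sandbox='stage')
print(fgm.id, fgm.altId)

# Resolve a dataset id back to the dataset name cached for that sandbox;
# a ValueError is raised if that dataset was never synchronized there.
ds_name = sync.getDatasetName('5e9c58e3...', sandbox='stage')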
@@ -87,8 +149,8 @@ class Synchronizer:
             raise ValueError("a base sandbox or a local folder must be provided to synchronize a component by name or id")
         if componentType is None:
             raise ValueError("the type of the component must be provided if the component is a string")
-        if componentType not in ['schema', 'fieldgroup', 'datatypes', 'class', 'identity', 'dataset']:
-            raise ValueError("the type of the component is not supported. Please provide one of the following types: schema, fieldgroup, datatypes, class, identity, dataset")
+        if componentType not in ['schema', 'fieldgroup', 'datatypes', 'class', 'identity', 'dataset', 'mergepolicy', 'audience']:
+            raise ValueError("the type of the component is not supported. Please provide one of the following types: schema, fieldgroup, datatypes, class, identity, dataset, mergepolicy, audience")
         if componentType in ['schema', 'fieldgroup', 'datatypes', 'class']:
             if self.baseConfig is not None:
                 base_schema = schema.Schema(config=self.baseConfig)
@@ -165,6 +227,30 @@ class Synchronizer:
                         break
             if len(component) == 1: ## if the component is the catalog API response {'key': {dataset definition}}
                 component = component[list(component.keys())[0]] ## accessing the real dataset definition
+        elif componentType == "mergepolicy":
+            if self.baseConfig is not None:
+                ups_base = customerprofile.Profile(config=self.baseConfig)
+                base_mergePolicies = ups_base.getMergePolicies()
+                if component in [el.get('id','') for el in base_mergePolicies] or component in [el.get('name','') for el in base_mergePolicies]:
+                    component = [el for el in base_mergePolicies if el.get('id','') == component or el.get('name','') == component][0]
+            elif self.localfolder is not None:
+                for file in self.mergePolicyFolder.glob('*.json'):
+                    mp_file = json.load(FileIO(file))
+                    if mp_file.get('id','') == component or mp_file.get('name','') == component:
+                        component = mp_file
+                        break
+        elif componentType == 'audience':
+            if self.baseConfig is not None:
+                seg_base = segmentation.Segmentation(config=self.baseConfig)
+                base_audiences = seg_base.getAudiences()
+                if component in [el.get('id','') for el in base_audiences] or component in [el.get('name','') for el in base_audiences]:
+                    component = [el for el in base_audiences if el.get('id','') == component or el.get('name','') == component][0]
+            elif self.localfolder is not None:
+                for file in self.audienceFolder.glob('*.json'):
+                    au_file = json.load(FileIO(file))
+                    if au_file.get('id','') == component or au_file.get('name','') == component:
+                        component = au_file
+                        break
         elif type(component) == dict:
             if 'meta:resourceType' in component.keys():
                 componentType = component['meta:resourceType']
@@ -180,30 +266,41 @@ class Synchronizer:
                 componentType = 'identity'
             elif 'files' in component.keys():
                 componentType = 'dataset'
+            elif 'attributeMerge' in component.keys():
+                componentType = 'mergepolicy'
+            elif 'expression' in component.keys():
+                componentType = 'audience'
             else:
                 raise TypeError("the component type could not be inferred from the component or is not supported. Please provide the type as a parameter")
         ## Synchronize the component to the target sandboxes
         if componentType == 'datatypes':
-            self.__syncDataType__(component,verbose=verbose)
+            self.__syncDataType__(component,verbose=verbose,force=force)
         if componentType == 'fieldgroup':
-            self.__syncFieldGroup__(component,verbose=verbose)
+            self.__syncFieldGroup__(component,verbose=verbose,force=force)
         if componentType == 'schema':
-            self.__syncSchema__(component,verbose=verbose)
+            self.__syncSchema__(component,verbose=verbose,force=force)
         if componentType == 'class':
-            self.__syncClass__(component,verbose=verbose)
+            self.__syncClass__(component,verbose=verbose,force=force)
         if componentType == 'identity':
             self.__syncIdentity__(component,verbose=verbose)
         if componentType == 'dataset':
             self.__syncDataset__(component,verbose=verbose)
+        if componentType == 'mergepolicy':
+            self.__syncMergePolicy__(component,verbose=verbose)
+        if componentType == 'audience':
+            self.__syncAudience__(component,verbose=verbose)
 
-
+
+    def __syncClass__(self,baseClass:'ClassManager',force:bool=False,verbose:bool=False)-> dict:
         """
         Synchronize a class to the target sandboxes.
         Arguments:
             baseClass : REQUIRED : class id or name to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of the class even if it already exists in the target sandbox
         """
         if not isinstance(baseClass,classmanager.ClassManager):
             raise TypeError("the baseClass must be a classManager instance")
+        self.dict_baseComponents['class'][baseClass.title] = baseClass
         baseClassName = baseClass.title
         baseBehavior = baseClass.behavior
         for target in self.dict_targetsConfig.keys():
@@ -223,14 +320,16 @@ class Synchronizer:
             self.dict_targetComponents[target]['class'][baseClassName] = t_newClass
 
 
-    def __syncDataType__(self,baseDataType:'DataTypeManager',verbose:bool=False)-> dict:
+    def __syncDataType__(self,baseDataType:'DataTypeManager',force:bool=False,verbose:bool=False)-> dict:
         """
         Synchronize a data type to the target sandbox.
         Arguments:
             baseDataType : REQUIRED : DataTypeManager object with the data type to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of the data type even if it already exists in the target sandbox
         """
         if not isinstance(baseDataType,datatypemanager.DataTypeManager):
             raise TypeError("the baseDataType must be a DataTypeManager object")
+        self.dict_baseComponents['datatype'][baseDataType.title] = baseDataType
         name_base_datatype = baseDataType.title
         for target in self.dict_targetsConfig.keys():
             targetSchema = schema.Schema(config=self.dict_targetsConfig[target])
@@ -249,7 +348,7 @@ class Synchronizer:
                 base_paths = df_base['path'].tolist()
                 target_paths = df_target['path'].tolist()
                 diff_paths = list(set(base_paths) - set(target_paths))
-                if len(diff_paths) > 0: ## there are differences
+                if len(diff_paths) > 0 or force==True: ## there are differences
                     base_datatypes_paths = baseDataType.getDataTypePaths()
                     df_base_limited = df_base[df_base['origin'] == 'self'].copy() ## exclude field group native fields
                     df_base_limited = df_base_limited[~df_base_limited['path'].isin(list(base_datatypes_paths.keys()))] ## exclude base of datatype rows
@@ -258,7 +357,7 @@ class Synchronizer:
                     base_dict_path_dtTitle = {}
                     for path,dt_id in base_datatypes_paths.items():
                         tmp_dt_manager = baseDataType.getDataTypeManager(dt_id)
-                        self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                        self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                         base_dict_path_dtTitle[path] = tmp_dt_manager.title
                     target_datatypes_paths = t_datatype.getDataTypePaths(som_compatible=True)
                     target_datatypes_paths_list = list(target_datatypes_paths.keys())
@@ -289,7 +388,7 @@ class Synchronizer:
                     base_dict_path_dtTitle = {}
                     for path,dt_id in base_datatypes_paths.items():
                         tmp_dt_manager = baseDataType.getDataTypeManager(dt_id)
-                        self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                        self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                         base_dict_path_dtTitle[path] = tmp_dt_manager.title
                     target_datatypes_paths = new_datatype.getDataTypePaths(som_compatible=True)
                     target_datatypes_paths_list = list(target_datatypes_paths.keys())
@@ -309,14 +408,16 @@ class Synchronizer:
                     raise Exception("the data type could not be created in the target sandbox")
             self.dict_targetComponents[target]['datatype'][name_base_datatype] = t_datatype
 
-    def __syncFieldGroup__(self,baseFieldGroup:'FieldGroupManager',verbose:bool=False)-> dict:
+    def __syncFieldGroup__(self,baseFieldGroup:'FieldGroupManager',force:bool=True,verbose:bool=False)-> dict:
         """
         Synchronize a field group to the target sandboxes.
         Argument:
             baseFieldGroup : REQUIRED : FieldGroupManager object with the field group to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of the field group even if it already exists in the target sandbox
         """
         if not isinstance(baseFieldGroup,fieldgroupmanager.FieldGroupManager):
             raise TypeError("the baseFieldGroup must be a FieldGroupManager object")
+        self.dict_baseComponents['fieldgroup'][baseFieldGroup.title] = baseFieldGroup
         name_base_fieldgroup = baseFieldGroup.title
         base_fg_classIds = baseFieldGroup.classIds
         for target in self.dict_targetsConfig.keys():
@@ -350,7 +451,7 @@ class Synchronizer:
                 base_paths = df_base['path'].tolist()
                 target_paths = df_target['path'].tolist()
                 diff_paths = [path for path in base_paths if path not in target_paths]
-                if len(diff_paths) > 0:
+                if len(diff_paths) > 0 or force==True:
                     base_datatypes_paths = baseFieldGroup.getDataTypePaths()
                     ## handling fieldgroup native fields
                     df_base_limited = df_base[df_base['origin'] == 'fieldGroup'].copy() ## exclude datatypes
@@ -371,7 +472,7 @@ class Synchronizer:
                     base_dict_path_dtTitle = {}
                     for path,dt_id in base_datatypes_paths.items():
                         tmp_dt_manager = baseFieldGroup.getDataTypeManager(dt_id)
-                        self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                        self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                         base_dict_path_dtTitle[path] = tmp_dt_manager.title
                     target_datatypes_paths = t_fieldgroup.getDataTypePaths(som_compatible=True)
                     target_datatypes_paths_list = list(target_datatypes_paths.keys())
@@ -423,7 +524,7 @@ class Synchronizer:
                     base_dict_path_dtTitle = {}
                     for path,dt_id in base_datatypes_paths.items():
                         tmp_dt_manager = baseFieldGroup.getDataTypeManager(dt_id)
-                        self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                        self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                         base_dict_path_dtTitle[path] = tmp_dt_manager.title
                     for path,dt_title in base_dict_path_dtTitle.items():
                         tmp_t_dt = self.dict_targetComponents[target]['datatype'][dt_title]
@@ -442,16 +543,18 @@ class Synchronizer:
             self.dict_targetComponents[target]['fieldgroup'][name_base_fieldgroup] = t_fieldgroup
 
 
-    def __syncSchema__(self,baseSchema:'SchemaManager',verbose:bool=False)-> dict:
+    def __syncSchema__(self,baseSchema:'SchemaManager',force:bool=False,verbose:bool=False)-> dict:
         """
         Sync the schema to the target sandboxes.
         Arguments:
             baseSchema : REQUIRED : SchemaManager object to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of field groups even if they already exist in the target schema
         """
         ## TO DO -> sync required fields
         if not isinstance(baseSchema,schemamanager.SchemaManager):
             raise TypeError("the baseSchema must be a SchemaManager object")
         name_base_schema = baseSchema.title
+        self.dict_baseComponents['schema'][name_base_schema] = baseSchema
         descriptors = baseSchema.getDescriptors()
         base_field_groups_names = list(baseSchema.fieldGroups.values())
         dict_base_fg_name_id = {name:fg_id for fg_id,name in baseSchema.fieldGroups.items()}
@@ -465,7 +568,7 @@ class Synchronizer:
                 t_schema = schemamanager.SchemaManager(targetSchemaAPI.data.schemas_altId[name_base_schema],config=self.dict_targetsConfig[target],sandbox=target)
                 new_fieldgroups = [fg for fg in base_field_groups_names if fg not in t_schema.fieldGroups.values()]
                 existing_fieldgroups = [fg for fg in base_field_groups_names if fg in t_schema.fieldGroups.values()]
-                if len(new_fieldgroups) > 0: ## if new field groups
+                if len(new_fieldgroups) > 0 or force==True: ## if new field groups
                     if verbose:
                         print('found new field groups to add to the schema')
                     for new_fieldgroup in new_fieldgroups:
@@ -478,7 +581,8 @@ class Synchronizer:
                             if verbose:
                                 print(f"field group '{new_fieldgroup}' is a custom field group, syncing it")
                             tmp_FieldGroup = baseSchema.getFieldGroupManager(new_fieldgroup)
-
+                            print(f"Creating new custom field group '{tmp_FieldGroup.title}'")
+                            self.__syncFieldGroup__(tmp_FieldGroup,verbose=verbose,force=force)
                         t_schema.addFieldGroup(self.dict_targetComponents[target]['fieldgroup'][new_fieldgroup].id)
                     t_schema.setDescription(baseSchema.description)
                     res = t_schema.updateSchema()
@@ -490,13 +594,35 @@ class Synchronizer:
                 for fg_name in existing_fieldgroups:
                     if baseSchema.tenantId[1:] in dict_base_fg_name_id[fg_name]: ## custom field group
                         tmp_fieldGroupManager = fieldgroupmanager.FieldGroupManager(dict_base_fg_name_id[fg_name],config=self.baseConfig,sandbox=target,localFolder=self.localfolder)
-                        self.__syncFieldGroup__(tmp_fieldGroupManager,verbose=verbose)
+                        self.__syncFieldGroup__(tmp_fieldGroupManager,force=force,verbose=verbose)
                     else:
                         if verbose:
                             print(f"field group '{fg_name}' is a OOTB field group, using it")
                         self.dict_targetComponents[target]['fieldgroup'][fg_name] = fieldgroupmanager.FieldGroupManager(dict_base_fg_name_id[fg_name],config=self.dict_targetsConfig[target],sandbox=target)
                 list_new_descriptors = self.__syncDescriptor__(baseSchema,t_schema,targetSchemaAPI=targetSchemaAPI,verbose=verbose)
+                ## handling the meta:refProperty setup if any
+                base_allOf = baseSchema.schema.get('allOf',[])
+                base_fg_name_metaref = {}
+                for refEl in base_allOf: ## retrieving the meta:refProperty from the base schema
+                    if 'meta:refProperty' in refEl.keys():
+                        tmp_base_fg_id = refEl['$ref']
+                        if baseSchema.tenantId[1:] in tmp_base_fg_id:
+                            tmp_base_fg_manager = self.getSyncFieldGroupManager(tmp_base_fg_id,sandbox=baseSchema.sandbox)
+                            base_fg_name_metaref[tmp_base_fg_manager.title] = refEl['meta:refProperty']
+                        else:
+                            base_fg_name_metaref[tmp_base_fg_id] = refEl['meta:refProperty']
+                for fg_name,ref_property in base_fg_name_metaref.items(): ## updating the target schema with the meta:refProperty
+                    for ref in t_schema.schema.get('allOf',[]):
+                        tmp_target_fg_id = ref['$ref']
+                        if baseSchema.tenantId[1:] in tmp_target_fg_id:
+                            tmp_target_fg_manager = self.getSyncFieldGroupManager(tmp_target_fg_id,sandbox=target)
+                            if fg_name == tmp_target_fg_manager.title:
+                                ref['meta:refProperty'] = ref_property
+                        else:
+                            if fg_name == ref['$ref']:
+                                ref['meta:refProperty'] = ref_property
                 self.dict_targetComponents[target]['schemaDescriptors'][name_base_schema] = list_new_descriptors
+                t_schema.updateSchema()
             else: ## schema does not exist in target
                 if verbose:
                     print(f"schema '{name_base_schema}' does not exist in target {target}, creating it")
@@ -506,7 +632,7 @@ class Synchronizer:
                 tenantidId = baseSchema.tenantId
                 if tenantidId[1:] in baseClassId: ## custom class
                     baseClassManager = classmanager.ClassManager(baseClassId,config=self.baseConfig,sandbox=target,localFolder=self.localfolder,sandboxBase=self.baseSandbox,tenantidId=tenantidId)
-                    self.__syncClass__(baseClassManager,verbose=verbose)
+                    self.__syncClass__(baseClassManager,force=force,verbose=verbose)
                     targetClassManager = self.dict_targetComponents[target]['class'][baseClassManager.title]
                     classId_toUse = targetClassManager.id
                 else:
@@ -523,7 +649,7 @@ class Synchronizer:
                         if verbose:
                             print(f"field group '{fg_name}' is a custom field group, using it")
                         tmp_FieldGroup = baseSchema.getFieldGroupManager(fg_name)
-                        self.__syncFieldGroup__(tmp_FieldGroup,verbose=verbose)
+                        self.__syncFieldGroup__(tmp_FieldGroup,force=force,verbose=verbose)
                     new_schema.addFieldGroup(self.dict_targetComponents[target]['fieldgroup'][fg_name].id)
                 new_schema.setDescription(baseSchema.description)
                 res = new_schema.createSchema()
@@ -535,6 +661,28 @@ class Synchronizer:
                 ## handling descriptors
                 list_new_descriptors = self.__syncDescriptor__(baseSchema,t_schema,targetSchemaAPI,verbose=verbose)
                 self.dict_targetComponents[target]['schemaDescriptors'][name_base_schema] = list_new_descriptors
+                ## handling the meta:refProperty setup if any
+                base_allOf = baseSchema.schema.get('allOf',[])
+                base_fg_name_metaref = {}
+                for refEl in base_allOf: ## retrieving the meta:refProperty from the base schema
+                    if 'meta:refProperty' in refEl.keys():
+                        tmp_base_fg_id = refEl['$ref']
+                        if baseSchema.tenantId[1:] in tmp_base_fg_id:
+                            tmp_base_fg_manager = self.getSyncFieldGroupManager(tmp_base_fg_id,sandbox=baseSchema.sandbox)
+                            base_fg_name_metaref[tmp_base_fg_manager.title] = refEl['meta:refProperty']
+                        else:
+                            base_fg_name_metaref[tmp_base_fg_id] = refEl['meta:refProperty']
+                for fg_name,ref_property in base_fg_name_metaref.items(): ## updating the target schema with the meta:refProperty
+                    for ref in t_schema.schema.get('allOf',[]):
+                        tmp_target_fg_id = ref['$ref']
+                        if baseSchema.tenantId[1:] in tmp_target_fg_id:
+                            tmp_target_fg_manager = self.getSyncFieldGroupManager(tmp_target_fg_id,sandbox=target)
+                            if fg_name == tmp_target_fg_manager.title:
+                                ref['meta:refProperty'] = ref_property
+                        else:
+                            if fg_name == ref['$ref']:
+                                ref['meta:refProperty'] = ref_property
+                t_schema.updateSchema()
             self.dict_targetComponents[target]['schema'][name_base_schema] = t_schema
 
     def __syncDescriptor__(self,baseSchemaManager:'SchemaManager'=None,targetSchemaManager:'SchemaManager'=None,targetSchemaAPI:'Schema'=None,verbose:bool=False)-> dict:
@@ -553,6 +701,7 @@ class Synchronizer:
         if not isinstance(targetSchemaManager,schemamanager.SchemaManager):
             raise TypeError("the targetSchemaManager must be a SchemaManager object")
         base_descriptors = baseSchemaManager.getDescriptors()
+        self.dict_baseComponents['schemaDescriptors'][baseSchemaManager.title] = {}
         if self.baseConfig is not None:
             baseSchemaAPI = schema.Schema(config=self.baseConfig)
             myschemas = baseSchemaAPI.getSchemas() ## to populate the data object
@@ -717,6 +866,7 @@ class Synchronizer:
         if not isinstance(identityDefiniton,dict):
             raise TypeError("the identityDefinition must be a dictionary")
         code_base_identity = identityDefiniton['code']
+        self.dict_baseComponents['identities'][code_base_identity] = identityDefiniton
         for target in self.dict_targetsConfig.keys():
             targetIdentity = identity.Identity(config=self.dict_targetsConfig[target],region=self.region)
             t_identities = targetIdentity.getIdentities()
@@ -742,7 +892,8 @@ class Synchronizer:
             baseDataset : REQUIRED : dictionary with the dataset definition
         """
         if len(baseDataset) == 1: ## if receiving the dataset as provided by the API {datasetId:{...definition}}
-            baseDataset = deepcopy(baseDataset[list(baseDataset.keys()[0]
+            baseDataset = deepcopy(baseDataset[list(baseDataset.keys())[0]])
+        self.dict_baseComponents['datasets'][baseDataset['name']] = baseDataset
         base_datasetName = baseDataset['name']
         base_dataset_related_schemaId = baseDataset['schemaRef']['id']
         if self.baseConfig is not None:
@@ -786,4 +937,94 @@ class Synchronizer:
                 t_schemas = targetSchema.getSchemas()
                 baseSchemaManager = schemamanager.SchemaManager(base_dataset_related_schemaId,config=self.baseConfig,localFolder=self.localfolder,sandbox=self.baseSandbox)
                 self.__syncSchema__(baseSchemaManager,verbose=verbose)
+            self.dict_targetComponents[target]['datasets'][base_datasetName] = t_dataset
+
+    def __syncMergePolicy__(self,mergePolicy:dict,verbose:bool=False)->None:
+        """
+        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artefacts when not already created.
+        Arguments:
+            mergePolicy : REQUIRED : The merge policy dictionary to sync
+        """
+        if not isinstance(mergePolicy,dict):
+            raise TypeError("the mergePolicy must be a dictionary")
+        self.dict_baseComponents['mergePolicy'][mergePolicy.get('id','unknown')] = mergePolicy
+        mergePolicy_name = mergePolicy.get('name','unknown')
+        if mergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
+            if verbose:
+                print(f"handling dataset precedence for merge policy '{mergePolicy_name}'")
+                print("syncing the datasets involved in the precedence order")
+            base_list_precedenceDatasets = mergePolicy['attributeMerge'].get('order',[])
+            for ds_id in base_list_precedenceDatasets:
+                res = self.syncComponent(ds_id,componentType='dataset',verbose=verbose)
+        for target in self.dict_targetsConfig.keys():
+            targetCustomerProfile = customerprofile.Profile(config=self.dict_targetsConfig[target])
+            t_mergePolicies = targetCustomerProfile.getMergePolicies()
+            if mergePolicy_name not in [el.get('name','') for el in t_mergePolicies]: ## merge policy does not exist in target
+                if verbose:
+                    print(f"merge policy '{mergePolicy_name}' does not exist in target {target}, creating it")
+                mergePolicyDef = {
+                    "name":mergePolicy.get('name',''),
+                    "schema":mergePolicy.get('schema','_xdm.context.profile'),
+                    "identityGraph":mergePolicy.get('identityGraph','pdg'),
+                    "isActiveOnEdge":mergePolicy.get('isActiveOnEdge',False),
+                }
+                if mergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
+                    target_list_precedenceDatasets = []
+                    for base_ds_id in mergePolicy['attributeMerge'].get('order',[]):
+                        base_ds_name = self.getDatasetName(base_ds_id,sandbox=target)
+                        target_ds_id = self.dict_targetComponents[target]['datasets'][base_ds_name]['id']
+                        target_list_precedenceDatasets.append(target_ds_id)
+                    mergePolicyDef['attributeMerge'] = {
+                        "type":mergePolicy['attributeMerge'].get('type','timestampOrdered'),
+                        "order":target_list_precedenceDatasets
+                    }
+                else:
+                    mergePolicyDef['attributeMerge'] = {'type':'timestampOrdered'}
+                res = targetCustomerProfile.createMergePolicy(mergePolicyDef)
+                if 'id' in res.keys():
+                    self.dict_targetComponents[target]['mergePolicy'][res['id']] = res
+                else:
+                    print(res)
+                    raise Exception("the merge policy could not be created in the target sandbox")
+            else: ## merge policy already exists in target
+                if verbose:
+                    print(f"merge policy '{mergePolicy_name}' already exists in target {target}, saving it")
+                self.dict_targetComponents[target]['mergePolicy'][mergePolicy_name] = [el for el in t_mergePolicies if el.get('name','') == mergePolicy_name][0]
+
+    def __syncAudience__(self,baseAudience:dict,verbose:bool=False)-> None:
+        """
+        Synchronize an audience to the target sandboxes.
+        Arguments:
+            baseAudience : REQUIRED : dictionary with the audience definition
+        """
+        if not isinstance(baseAudience,dict):
+            raise TypeError("the baseAudience must be a dictionary")
+        audience_name = baseAudience.get('name','unknown')
+        self.dict_baseComponents['audience'][audience_name] = baseAudience
+        for target in self.dict_targetsConfig.keys():
+            targetAudiences = segmentation.Segmentation(config=self.dict_targetsConfig[target])
+            t_audiences = targetAudiences.getAudiences()
+            if audience_name not in [el['name'] for el in t_audiences]: ## audience does not exist in target
+                if verbose:
+                    print(f"audience '{audience_name}' does not exist in target {target}, creating it")
+                audienceDef = {
+                    "name":baseAudience.get('name',''),
+                    "description":baseAudience.get('description',''),
+                    "type":baseAudience.get('type','SegmentDefinition'),
+                    "schema":baseAudience.get('schema','_xdm.context.profile'),
+                    "expression":baseAudience.get('expression',[]),
+                    "ansibleDataModel":baseAudience.get('ansibleDataModel',{}),
+                    "profileInstanceId":baseAudience.get('profileInstanceId',''),
+                    "evaluationInfo":baseAudience.get('evaluationInfo',{'batch': {'enabled': True}, 'continuous': {'enabled': False},'synchronous': {'enabled': False}})
+                }
+                res = targetAudiences.createAudience(audienceDef)
+                if 'id' in res.keys():
+                    self.dict_targetComponents[target]['audience'][res['id']] = res
+                else:
+                    print(res)
+                    raise Exception("the audience could not be created in the target sandbox")
+            else: ## audience already exists in target
+                if verbose:
+                    print(f"audience '{audience_name}' already exists in target {target}, saving it")
+                self.dict_targetComponents[target]['audience'][audience_name] = [el for el in t_audiences if el['name'] == audience_name][0]
 
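Putting the synchronizer changes together, a short usage sketch; the `sync` object and all names below are hypothetical, while the component types, the inference keys, and the force flag behavior are taken from the diff above.

# Merge policy by name: with a 'dataSetPrecedence' attributeMerge, the datasets
# in the precedence order are synced first, then the policy is recreated in each
# target using the target-side dataset ids resolved via getDatasetName().
sync.syncComponent('My Merge Policy', componentType='mergepolicy', verbose=True)

# Audience from a definition dict: the type can also be inferred, since
# 'attributeMerge' implies mergepolicy and 'expression' implies audience.
sync.syncComponent(my_audience_definition)

# force=True re-pushes schemas, field groups, data types and classes even
# when no field-path differences are detected in the target sandbox.
sync.syncComponent('My Schema', componentType='schema', force=True)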
{aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-aepp/__init__.py,sha256=
-aepp/__version__.py,sha256=
+aepp/__init__.py,sha256=d5hE6VcHz52B5o_AbYO368iG8FTsJlt91nBWej8WL8A,26381
+aepp/__version__.py,sha256=Y-I6FXKes4hB39WcBm3HZ1OCtCWiErPB6YZ-YSgexuE,21
 aepp/accesscontrol.py,sha256=PB3FcrO4bvDjdNxjHx7p_20hp4ahBXewoOSxuTGMXC8,17423
 aepp/catalog.py,sha256=hK9m3SAP0fhgkYqu14Tcfq14qBhw54tLCOF0mH31b1M,68237
 aepp/classmanager.py,sha256=CTYGkg5ygB8HtRia6DfT9WLBqXJOVg7pSM9jBB25Bqw,64707
@@ -31,10 +31,10 @@ aepp/schemamanager.py,sha256=hwItd4vXsPFeV25gX1Fbeiu07-BCg4z_VRQREMgJZ58,50738
 aepp/segmentation.py,sha256=oSgR2yx4nawYN5XAeHV_wefvmXEf0nb-bCguaDmp8F8,43555
 aepp/sensei.py,sha256=oYNy5BSWAEqsDkEexcQso6NfA6ntGGMnCOyHri0pJs8,7761
 aepp/som.py,sha256=XNm_Lu2wt2kpSSpldLptuER2eludFXeO9fI6i3iNCzo,34175
-aepp/synchronizer.py,sha256=
+aepp/synchronizer.py,sha256=UA48GKJ5OaasMxbS2O7A9T1i6HrehRx8_7jtxaNFD9M,77388
 aepp/tags.py,sha256=t2qBallTcWR4IOXcDBmrPpqjbSay1z3E2bcRijzVm1s,17641
 aepp/utils.py,sha256=tG-YVXylm38-bynqfp5N_Mzyo7mhlZj-dLo7wLoO4tM,1200
-aepp-0.4.
+aepp-0.4.2.dist-info/licenses/LICENSE,sha256=HjYTlfne3BbS5gNHzNqJ5COCiTQLUdf87QkzRyFbE4Y,10337
 tests/__init__.py,sha256=d6zWJsJFZrQd5wAYM7sezSxwXbuMMWfNPkK_vpaUzFA,623
 tests/catalog_test.py,sha256=O4kkG0C_dXk3E77pSzWIt1ewfyKjfZqgbJmBwWwx0po,2246
 tests/dataaccess_test.py,sha256=bnHwOjPPauTM8s1c6O7iUYC--gqt6tPzT94aEZHDC-c,1238
@@ -44,7 +44,7 @@ tests/exportDatasetToDatalandingZone_test.py,sha256=193AgQR8yhnQmRWV9pgYz1X2Hz-Y
 tests/flowservice_test.py,sha256=Y1mpYWbKYL_x-ZlIY-EuOuNvlzVV1ERlKseDO7gN3Ss,4208
 tests/schema_test.py,sha256=6UsgdsizKmii1hzREpBEKWvZouXdJMvU68UKSxlt1uk,2774
 tests/som_test.py,sha256=a4ut0pEg1HJVMTESaPITmj7YkF54eWCMzKxTMIS-VvM,12101
-aepp-0.4.
-aepp-0.4.
-aepp-0.4.
-aepp-0.4.
+aepp-0.4.2.dist-info/METADATA,sha256=nCyugz0f9W1kg7B-VVHAlYrKQt7Pifwq4Jh7CJ0SmRc,5470
+aepp-0.4.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aepp-0.4.2.dist-info/top_level.txt,sha256=Gn88pv1ywuEAgOvhmmXXhN4dosEfCrBNDskje3nqS34,11
+aepp-0.4.2.dist-info/RECORD,,
{aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/WHEEL
File without changes

{aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/licenses/LICENSE
File without changes

{aepp-0.4.1.post2.dist-info → aepp-0.4.2.dist-info}/top_level.txt
File without changes