aepp 0.4.1.post1__py3-none-any.whl → 0.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aepp/__init__.py +59 -5
- aepp/__version__.py +1 -1
- aepp/customerprofile.py +0 -1
- aepp/schema.py +3 -0
- aepp/schemamanager.py +0 -6
- aepp/som.py +0 -1
- aepp/synchronizer.py +275 -27
- {aepp-0.4.1.post1.dist-info → aepp-0.4.2.dist-info}/METADATA +1 -1
- {aepp-0.4.1.post1.dist-info → aepp-0.4.2.dist-info}/RECORD +12 -12
- {aepp-0.4.1.post1.dist-info → aepp-0.4.2.dist-info}/WHEEL +0 -0
- {aepp-0.4.1.post1.dist-info → aepp-0.4.2.dist-info}/licenses/LICENSE +0 -0
- {aepp-0.4.1.post1.dist-info → aepp-0.4.2.dist-info}/top_level.txt +0 -0
aepp/__init__.py
CHANGED
@@ -153,14 +153,15 @@ def extractSandboxArtefacts(
         completePath = mypath / f'{sandbox.sandbox}'
     else:
         completePath = Path(localFolder)
-    from aepp import schema, catalog, identity
+    from aepp import schema, catalog, identity,customerprofile, segmentation
     sch = schema.Schema(config=sandbox)
     cat = catalog.Catalog(config=sandbox)
     ide = identity.Identity(config=sandbox,region=region)
     completePath.mkdir(exist_ok=True)
     globalConfig = {
         "imsOrgId":sandbox.org_id,
-        "tenantId":f"_{sch.getTenantId()}"
+        "tenantId":f"_{sch.getTenantId()}",
+        "sandbox":sandbox.sandbox
     }
     with open(f'{completePath}/config.json','w') as f:
         json.dump(globalConfig,f,indent=2)
@@ -184,6 +185,10 @@ def extractSandboxArtefacts(
     identityPath.mkdir(exist_ok=True)
     datasetPath = completePath / 'dataset'
     datasetPath.mkdir(exist_ok=True)
+    mergePolicyPath = completePath / 'mergePolicy'
+    mergePolicyPath.mkdir(exist_ok=True)
+    audiencePath = completePath / 'audience'
+    audiencePath.mkdir(exist_ok=True)
     myclasses = sch.getClasses()
     classesGlobal = sch.getClassesGlobal()
     behaviors = sch.getBehaviors()
@@ -257,6 +262,19 @@ def extractSandboxArtefacts(
     for el in identities:
         with open(f"{identityPath / el['code']}.json",'w') as f:
             json.dump(el,f,indent=2)
+    ## merge policies
+    ups = customerprofile.Profile(config=sandbox)
+    mymergePolicies = ups.getMergePolicies()
+    for el in mymergePolicies:
+        with open(f"{mergePolicyPath / el.get('id','unknown')}.json",'w') as f:
+            json.dump(el,f,indent=2)
+    ## audiences
+    mysegmentation = segmentation.Segmentation(config=sandbox)
+    audiences = mysegmentation.getAudiences()
+    for el in audiences:
+        safe_name = __titleSafe__(el.get('name','unknown'))
+        with open(f"{audiencePath / safe_name}.json",'w') as f:
+            json.dump(el,f,indent=2)

 def extractSandboxArtefact(
     sandbox: 'ConnectObject' = None,
@@ -271,7 +289,7 @@ def extractSandboxArtefact(
     sandbox: REQUIRED: the instance of a ConnectObject that contains the sandbox information and connection.
     localFolder: OPTIONAL: the local folder where to extract the sandbox. If not provided, it will use the current working directory and name the folder the name of the sandbox.
     artefact: REQUIRED: the id or the name of the artefact to export.
-    artefactType: REQUIRED: the type of artefact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity'
+    artefactType: REQUIRED: the type of artefact to export. Possible values are: 'class','schema','fieldgroup','datatype','descriptor','dataset','identity','mergepolicy'
     region: OPTIONAL: the region of the sandbox (default: nld2). This is used to fetch the correct API endpoints for the identities.
         Possible values: "va7","aus5", "can2", "ind2"
     """
@@ -287,7 +305,8 @@ def extractSandboxArtefact(
     sch = schema.Schema(config=sandbox)
     globalConfig = {
         "imsOrgId":sandbox.org_id,
-        "tenantId":f"_{sch.getTenantId()}"
+        "tenantId":f"_{sch.getTenantId()}",
+        "sandbox":sandbox.sandbox
     }
     with open(f'{completePath}/config.json','w') as f:
         json.dump(globalConfig,f,indent=2)
@@ -307,6 +326,10 @@ def extractSandboxArtefact(
         __extractDataset__(artefact,completePath,sandbox,region)
     elif artefactType == 'identity':
         __extractIdentity__(artefact,region,completePath,sandbox)
+    elif artefactType == 'mergepolicy':
+        __extractMergePolicy__(artefact,completePath,sandbox)
+    elif artefactType == 'audience':
+        __extractAudience__(artefact,completePath,sandbox)
     else:
         raise ValueError("artefactType not recognized")

@@ -400,6 +423,9 @@ def __extractFieldGroup__(fieldGroup: str,folder: Union[str, Path] = None,sandbo
     for descriptor in descriptors:
         with open(f"{descriptorPath / descriptor['@id']}.json",'w') as f:
             json.dump(descriptor,f,indent=2)
+    classes = myfg_manager.classIds
+    for cls in classes:
+        __extractClass__(cls,folder,sandbox)

 def __extractSchema__(schemaEl: str,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None,region:str=None):
     schemaPath = Path(folder) / 'schema'
@@ -435,6 +461,9 @@ def __extractSchema__(schemaEl: str,folder: Union[str, Path] = None,sandbox: 'Co
         if descriptor.get('@type','') == 'xdm:descriptorIdentity':
             namespace = descriptor['xdm:namespace']
             __extractIdentity__(namespace,region,folder,sandbox)
+        if descriptor.get('@type','') == 'xdm:descriptorRelationship' or descriptor.get('@type','') == 'xdm:descriptorOneToOne':
+            targetSchema = descriptor['xdm:destinationSchema']
+            __extractSchema__(targetSchema,folder,sandbox,region)


 def __extractIdentity__(identityStr: str,region:str=None,folder: Union[str, Path] = None,sandbox: 'ConnectObject' = None):
@@ -466,4 +495,29 @@ def __extractDataset__(dataset: str,folder: Union[str, Path] = None,sandbox: 'Co
         json.dump(myDataset,f,indent=2)
     schema = myDataset.get('schemaRef',{}).get('id',None)
     if schema is not None:
-        __extractSchema__(schema,folder,sandbox,region)
+        __extractSchema__(schema,folder,sandbox,region)
+
+def __extractMergePolicy__(mergePolicy: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None,region:str=None):
+    from aepp import customerprofile
+    ups = customerprofile.Profile(config=sandbox)
+    mymergePolicies = ups.getMergePolicies()
+    mymergePolicy = [el for el in mymergePolicies if el.get('id','') == mergePolicy or el.get('name','') == mergePolicy][0]
+    if mymergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
+        list_ds = mymergePolicy['attributeMerge'].get('order',[])
+        for ds in list_ds:
+            __extractDataset__(ds,folder,sandbox,region)
+    mergePolicyPath = Path(folder) / 'mergePolicy'
+    mergePolicyPath.mkdir(exist_ok=True)
+    with open(f"{mergePolicyPath / mymergePolicy.get('id','unknown')}.json",'w') as f:
+        json.dump(mymergePolicy,f,indent=2)
+
+def __extractAudience__(audienceName: str = None,folder:Union[str, Path]=None, sandbox: 'ConnectObject' = None):
+    from aepp import segmentation
+    mysegmentation = segmentation.Segmentation(config=sandbox)
+    audiences = mysegmentation.getAudiences()
+    myaudience = [el for el in audiences if el.get('name','') == audienceName or el.get('id','') == audienceName][0]
+    audiencePath = Path(folder) / 'audience'
+    audiencePath.mkdir(exist_ok=True)
+    safe_name = __titleSafe__(myaudience.get('name','unknown'))
+    with open(f"{audiencePath / safe_name}.json",'w') as f:
+        json.dump(myaudience,f,indent=2)
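
Taken together with the new dispatch branches above, single merge policies and audiences can now be exported the same way as schemas or datasets. A minimal sketch of driving the two new artefact types, assuming an already-configured `ConnectObject` (the config file, folder, and artefact names below are placeholders; note that the dispatch also accepts `'audience'` even though the updated docstring stops at `'mergepolicy'`):

```python
import aepp

# Placeholder config file and sandbox name; connectInstance=True returns the
# ConnectObject that extractSandboxArtefact expects.
mySandbox = aepp.importConfigFile('myconfig.json', sandbox='dev', connectInstance=True)

# The two artefact types introduced in 0.4.2:
aepp.extractSandboxArtefact(sandbox=mySandbox, localFolder='dev-export',
                            artefact='my-merge-policy-id', artefactType='mergepolicy')
aepp.extractSandboxArtefact(sandbox=mySandbox, localFolder='dev-export',
                            artefact='High Value Visitors', artefactType='audience')
```
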
aepp/__version__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.4.1.post1"
+__version__ = "0.4.2"
aepp/customerprofile.py
CHANGED
@@ -776,7 +776,6 @@ class Profile:
         privateHeader['Accept'] = "application/json"
         params = {"offset":0,"limit":20}
         res = self.connector.getData(self.endpoint_global + path,params=params,headers=privateHeader)
-        print(res)
         data = res.get("computedAttributes",[])
         offset = res.get("_page",{}).get("offset", 0)
         while offset != 0:
aepp/schema.py
CHANGED
@@ -1765,6 +1765,9 @@ class Schema:
             raise Exception("Require a field Group ID")
         if self.loggingEnabled:
             self.logger.debug(f"Starting extendFieldGroup")
+        if fieldGroupId.startswith("https://"):
+            from urllib import parse
+            fieldGroupId = parse.quote_plus(fieldGroupId)
         path = f"/{tenant}/fieldgroups/{fieldGroupId}"
         if values is not None:
             list_fgs = values
aepp/schemamanager.py
CHANGED
@@ -122,9 +122,7 @@ class SchemaManager:
                     break
         self.fieldGroupIds = [obj['$ref'] for obj in allOf if ('/mixins/' in obj['$ref'] or '/experience/' in obj['$ref'] or '/context/' in obj['$ref']) and obj['$ref'] != self.classId]
         self.classIds = [self.classId]
-        print(self.tenantId)
         for ref in self.fieldGroupIds:
-            print(ref)
             if '/mixins/' in ref and self.tenantId[1:] in ref:
                 if self.localfolder is not None:
                     for json_file in self.fieldgroupFolder.glob('*.json'):
@@ -134,7 +132,6 @@ class SchemaManager:
                             break
                 elif self.schemaAPI is not None:
                     definition = self.schemaAPI.getFieldGroup(ref,full=False)
-                    print(definition.get('title'))
                     fgM = FieldGroupManager(fieldGroup=definition,schemaAPI=self.schemaAPI,localFolder=localFolder,tenantId=self.tenantId,sandbox=self.sandbox)
             else:
                 if self.localfolder is not None:
@@ -206,7 +203,6 @@ class SchemaManager:
                     tmp_def = json.load(FileIO(json_file))
                     if tmp_def.get('$id') == ref:
                         definition = tmp_def
-                        print(definition.get('title'))
                         break
             if 'properties' in definition.keys():
                 definition['definitions'] = definition['properties']
@@ -260,7 +256,6 @@ class SchemaManager:
                     tmp_def = json.load(FileIO(json_file))
                     if tmp_def.get('$id') == ref:
                         definition = tmp_def
-                        print(definition.get('title'))
                         break
             fgM = FieldGroupManager(definition,schemaAPI=self.schemaAPI, localFolder=localFolder,tenantId=self.tenantId,sandbox=self.sandbox)
             self.fieldGroupsManagers[fgM.title] = fgM
@@ -585,7 +580,6 @@ class SchemaManager:
         res = self.schemaAPI.putSchema(self.id,self.schema)
         if 'status' in res.keys():
             if res['status'] == 400:
-                print(res['title'])
                 return res
             else:
                 return res
aepp/som.py
CHANGED
aepp/synchronizer.py
CHANGED
@@ -10,7 +10,7 @@

 import json
 import aepp
-from aepp import schema, schemamanager, fieldgroupmanager, datatypemanager,classmanager,identity,catalog
+from aepp import schema, schemamanager, fieldgroupmanager, datatypemanager,classmanager,identity,catalog,customerprofile,segmentation
 from copy import deepcopy
 from typing import Union
 from pathlib import Path
@@ -65,21 +65,83 @@ class Synchronizer:
         self.identityFolder = self.localfolder / 'identity'
         self.datasetFolder = self.localfolder / 'dataset'
         self.descriptorFolder = self.localfolder / 'descriptor'
+        self.mergePolicyFolder = self.localfolder / 'mergepolicy'
+        self.audienceFolder = self.localfolder / 'audience'
         if baseSandbox is not None:
             self.baseSandbox = baseSandbox
+        else:
+            with open(self.localfolder / 'config.json','r') as f:
+                local_config = json.load(f)
+            self.baseSandbox = local_config.get('sandbox',None)
         self.dict_targetsConfig = {target: aepp.configure(org_id=config_object['org_id'],client_id=config_object['client_id'],scopes=config_object['scopes'],secret=config_object['secret'],sandbox=target,connectInstance=True) for target in targets}
         self.region = region
-        self.
+        self.dict_baseComponents = {'schema':{},'class':{},'fieldgroup':{},'datatype':{},'datasets':{},'identities':{},"schemaDescriptors":{},'mergePolicy':{},'audience':{}}
+        self.dict_targetComponents = {target:{'schema':{},'class':{},'fieldgroup':{},'datatype':{},'datasets':{},'identities':{},"schemaDescriptors":{},'mergePolicy':{},'audience':{}} for target in targets}

-    def
+    def getSyncFieldGroupManager(self,fieldgroup:str,sandbox:str=None)-> dict:
+        """
+        Get a field group Manager from the synchronizer.
+        It searches through the component cache to see if the FieldGroupManager for the target sandbox is already instantiated.
+        If not, it generate an error.
+        Arguments:
+            fieldgroup : REQUIRED : Either $id, or name or alt:Id of the field group to get
+            sandbox : REQUIRED : name of the sandbox to get the field group from
+        """
+        if sandbox is None:
+            raise ValueError("a sandbox name must be provided")
+        if sandbox == self.baseSandbox:
+            if fieldgroup in self.dict_baseComponents['fieldgroup'].keys():
+                return self.dict_baseComponents['fieldgroup'][fieldgroup]
+            elif fieldgroup in [self.dict_baseComponents['fieldgroup'][fg].id for fg in self.dict_baseComponents['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_baseComponents['fieldgroup'].keys() if self.dict_baseComponents['fieldgroup'][fg].id == fieldgroup][0]
+                return self.dict_baseComponents['fieldgroup'][fg_key]
+            elif fieldgroup in [self.dict_baseComponents['fieldgroup'][fg].altId for fg in self.dict_baseComponents['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_baseComponents['fieldgroup'].keys() if self.dict_baseComponents['fieldgroup'][fg].altId == fieldgroup][0]
+                return self.dict_baseComponents['fieldgroup'][fg_key]
+            else:
+                raise ValueError(f"the field group '{fieldgroup}' has not been synchronized to the sandbox '{sandbox}'")
+        else:
+            if fieldgroup in self.dict_targetComponents[sandbox]['fieldgroup'].keys():
+                return self.dict_targetComponents[sandbox]['fieldgroup'][fieldgroup]
+            elif fieldgroup in [self.dict_targetComponents[sandbox]['fieldgroup'][fg].id for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys() if self.dict_targetComponents[sandbox]['fieldgroup'][fg].id == fieldgroup][0]
+                return self.dict_targetComponents[sandbox]['fieldgroup'][fg_key]
+            elif fieldgroup in [self.dict_targetComponents[sandbox]['fieldgroup'][fg].altId for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys()]:
+                fg_key = [fg for fg in self.dict_targetComponents[sandbox]['fieldgroup'].keys() if self.dict_targetComponents[sandbox]['fieldgroup'][fg].altId == fieldgroup][0]
+                return self.dict_targetComponents[sandbox]['fieldgroup'][fg_key]
+            else:
+                raise ValueError(f"the field group '{fieldgroup}' has not been synchronized to the sandbox '{sandbox}'")
+
+    def getDatasetName(self,datasetId:str,sandbox:str=None)-> str:
+        """
+        Get a dataset name from the synchronizer base on the ID of the dataset.
+        Arguments:
+            datasetId : REQUIRED : id of the dataset to get
+            sandbox : REQUIRED : name of the sandbox to get the dataset from
+        """
+        if sandbox is None:
+            raise ValueError("a sandbox name must be provided")
+        if sandbox == self.baseSandbox:
+            if datasetId in [item.get('id') for key,item in self.dict_baseComponents['datasets'].items()]:
+                return [key for key,item in self.dict_baseComponents['datasets'].items() if item.get('id') == datasetId][0]
+            else:
+                raise ValueError(f"the dataset '{datasetId}' has not been synchronized to the sandbox '{sandbox}'")
+        else:
+            if datasetId in [item.get('id') for key,item in self.dict_targetComponents[sandbox]['datasets'].items()]:
+                return [key for key,item in self.dict_targetComponents[sandbox]['datasets'].items() if item.get('id') == datasetId][0]
+            else:
+                raise ValueError(f"the dataset '{datasetId}' has not been synchronized to the sandbox '{sandbox}'")
+
+    def syncComponent(self,component:Union[str,dict],componentType:str=None,force:bool=False,verbose:bool=False)-> dict:
         """
         Synchronize a component to the target sandbox.
         The component could be a string (name or id of the component in the base sandbox) or a dictionary with the definition of the component.
         If the component is a string, you have to have provided a base sandbox in the constructor.
         Arguments:
             component : REQUIRED : name or id of the component or a dictionary with the component definition
-            componentType : OPTIONAL : type of the component (e.g. "schema", "fieldgroup", "datatypes", "class", "identity", "dataset"). Required if a string is passed.
-
+            componentType : OPTIONAL : type of the component (e.g. "schema", "fieldgroup", "datatypes", "class", "identity", "dataset", "mergepolicy", "audience"). Required if a string is passed.
+                It is not required but if the type cannot be inferred from the component, it will raise an error.
+            force : OPTIONAL : if True, it will force the synchronization of the component even if it already exists in the target sandbox. Works for Schema, FieldGroup, DataType and Class.
             verbose : OPTIONAL : if True, it will print the details of the synchronization process
         """
         if type(component) == str:
@@ -87,8 +149,8 @@ class Synchronizer:
                 raise ValueError("a base sandbox or a local folder must be provided to synchronize a component by name or id")
             if componentType is None:
                 raise ValueError("the type of the component must be provided if the component is a string")
-            if componentType not in ['schema', 'fieldgroup', 'datatypes', 'class', 'identity', 'dataset']:
-                raise ValueError("the type of the component is not supported. Please provide one of the following types: schema, fieldgroup, datatypes, class, identity, dataset")
+            if componentType not in ['schema', 'fieldgroup', 'datatypes', 'class', 'identity', 'dataset', 'mergepolicy', 'audience']:
+                raise ValueError("the type of the component is not supported. Please provide one of the following types: schema, fieldgroup, datatypes, class, identity, dataset, mergepolicy, audience")
             if componentType in ['schema', 'fieldgroup', 'datatypes', 'class']:
                 if self.baseConfig is not None:
                     base_schema = schema.Schema(config=self.baseConfig)
@@ -165,6 +227,30 @@ class Synchronizer:
                             break
                 if len(component) == 1: ## if the component is the catalog API response {'key': {dataset definition}}
                     component = component[list(component.keys())[0]] ## accessing the real dataset definition
+            elif componentType == "mergepolicy":
+                if self.baseConfig is not None:
+                    ups_base = customerprofile.Profile(config=self.baseConfig)
+                    base_mergePolicies = ups_base.getMergePolicies()
+                    if component in [el.get('id','') for el in base_mergePolicies] or component in [el.get('name','') for el in base_mergePolicies]:
+                        component = [el for el in base_mergePolicies if el.get('id','') == component or el.get('name','') == component][0]
+                elif self.localfolder is not None:
+                    for file in self.mergePolicyFolder.glob('*.json'):
+                        mp_file = json.load(FileIO(file))
+                        if mp_file.get('id','') == component or mp_file.get('name','') == component:
+                            component = mp_file
+                            break
+            elif componentType == 'audience':
+                if self.baseConfig is not None:
+                    seg_base = segmentation.Segmentation(config=self.baseConfig)
+                    base_audiences = seg_base.getAudiences()
+                    if component in [el.get('id','') for el in base_audiences] or component in [el.get('name','') for el in base_audiences]:
+                        component = [el for el in base_audiences if el.get('id','') == component or el.get('name','') == component][0]
+                elif self.localfolder is not None:
+                    for file in self.audienceFolder.glob('*.json'):
+                        au_file = json.load(FileIO(file))
+                        if au_file.get('id','') == component or au_file.get('name','') == component:
+                            component = au_file
+                            break
         elif type(component) == dict:
             if 'meta:resourceType' in component.keys():
                 componentType = component['meta:resourceType']
@@ -180,30 +266,41 @@ class Synchronizer:
                 componentType = 'identity'
             elif 'files' in component.keys():
                 componentType = 'dataset'
+            elif 'attributeMerge' in component.keys():
+                componentType = 'mergepolicy'
+            elif 'expression' in component.keys():
+                componentType = 'audience'
             else:
                 raise TypeError("the component type could not be inferred from the component or is not supported. Please provide the type as a parameter")
         ## Synchronize the component to the target sandboxes
         if componentType == 'datatypes':
-            self.__syncDataType__(component,verbose=verbose)
+            self.__syncDataType__(component,verbose=verbose,force=force)
         if componentType == 'fieldgroup':
-            self.__syncFieldGroup__(component,verbose=verbose)
+            self.__syncFieldGroup__(component,verbose=verbose,force=force)
         if componentType == 'schema':
-            self.__syncSchema__(component,verbose=verbose)
+            self.__syncSchema__(component,verbose=verbose,force=force)
         if componentType == 'class':
-            self.__syncClass__(component,verbose=verbose)
+            self.__syncClass__(component,verbose=verbose,force=force)
         if componentType == 'identity':
            self.__syncIdentity__(component,verbose=verbose)
        if componentType == 'dataset':
            self.__syncDataset__(component,verbose=verbose)
+        if componentType == 'mergepolicy':
+            self.__syncMergePolicy__(component,verbose=verbose)
+        if componentType == 'audience':
+            self.__syncAudience__(component,verbose=verbose)

-    def __syncClass__(self,baseClass:'ClassManager',verbose:bool=False)-> dict:
+
+    def __syncClass__(self,baseClass:'ClassManager',force:bool=False,verbose:bool=False)-> dict:
         """
         Synchronize a class to the target sandboxes.
         Arguments:
             baseClass : REQUIRED : class id or name to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of the class even if it already exists in the target sandbox
         """
         if not isinstance(baseClass,classmanager.ClassManager):
             raise TypeError("the baseClass must be a classManager instance")
+        self.dict_baseComponents['class'][baseClass.title] = baseClass
         baseClassName = baseClass.title
         baseBehavior = baseClass.behavior
         for target in self.dict_targetsConfig.keys():
@@ -223,14 +320,16 @@ class Synchronizer:
                 self.dict_targetComponents[target]['class'][baseClassName] = t_newClass


-    def __syncDataType__(self,baseDataType:'DataTypeManager',verbose:bool=False)-> dict:
+    def __syncDataType__(self,baseDataType:'DataTypeManager',force:bool=False,verbose:bool=False)-> dict:
         """
         Synchronize a data type to the target sandbox.
         Arguments:
             baseDataType : REQUIRED : DataTypeManager object with the data type to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of the data type even if it already exists in the target sandbox
         """
         if not isinstance(baseDataType,datatypemanager.DataTypeManager):
             raise TypeError("the baseDataType must be a DataTypeManager object")
+        self.dict_baseComponents['datatype'][baseDataType.title] = baseDataType
         name_base_datatype = baseDataType.title
         for target in self.dict_targetsConfig.keys():
             targetSchema = schema.Schema(config=self.dict_targetsConfig[target])
@@ -249,7 +348,7 @@ class Synchronizer:
             base_paths = df_base['path'].tolist()
             target_paths = df_target['path'].tolist()
             diff_paths = list(set(base_paths) - set(target_paths))
-            if len(diff_paths) > 0: ## there are differences
+            if len(diff_paths) > 0 or force==True: ## there are differences
                 base_datatypes_paths = baseDataType.getDataTypePaths()
                 df_base_limited = df_base[df_base['origin'] == 'self'].copy() ## exclude field group native fields
                 df_base_limited = df_base_limited[~df_base_limited['path'].isin(list(base_datatypes_paths.keys()))] ## exclude base of datatype rows
@@ -258,7 +357,7 @@ class Synchronizer:
                 base_dict_path_dtTitle = {}
                 for path,dt_id in base_datatypes_paths.items():
                     tmp_dt_manager = baseDataType.getDataTypeManager(dt_id)
-                    self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                    self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                     base_dict_path_dtTitle[path] = tmp_dt_manager.title
                 target_datatypes_paths = t_datatype.getDataTypePaths(som_compatible=True)
                 target_datatypes_paths_list = list(target_datatypes_paths.keys())
@@ -289,7 +388,7 @@ class Synchronizer:
                 base_dict_path_dtTitle = {}
                 for path,dt_id in base_datatypes_paths.items():
                     tmp_dt_manager = baseDataType.getDataTypeManager(dt_id)
-                    self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                    self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                     base_dict_path_dtTitle[path] = tmp_dt_manager.title
                 target_datatypes_paths = new_datatype.getDataTypePaths(som_compatible=True)
                 target_datatypes_paths_list = list(target_datatypes_paths.keys())
@@ -309,14 +408,16 @@ class Synchronizer:
                     raise Exception("the data type could not be created in the target sandbox")
             self.dict_targetComponents[target]['datatype'][name_base_datatype] = t_datatype

-    def __syncFieldGroup__(self,baseFieldGroup:'FieldGroupManager',verbose:bool=False)-> dict:
+    def __syncFieldGroup__(self,baseFieldGroup:'FieldGroupManager',force:bool=True,verbose:bool=False)-> dict:
         """
         Synchronize a field group to the target sandboxes.
         Argument:
             baseFieldGroup : REQUIRED : FieldGroupManager object with the field group to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of the field group even if it already exists in the target sandbox
         """
         if not isinstance(baseFieldGroup,fieldgroupmanager.FieldGroupManager):
             raise TypeError("the baseFieldGroup must be a FieldGroupManager object")
+        self.dict_baseComponents['fieldgroup'][baseFieldGroup.title] = baseFieldGroup
         name_base_fieldgroup = baseFieldGroup.title
         base_fg_classIds = baseFieldGroup.classIds
         for target in self.dict_targetsConfig.keys():
@@ -340,12 +441,17 @@ class Synchronizer:
                     print(f"field group '{name_base_fieldgroup}' already exists in target {target}, checking it")
                 if t_fieldgroup is None: ## if need to create the FieldGroupManager
                     t_fieldgroup = fieldgroupmanager.FieldGroupManager(targetSchema.data.fieldGroups_altId[name_base_fieldgroup],config=self.dict_targetsConfig[target],sandbox=target)
+                for fg_class in t_fieldgroup.classIds:
+                    if fg_class not in fg_class_ids:
+                        fg_class_ids.append(fg_class)
+                ### Aligning class support to the field groups
+                t_fieldgroup.schemaAPI.extendFieldGroup(t_fieldgroup.id,fg_class_ids)
                 df_base = baseFieldGroup.to_dataframe(full=True)
                 df_target = t_fieldgroup.to_dataframe(full=True)
                 base_paths = df_base['path'].tolist()
                 target_paths = df_target['path'].tolist()
                 diff_paths = [path for path in base_paths if path not in target_paths]
-                if len(diff_paths) > 0:
+                if len(diff_paths) > 0 or force==True:
                     base_datatypes_paths = baseFieldGroup.getDataTypePaths()
                     ## handling fieldgroup native fields
                     df_base_limited = df_base[df_base['origin'] == 'fieldGroup'].copy() ## exclude datatypes
@@ -366,7 +472,7 @@ class Synchronizer:
                     base_dict_path_dtTitle = {}
                     for path,dt_id in base_datatypes_paths.items():
                         tmp_dt_manager = baseFieldGroup.getDataTypeManager(dt_id)
-                        self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                        self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                         base_dict_path_dtTitle[path] = tmp_dt_manager.title
                     target_datatypes_paths = t_fieldgroup.getDataTypePaths(som_compatible=True)
                     target_datatypes_paths_list = list(target_datatypes_paths.keys())
@@ -418,7 +524,7 @@ class Synchronizer:
                     base_dict_path_dtTitle = {}
                     for path,dt_id in base_datatypes_paths.items():
                         tmp_dt_manager = baseFieldGroup.getDataTypeManager(dt_id)
-                        self.__syncDataType__(tmp_dt_manager,verbose=verbose)
+                        self.__syncDataType__(tmp_dt_manager,force=force,verbose=verbose)
                         base_dict_path_dtTitle[path] = tmp_dt_manager.title
                     for path,dt_title in base_dict_path_dtTitle.items():
                         tmp_t_dt = self.dict_targetComponents[target]['datatype'][dt_title]
@@ -437,16 +543,18 @@ class Synchronizer:
             self.dict_targetComponents[target]['fieldgroup'][name_base_fieldgroup] = t_fieldgroup


-    def __syncSchema__(self,baseSchema:'SchemaManager',verbose:bool=False)-> dict:
+    def __syncSchema__(self,baseSchema:'SchemaManager',force:bool=False,verbose:bool=False)-> dict:
         """
         Sync the schema to the target sandboxes.
         Arguments:
             baseSchema : REQUIRED : SchemaManager object to synchronize
+            force : OPTIONAL : if True, it will force the synchronization of field groups even if they already exist in the target schema
         """
         ## TO DO -> sync required fields
         if not isinstance(baseSchema,schemamanager.SchemaManager):
             raise TypeError("the baseSchema must be a SchemaManager object")
         name_base_schema = baseSchema.title
+        self.dict_baseComponents['schema'][name_base_schema] = baseSchema
         descriptors = baseSchema.getDescriptors()
         base_field_groups_names = list(baseSchema.fieldGroups.values())
         dict_base_fg_name_id = {name:fg_id for fg_id,name in baseSchema.fieldGroups.items()}
@@ -460,7 +568,7 @@ class Synchronizer:
             t_schema = schemamanager.SchemaManager(targetSchemaAPI.data.schemas_altId[name_base_schema],config=self.dict_targetsConfig[target],sandbox=target)
             new_fieldgroups = [fg for fg in base_field_groups_names if fg not in t_schema.fieldGroups.values()]
             existing_fieldgroups = [fg for fg in base_field_groups_names if fg in t_schema.fieldGroups.values()]
-            if len(new_fieldgroups) > 0: ## if new field groups
+            if len(new_fieldgroups) > 0 or force==True: ## if new field groups
                 if verbose:
                     print('found new field groups to add to the schema')
                 for new_fieldgroup in new_fieldgroups:
@@ -473,7 +581,8 @@ class Synchronizer:
                     if verbose:
                         print(f"field group '{new_fieldgroup}' is a custom field group, syncing it")
                     tmp_FieldGroup = baseSchema.getFieldGroupManager(new_fieldgroup)
-
+                    print(f"Creating new custom field group '{tmp_FieldGroup.title}'")
+                    self.__syncFieldGroup__(tmp_FieldGroup,verbose=verbose,force=force)
                     t_schema.addFieldGroup(self.dict_targetComponents[target]['fieldgroup'][new_fieldgroup].id)
                 t_schema.setDescription(baseSchema.description)
                 res = t_schema.updateSchema()
@@ -485,13 +594,35 @@ class Synchronizer:
                 for fg_name in existing_fieldgroups:
                     if baseSchema.tenantId[1:] in dict_base_fg_name_id[fg_name]: ## custom field group
                         tmp_fieldGroupManager = fieldgroupmanager.FieldGroupManager(dict_base_fg_name_id[fg_name],config=self.baseConfig,sandbox=target,localFolder=self.localfolder)
-                        self.__syncFieldGroup__(tmp_fieldGroupManager,verbose=verbose)
+                        self.__syncFieldGroup__(tmp_fieldGroupManager,force=force,verbose=verbose)
                     else:
                         if verbose:
                             print(f"field group '{fg_name}' is a OOTB field group, using it")
                         self.dict_targetComponents[target]['fieldgroup'][fg_name] = fieldgroupmanager.FieldGroupManager(dict_base_fg_name_id[fg_name],config=self.dict_targetsConfig[target],sandbox=target)
                 list_new_descriptors = self.__syncDescriptor__(baseSchema,t_schema,targetSchemaAPI=targetSchemaAPI,verbose=verbose)
+                ## handling the meta:refProperty setup if any
+                base_allOf = baseSchema.schema.get('allOf',[])
+                base_fg_name_metaref = {}
+                for refEl in base_allOf: ## retrieving the meta:refProperty from the base schema
+                    if 'meta:refProperty' in refEl.keys():
+                        tmp_base_fg_id = refEl['$ref']
+                        if baseSchema.tenantId[1:] in tmp_base_fg_id:
+                            tmp_base_fg_manager = self.getSyncFieldGroupManager(tmp_base_fg_id,sandbox=baseSchema.sandbox)
+                            base_fg_name_metaref[tmp_base_fg_manager.title] = refEl['meta:refProperty']
+                        else:
+                            base_fg_name_metaref[tmp_base_fg_id] = refEl['meta:refProperty']
+                for fg_name,ref_property in base_fg_name_metaref.items(): ## updating the target schema with the meta:refProperty
+                    for ref in t_schema.schema.get('allOf',[]):
+                        tmp_target_fg_id = ref['$ref']
+                        if baseSchema.tenantId[1:] in tmp_target_fg_id:
+                            tmp_target_fg_manager = self.getSyncFieldGroupManager(tmp_target_fg_id,sandbox=target)
+                            if fg_name == tmp_target_fg_manager.title:
+                                ref['meta:refProperty'] = ref_property
+                        else:
+                            if fg_name == ref['$ref']:
+                                ref['meta:refProperty'] = ref_property
                 self.dict_targetComponents[target]['schemaDescriptors'][name_base_schema] = list_new_descriptors
+                t_schema.updateSchema()
             else: ## schema does not exist in target
                 if verbose:
                     print(f"schema '{name_base_schema}' does not exist in target {target}, creating it")
@@ -501,7 +632,7 @@ class Synchronizer:
                 tenantidId = baseSchema.tenantId
                 if tenantidId[1:] in baseClassId: ## custom class
                     baseClassManager = classmanager.ClassManager(baseClassId,config=self.baseConfig,sandbox=target,localFolder=self.localfolder,sandboxBase=self.baseSandbox,tenantidId=tenantidId)
-                    self.__syncClass__(baseClassManager,verbose=verbose)
+                    self.__syncClass__(baseClassManager,force=force,verbose=verbose)
                     targetClassManager = self.dict_targetComponents[target]['class'][baseClassManager.title]
                     classId_toUse = targetClassManager.id
                 else:
@@ -515,8 +646,10 @@ class Synchronizer:
                         ## adding the field group to the target components
                         self.dict_targetComponents[target]['fieldgroup'][fg_name] = fieldgroupmanager.FieldGroupManager(dict_base_fg_name_id[fg_name],config=self.dict_targetsConfig[target],sandbox=target)
                     else:
+                        if verbose:
+                            print(f"field group '{fg_name}' is a custom field group, using it")
                         tmp_FieldGroup = baseSchema.getFieldGroupManager(fg_name)
-                        self.__syncFieldGroup__(tmp_FieldGroup,verbose=verbose)
+                        self.__syncFieldGroup__(tmp_FieldGroup,force=force,verbose=verbose)
                         new_schema.addFieldGroup(self.dict_targetComponents[target]['fieldgroup'][fg_name].id)
                 new_schema.setDescription(baseSchema.description)
                 res = new_schema.createSchema()
@@ -528,6 +661,28 @@ class Synchronizer:
                 ## handling descriptors
                 list_new_descriptors = self.__syncDescriptor__(baseSchema,t_schema,targetSchemaAPI,verbose=verbose)
                 self.dict_targetComponents[target]['schemaDescriptors'][name_base_schema] = list_new_descriptors
+                ## handling the meta:refProperty setup if any
+                base_allOf = baseSchema.schema.get('allOf',[])
+                base_fg_name_metaref = {}
+                for refEl in base_allOf: ## retrieving the meta:refProperty from the base schema
+                    if 'meta:refProperty' in refEl.keys():
+                        tmp_base_fg_id = refEl['$ref']
+                        if baseSchema.tenantId[1:] in tmp_base_fg_id:
+                            tmp_base_fg_manager = self.getSyncFieldGroupManager(tmp_base_fg_id,sandbox=baseSchema.sandbox)
+                            base_fg_name_metaref[tmp_base_fg_manager.title] = refEl['meta:refProperty']
+                        else:
+                            base_fg_name_metaref[tmp_base_fg_id] = refEl['meta:refProperty']
+                for fg_name,ref_property in base_fg_name_metaref.items(): ## updating the target schema with the meta:refProperty
+                    for ref in t_schema.schema.get('allOf',[]):
+                        tmp_target_fg_id = ref['$ref']
+                        if baseSchema.tenantId[1:] in tmp_target_fg_id:
+                            tmp_target_fg_manager = self.getSyncFieldGroupManager(tmp_target_fg_id,sandbox=target)
+                            if fg_name == tmp_target_fg_manager.title:
+                                ref['meta:refProperty'] = ref_property
+                        else:
+                            if fg_name == ref['$ref']:
+                                ref['meta:refProperty'] = ref_property
+                t_schema.updateSchema()
             self.dict_targetComponents[target]['schema'][name_base_schema] = t_schema

     def __syncDescriptor__(self,baseSchemaManager:'SchemaManager'=None,targetSchemaManager:'SchemaManager'=None,targetSchemaAPI:'Schema'=None,verbose:bool=False)-> dict:
@@ -546,6 +701,7 @@ class Synchronizer:
         if not isinstance(targetSchemaManager,schemamanager.SchemaManager):
             raise TypeError("the targetSchemaManager must be a SchemaManager object")
         base_descriptors = baseSchemaManager.getDescriptors()
+        self.dict_baseComponents['schemaDescriptors'][baseSchemaManager.title] = {}
         if self.baseConfig is not None:
             baseSchemaAPI = schema.Schema(config=self.baseConfig)
             myschemas = baseSchemaAPI.getSchemas() ## to populate the data object
@@ -710,6 +866,7 @@ class Synchronizer:
         if not isinstance(identityDefiniton,dict):
             raise TypeError("the identityDefinition must be a dictionary")
         code_base_identity = identityDefiniton['code']
+        self.dict_baseComponents['identities'][code_base_identity] = identityDefiniton
         for target in self.dict_targetsConfig.keys():
             targetIdentity = identity.Identity(config=self.dict_targetsConfig[target],region=self.region)
             t_identities = targetIdentity.getIdentities()
@@ -735,7 +892,8 @@ class Synchronizer:
             baseDataset : REQUIRED : dictionary with the dataset definition
         """
         if len(baseDataset) == 1: ## if receiving the dataset as provided by the API {datasetId:{...definition}}
-            baseDataset = deepcopy(baseDataset[list(baseDataset.keys()[0]
+            baseDataset = deepcopy(baseDataset[list(baseDataset.keys())[0]])
+        self.dict_baseComponents['datasets'][baseDataset['name']] = baseDataset
         base_datasetName = baseDataset['name']
         base_dataset_related_schemaId = baseDataset['schemaRef']['id']
         if self.baseConfig is not None:
@@ -779,4 +937,94 @@ class Synchronizer:
             t_schemas = targetSchema.getSchemas()
             baseSchemaManager = schemamanager.SchemaManager(base_dataset_related_schemaId,config=self.baseConfig,localFolder=self.localfolder,sandbox=self.baseSandbox)
             self.__syncSchema__(baseSchemaManager,verbose=verbose)
+            self.dict_targetComponents[target]['datasets'][base_datasetName] = t_dataset
+
+    def __syncMergePolicy__(self,mergePolicy:dict,verbose:bool=False)->None:
+        """
+        Synchronize the dataset to the target sandboxes. Mostly creating a new dataset and associated artefacts when not already created.
+        Arguments:
+            mergePolicy : REQUIRED : The merge policy dictionary to sync
+        """
+        if not isinstance(mergePolicy,dict):
+            raise TypeError("the mergePolicy must be a dictionary")
+        self.dict_baseComponents['mergePolicy'][mergePolicy.get('id','unknown')] = mergePolicy
+        mergePolicy_name = mergePolicy.get('name','unknown')
+        if mergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
+            if verbose:
+                print(f"handling dataset precedence for merge policy '{mergePolicy_name}'")
+                print("syncing the datasets involved in the precedence order")
+            base_list_precedenceDatasets = mergePolicy['attributeMerge'].get('order',[])
+            for ds_id in base_list_precedenceDatasets:
+                res = self.syncComponent(ds_id,componentType='dataset',verbose=verbose)
+        for target in self.dict_targetsConfig.keys():
+            targetCustomerProfile = customerprofile.Profile(config=self.dict_targetsConfig[target])
+            t_mergePolicies = targetCustomerProfile.getMergePolicies()
+            if mergePolicy_name not in [el.get('name','') for el in t_mergePolicies]: ## merge policy does not exist in target
+                if verbose:
+                    print(f"merge policy '{mergePolicy_name}' does not exist in target {target}, creating it")
+                mergePolicyDef = {
+                    "name":mergePolicy.get('name',''),
+                    "schema":mergePolicy.get('schema','_xdm.context.profile'),
+                    "identityGraph":mergePolicy.get('identityGraph','pdg'),
+                    "isActiveOnEdge":mergePolicy.get('isActiveOnEdge',False),
+                }
+                if mergePolicy['attributeMerge'].get('type','timestampOrdered') == 'dataSetPrecedence':
+                    target_list_precedenceDatasets = []
+                    for base_ds_id in mergePolicy['attributeMerge'].get('order',[]):
+                        base_ds_name = self.getDatasetName(base_ds_id,sandbox=target)
+                        target_ds_id = self.dict_targetComponents[target]['datasets'][base_ds_name]['id']
+                        target_list_precedenceDatasets.append(target_ds_id)
+                    mergePolicyDef['attributeMerge'] = {
+                        "type":mergePolicy['attributeMerge'].get('type','timestampOrdered'),
+                        "order":target_list_precedenceDatasets
+                    }
+                else:
+                    mergePolicyDef['attributeMerge'] = {'type':'timestampOrdered'}
+                res = targetCustomerProfile.createMergePolicy(mergePolicyDef)
+                if 'id' in res.keys():
+                    self.dict_targetComponents[target]['mergePolicy'][res['id']] = res
+                else:
+                    print(res)
+                    raise Exception("the merge policy could not be created in the target sandbox")
+            else: ## merge policy already exists in target
+                if verbose:
+                    print(f"merge policy '{mergePolicy_name}' already exists in target {target}, saving it")
+                self.dict_targetComponents[target]['mergePolicy'][mergePolicy_name] = [el for el in t_mergePolicies if el.get('name','') == mergePolicy_name][0]
+
+    def __syncAudience__(self,baseAudience:dict,verbose:bool=False)-> None:
+        """
+        Synchronize an audience to the target sandboxes.
+        Arguments:
+            baseAudience : REQUIRED : dictionary with the audience definition
+        """
+        if not isinstance(baseAudience,dict):
+            raise TypeError("the baseAudience must be a dictionary")
+        audience_name = baseAudience.get('name','unknown')
+        self.dict_baseComponents['audience'][audience_name] = baseAudience
+        for target in self.dict_targetsConfig.keys():
+            targetAudiences = segmentation.Segmentation(config=self.dict_targetsConfig[target])
+            t_audiences = targetAudiences.getAudiences()
+            if audience_name not in [el['name'] for el in t_audiences]: ## audience does not exist in target
+                if verbose:
+                    print(f"audience '{audience_name}' does not exist in target {target}, creating it")
+                audienceDef = {
+                    "name":baseAudience.get('name',''),
+                    "description":baseAudience.get('description',''),
+                    "type":baseAudience.get('type','SegmentDefinition'),
+                    "schema":baseAudience.get('schema','_xdm.context.profile'),
+                    "expression":baseAudience.get('expression',[]),
+                    "ansibleDataModel":baseAudience.get('ansibleDataModel',{}),
+                    "profileInstanceId":baseAudience.get('profileInstanceId',''),
+                    "evaluationInfo":baseAudience.get('evaluationInfo',{'batch': {'enabled': True}, 'continuous': {'enabled': False},'synchronous': {'enabled': False}})
+                }
+                res = targetAudiences.createAudience(audienceDef)
+                if 'id' in res.keys():
+                    self.dict_targetComponents[target]['audience'][res['id']] = res
+                else:
+                    print(res)
+                    raise Exception("the audience could not be created in the target sandbox")
+            else: ## audience already exists in target
+                if verbose:
+                    print(f"audience '{audience_name}' already exists in target {target}, saving it")
+                self.dict_targetComponents[target]['audience'][audience_name] = [el for el in t_audiences if el['name'] == audience_name][0]

{aepp-0.4.1.post1.dist-info → aepp-0.4.2.dist-info}/RECORD
CHANGED

@@ -1,12 +1,12 @@
-aepp/__init__.py,sha256=
-aepp/__version__.py,sha256=
+aepp/__init__.py,sha256=d5hE6VcHz52B5o_AbYO368iG8FTsJlt91nBWej8WL8A,26381
+aepp/__version__.py,sha256=Y-I6FXKes4hB39WcBm3HZ1OCtCWiErPB6YZ-YSgexuE,21
 aepp/accesscontrol.py,sha256=PB3FcrO4bvDjdNxjHx7p_20hp4ahBXewoOSxuTGMXC8,17423
 aepp/catalog.py,sha256=hK9m3SAP0fhgkYqu14Tcfq14qBhw54tLCOF0mH31b1M,68237
 aepp/classmanager.py,sha256=CTYGkg5ygB8HtRia6DfT9WLBqXJOVg7pSM9jBB25Bqw,64707
 aepp/config.py,sha256=232fcO8JaYJnS4glf8Ebnx9rCdHshZBVaVUbhoOAXkc,2543
 aepp/configs.py,sha256=5rRWJoUQDDaj3AAXWdKCZBZA_Xb7q1Hd58OkWhzwK34,16151
 aepp/connector.py,sha256=-EskFJm8Ki8A7_gpuNrydBBhz1-jZZz8QMB6gHQTZeA,27262
-aepp/customerprofile.py,sha256
+aepp/customerprofile.py,sha256=1yz7piGsSbxM2GaHqkCV-117Es6D1SthrNgnsVxH3Y8,49344
 aepp/dataaccess.py,sha256=oOERLSxMh2nYBFngPS1dFI_AG3W-DJXmMoUVUiKXJrw,16338
 aepp/dataprep.py,sha256=vMT4OYO8y6wsGRSjbJNQmgM048BiP3t1-RvKKglSiN4,27586
 aepp/datasets.py,sha256=hTioR0WyImB91mleCwDQ2FfunvqYi_RrxX_v-iW6N70,8778
@@ -26,15 +26,15 @@ aepp/policy.py,sha256=JbpvfCKJl2kE2McK2mn_ZI5HKd_6pTnrfMoUdyJesWQ,24924
 aepp/privacyservice.py,sha256=V6BkJeZG1LDBCyEQm9Gx0i68iRHG6uxSJiVnXzkHapI,8790
 aepp/queryservice.py,sha256=mYcnzBG6PmNZYepQydqMhG-oFD4eIDEzb3mAKtKMgZE,61739
 aepp/sandboxes.py,sha256=UwlSFkO2OOmH--6ISz8rxwDu2LcLH1MPqoH7yOEAZHc,29363
-aepp/schema.py,sha256=
-aepp/schemamanager.py,sha256=
+aepp/schema.py,sha256=PmYXSkmhmVVUbXQqsEVSAaTMYKOYha8FIYoySwPgMs4,122150
+aepp/schemamanager.py,sha256=hwItd4vXsPFeV25gX1Fbeiu07-BCg4z_VRQREMgJZ58,50738
 aepp/segmentation.py,sha256=oSgR2yx4nawYN5XAeHV_wefvmXEf0nb-bCguaDmp8F8,43555
 aepp/sensei.py,sha256=oYNy5BSWAEqsDkEexcQso6NfA6ntGGMnCOyHri0pJs8,7761
-aepp/som.py,sha256=
-aepp/synchronizer.py,sha256=
+aepp/som.py,sha256=XNm_Lu2wt2kpSSpldLptuER2eludFXeO9fI6i3iNCzo,34175
+aepp/synchronizer.py,sha256=UA48GKJ5OaasMxbS2O7A9T1i6HrehRx8_7jtxaNFD9M,77388
 aepp/tags.py,sha256=t2qBallTcWR4IOXcDBmrPpqjbSay1z3E2bcRijzVm1s,17641
 aepp/utils.py,sha256=tG-YVXylm38-bynqfp5N_Mzyo7mhlZj-dLo7wLoO4tM,1200
-aepp-0.4.
+aepp-0.4.2.dist-info/licenses/LICENSE,sha256=HjYTlfne3BbS5gNHzNqJ5COCiTQLUdf87QkzRyFbE4Y,10337
 tests/__init__.py,sha256=d6zWJsJFZrQd5wAYM7sezSxwXbuMMWfNPkK_vpaUzFA,623
 tests/catalog_test.py,sha256=O4kkG0C_dXk3E77pSzWIt1ewfyKjfZqgbJmBwWwx0po,2246
 tests/dataaccess_test.py,sha256=bnHwOjPPauTM8s1c6O7iUYC--gqt6tPzT94aEZHDC-c,1238
@@ -44,7 +44,7 @@ tests/exportDatasetToDatalandingZone_test.py,sha256=193AgQR8yhnQmRWV9pgYz1X2Hz-Y
 tests/flowservice_test.py,sha256=Y1mpYWbKYL_x-ZlIY-EuOuNvlzVV1ERlKseDO7gN3Ss,4208
 tests/schema_test.py,sha256=6UsgdsizKmii1hzREpBEKWvZouXdJMvU68UKSxlt1uk,2774
 tests/som_test.py,sha256=a4ut0pEg1HJVMTESaPITmj7YkF54eWCMzKxTMIS-VvM,12101
-aepp-0.4.
-aepp-0.4.
-aepp-0.4.
-aepp-0.4.
+aepp-0.4.2.dist-info/METADATA,sha256=nCyugz0f9W1kg7B-VVHAlYrKQt7Pifwq4Jh7CJ0SmRc,5470
+aepp-0.4.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aepp-0.4.2.dist-info/top_level.txt,sha256=Gn88pv1ywuEAgOvhmmXXhN4dosEfCrBNDskje3nqS34,11
+aepp-0.4.2.dist-info/RECORD,,
File without changes
|
|
File without changes
|
|
File without changes
|