semantic-link-labs 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (52)
  1. semantic_link_labs-0.4.1.dist-info/LICENSE +21 -0
  2. semantic_link_labs-0.4.1.dist-info/METADATA +22 -0
  3. semantic_link_labs-0.4.1.dist-info/RECORD +52 -0
  4. semantic_link_labs-0.4.1.dist-info/WHEEL +5 -0
  5. semantic_link_labs-0.4.1.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +154 -0
  7. sempy_labs/_ai.py +496 -0
  8. sempy_labs/_clear_cache.py +39 -0
  9. sempy_labs/_connections.py +234 -0
  10. sempy_labs/_dax.py +70 -0
  11. sempy_labs/_generate_semantic_model.py +280 -0
  12. sempy_labs/_helper_functions.py +506 -0
  13. sempy_labs/_icons.py +4 -0
  14. sempy_labs/_list_functions.py +1372 -0
  15. sempy_labs/_model_auto_build.py +143 -0
  16. sempy_labs/_model_bpa.py +1354 -0
  17. sempy_labs/_model_dependencies.py +341 -0
  18. sempy_labs/_one_lake_integration.py +155 -0
  19. sempy_labs/_query_scale_out.py +447 -0
  20. sempy_labs/_refresh_semantic_model.py +184 -0
  21. sempy_labs/_tom.py +3766 -0
  22. sempy_labs/_translations.py +378 -0
  23. sempy_labs/_vertipaq.py +893 -0
  24. sempy_labs/directlake/__init__.py +45 -0
  25. sempy_labs/directlake/_directlake_schema_compare.py +110 -0
  26. sempy_labs/directlake/_directlake_schema_sync.py +128 -0
  27. sempy_labs/directlake/_fallback.py +62 -0
  28. sempy_labs/directlake/_get_directlake_lakehouse.py +69 -0
  29. sempy_labs/directlake/_get_shared_expression.py +59 -0
  30. sempy_labs/directlake/_guardrails.py +84 -0
  31. sempy_labs/directlake/_list_directlake_model_calc_tables.py +54 -0
  32. sempy_labs/directlake/_show_unsupported_directlake_objects.py +89 -0
  33. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +81 -0
  34. sempy_labs/directlake/_update_directlake_partition_entity.py +64 -0
  35. sempy_labs/directlake/_warm_cache.py +210 -0
  36. sempy_labs/lakehouse/__init__.py +24 -0
  37. sempy_labs/lakehouse/_get_lakehouse_columns.py +81 -0
  38. sempy_labs/lakehouse/_get_lakehouse_tables.py +250 -0
  39. sempy_labs/lakehouse/_lakehouse.py +85 -0
  40. sempy_labs/lakehouse/_shortcuts.py +296 -0
  41. sempy_labs/migration/__init__.py +29 -0
  42. sempy_labs/migration/_create_pqt_file.py +239 -0
  43. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +429 -0
  44. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +150 -0
  45. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +524 -0
  46. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +165 -0
  47. sempy_labs/migration/_migration_validation.py +227 -0
  48. sempy_labs/migration/_refresh_calc_tables.py +129 -0
  49. sempy_labs/report/__init__.py +35 -0
  50. sempy_labs/report/_generate_report.py +253 -0
  51. sempy_labs/report/_report_functions.py +855 -0
  52. sempy_labs/report/_report_rebind.py +131 -0
sempy_labs/migration/_migrate_calctables_to_semantic_model.py
@@ -0,0 +1,150 @@
+ import sempy
+ import sempy.fabric as fabric
+ import re, datetime, time
+ from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+ from sempy_labs._helper_functions import resolve_lakehouse_name
+ from sempy_labs._tom import connect_semantic_model
+ from typing import Optional
+ from sempy._utils._log import log
+ import sempy_labs._icons as icons
+
+
+ @log
+ def migrate_calc_tables_to_semantic_model(
+     dataset: str,
+     new_dataset: str,
+     workspace: Optional[str] = None,
+     new_dataset_workspace: Optional[str] = None,
+     lakehouse: Optional[str] = None,
+     lakehouse_workspace: Optional[str] = None,
+ ):
+     """
+     Creates new tables in the Direct Lake semantic model based on the lakehouse tables created using the 'migrate_calc_tables_to_lakehouse' function.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the import/DirectQuery semantic model.
+     new_dataset : str
+         Name of the Direct Lake semantic model.
+     workspace : str, default=None
+         The Fabric workspace name in which the import/DirectQuery semantic model exists.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     new_dataset_workspace : str, default=None
+         The Fabric workspace name in which the Direct Lake semantic model will be created.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     lakehouse : str, default=None
+         The Fabric lakehouse used by the Direct Lake semantic model.
+         Defaults to None which resolves to the lakehouse attached to the notebook.
+     lakehouse_workspace : str, default=None
+         The Fabric workspace used by the lakehouse.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+
+     if new_dataset_workspace is None:
+         new_dataset_workspace = workspace
+
+     if lakehouse_workspace is None:
+         lakehouse_workspace = new_dataset_workspace
+     if lakehouse is None:
+         lakehouse_id = fabric.get_lakehouse_id()
+         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
+
+     # Get calculated tables, excluding field parameters (their DAX contains NAMEOF)
+     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+     dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
+     dfP_filt = dfP_filt[~dfP_filt["Query"].str.contains("NAMEOF")]
+
+     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+     lc = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
+     # Get all columns of those calculated tables (field parameters excluded)
+     dfC_filt = dfC[(dfC["Table Name"].isin(dfP_filt["Table Name"]))]
+
+     if len(dfP_filt) == 0:
+         print(
+             f"{icons.green_dot} The '{dataset}' semantic model has no calculated tables."
+         )
+         return
+
+     # Retry for up to one minute: the new semantic model may not accept
+     # writes immediately after it is created.
+     start_time = datetime.datetime.now()
+     timeout = datetime.timedelta(minutes=1)
+     success = False
+
+     while not success:
+         try:
+             with connect_semantic_model(
+                 dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
+             ) as tom:
+                 success = True
+                 for tName in dfC_filt["Table Name"].unique():
+                     if tName.lower() in lc["Table Name"].values:
+                         # Add the table and its partition only if it does not already exist
+                         try:
+                             tom.model.Tables[tName]
+                         except Exception:
+                             tom.add_table(name=tName)
+                             tom.add_entity_partition(
+                                 table_name=tName,
+                                 entity_name=tName.replace(" ", "_").lower(),
+                             )
+
+                         columns_in_table = dfC_filt.loc[
+                             dfC_filt["Table Name"] == tName, "Column Name"
+                         ].unique()
+
+                         for cName in columns_in_table:
+                             scName = dfC.loc[
+                                 (dfC["Table Name"] == tName)
+                                 & (dfC["Column Name"] == cName),
+                                 "Source",
+                             ].iloc[0]
+                             cDataType = dfC.loc[
+                                 (dfC["Table Name"] == tName)
+                                 & (dfC["Column Name"] == cName),
+                                 "Data Type",
+                             ].iloc[0]
+
+                             # Derive the lakehouse column name from the DAX source
+                             # expression, e.g. 'Table'[My Column] -> MyColumn
+                             pattern = r"\[([^]]+)\]"
+                             matches = re.findall(pattern, scName)
+                             lakeColumn = matches[0].replace(" ", "")
+
+                             # Add the column only if it does not already exist
+                             try:
+                                 tom.model.Tables[tName].Columns[cName]
+                             except Exception:
+                                 tom.add_data_column(
+                                     table_name=tName,
+                                     column_name=cName,
+                                     source_column=lakeColumn,
+                                     data_type=cDataType,
+                                 )
+                                 print(
+                                     f"{icons.green_dot} The '{tName}'[{cName}] column has been added."
+                                 )
+
+                 print(
+                     f"\n{icons.green_dot} All viable calculated tables have been added to the model."
+                 )
+
+         except Exception:
+             if datetime.datetime.now() - start_time > timeout:
+                 break
+             time.sleep(1)
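
For orientation, a minimal usage sketch of the function added above. It assumes the function is re-exported from sempy_labs.migration (the subpackage's __init__.py in this release adds 29 lines, presumably exports), and all model, workspace, and lakehouse names below are placeholders, not part of this release. Per the docstring, 'migrate_calc_tables_to_lakehouse' would need to have been run first.

    # Hypothetical invocation from a Fabric notebook; names are illustrative only.
    from sempy_labs.migration import migrate_calc_tables_to_semantic_model

    migrate_calc_tables_to_semantic_model(
        dataset="SalesModel",                # source import/DirectQuery model
        new_dataset="SalesModelDirectLake",  # target Direct Lake model
        workspace="Analytics",
        new_dataset_workspace="Analytics",
        lakehouse="MigrationLakehouse",      # populated by migrate_calc_tables_to_lakehouse
        lakehouse_workspace="Analytics",
    )

Omitting the optional arguments falls back to the attached lakehouse and the notebook workspace, as described in the docstring.
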
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
@@ -0,0 +1,524 @@
+ import sempy
+ import sempy.fabric as fabric
+ import re, datetime, time
+ from sempy_labs._list_functions import list_tables
+ from sempy_labs._helper_functions import create_relationship_name
+ from sempy_labs._tom import connect_semantic_model
+ from typing import Optional
+ from sempy._utils._log import log
+ import sempy_labs._icons as icons
+
+
+ @log
+ def migrate_model_objects_to_semantic_model(
+     dataset: str,
+     new_dataset: str,
+     workspace: Optional[str] = None,
+     new_dataset_workspace: Optional[str] = None,
+ ):
+     """
+     Adds the remaining model objects (besides tables/columns) and their properties to a Direct Lake semantic model based on an import/DirectQuery semantic model.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the import/DirectQuery semantic model.
+     new_dataset : str
+         Name of the Direct Lake semantic model.
+     workspace : str, default=None
+         The Fabric workspace name in which the import/DirectQuery semantic model exists.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     new_dataset_workspace : str, default=None
+         The Fabric workspace name in which the Direct Lake semantic model will be created.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     """
+
+     sempy.fabric._client._utils._init_analysis_services()
+     import Microsoft.AnalysisServices.Tabular as TOM
+     import System
+
+     if workspace is None:
+         workspace_id = fabric.get_workspace_id()
+         workspace = fabric.resolve_workspace_name(workspace_id)
+
+     if new_dataset_workspace is None:
+         new_dataset_workspace = workspace
+
+     dfT = list_tables(dataset, workspace)
+     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
+     dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
+     dfRole = fabric.get_roles(dataset=dataset, workspace=workspace)
+     dfRLS = fabric.get_row_level_security_permissions(
+         dataset=dataset, workspace=workspace
+     )
+     dfCI = fabric.list_calculation_items(dataset=dataset, workspace=workspace)
+     dfP = fabric.list_perspectives(dataset=dataset, workspace=workspace)
+     dfTranslation = fabric.list_translations(dataset=dataset, workspace=workspace)
+     dfH = fabric.list_hierarchies(dataset=dataset, workspace=workspace)
+     dfPar = fabric.list_partitions(dataset=dataset, workspace=workspace)
+
+     # Identify field-parameter tables (calculated tables whose DAX contains NAMEOF)
+     dfP_cc = dfPar[(dfPar["Source Type"] == "Calculated")]
+     dfP_fp = dfP_cc[dfP_cc["Query"].str.contains("NAMEOF")]
+
+     print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
+     # Retry for up to one minute: the new semantic model may not accept
+     # writes immediately after it is created.
+     start_time = datetime.datetime.now()
+     timeout = datetime.timedelta(minutes=1)
+     success = False
+
+     while not success:
+         try:
+             with connect_semantic_model(
+                 dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
+             ) as tom:
+                 success = True
+
+                 # The new model is Direct Lake if any partition uses DirectLake mode
+                 isDirectLake = any(
+                     str(p.Mode) == "DirectLake"
+                     for t in tom.model.Tables
+                     for p in t.Partitions
+                 )
+
+                 print(f"\n{icons.in_progress} Updating table properties...")
+                 for t in tom.model.Tables:
+                     t.IsHidden = bool(dfT.loc[dfT["Name"] == t.Name, "Hidden"].iloc[0])
+                     t.Description = dfT.loc[dfT["Name"] == t.Name, "Description"].iloc[0]
+                     t.DataCategory = dfT.loc[dfT["Name"] == t.Name, "Data Category"].iloc[0]
+
+                     print(
+                         f"{icons.green_dot} The '{t.Name}' table's properties have been updated."
+                     )
+
+                 print(f"\n{icons.in_progress} Updating column properties...")
+                 for t in tom.model.Tables:
+                     # Do not include field parameters
+                     if t.Name not in dfP_fp["Table Name"].values:
+                         dfT_filtered = dfT[dfT["Name"] == t.Name]
+                         tType = dfT_filtered["Type"].iloc[0]
+                         for c in t.Columns:
+                             if not c.Name.startswith("RowNumber-"):
+                                 dfC_filt = dfC[
+                                     (dfC["Table Name"] == t.Name)
+                                     & (dfC["Column Name"] == c.Name)
+                                 ]
+                                 cName = dfC_filt["Column Name"].iloc[0]
+                                 c.Name = cName
+                                 if tType == "Table":
+                                     c.SourceColumn = cName.replace(" ", "_")
+                                 c.IsHidden = bool(dfC_filt["Hidden"].iloc[0])
+                                 c.DataType = System.Enum.Parse(
+                                     TOM.DataType, dfC_filt["Data Type"].iloc[0]
+                                 )
+                                 c.DisplayFolder = dfC_filt["Display Folder"].iloc[0]
+                                 c.FormatString = dfC_filt["Format String"].iloc[0]
+                                 c.SummarizeBy = System.Enum.Parse(
+                                     TOM.AggregateFunction,
+                                     dfC_filt["Summarize By"].iloc[0],
+                                 )
+                                 c.DataCategory = dfC_filt["Data Category"].iloc[0]
+                                 c.IsKey = bool(dfC_filt["Key"].iloc[0])
+                                 sbc = dfC_filt["Sort By Column"].iloc[0]
+
+                                 if sbc is not None:
+                                     try:
+                                         c.SortByColumn = tom.model.Tables[t.Name].Columns[sbc]
+                                     except Exception:
+                                         print(
+                                             f"{icons.red_dot} Failed to set '{sbc}' as the Sort By Column for the '{c.Name}' column in the '{t.Name}' table."
+                                         )
+                                 print(
+                                     f"{icons.green_dot} The '{t.Name}'[{c.Name}] column's properties have been updated."
+                                 )
+
+                 print(f"\n{icons.in_progress} Creating hierarchies...")
+                 dfH_grouped = (
+                     dfH.groupby(
+                         [
+                             "Table Name",
+                             "Hierarchy Name",
+                             "Hierarchy Hidden",
+                             "Hierarchy Description",
+                         ]
+                     )
+                     .agg({"Level Name": list, "Column Name": list})
+                     .reset_index()
+                 )
+
+                 for i, r in dfH_grouped.iterrows():
+                     tName = r["Table Name"]
+                     hName = r["Hierarchy Name"]
+                     hDesc = r["Hierarchy Description"]
+                     hHid = bool(r["Hierarchy Hidden"])
+                     cols = r["Column Name"]
+                     lvls = r["Level Name"]
+
+                     try:
+                         tom.model.Tables[tName].Hierarchies[hName]
+                     except Exception:
+                         tom.add_hierarchy(
+                             table_name=tName,
+                             hierarchy_name=hName,
+                             hierarchy_description=hDesc,
+                             hierarchy_hidden=hHid,
+                             columns=cols,
+                             levels=lvls,
+                         )
+                         print(
+                             f"{icons.green_dot} The '{hName}' hierarchy has been added."
+                         )
+
+                 print(f"\n{icons.in_progress} Creating measures...")
+                 for i, r in dfM.iterrows():
+                     tName = r["Table Name"]
+                     mName = r["Measure Name"]
+                     mExpr = r["Measure Expression"]
+                     mHidden = bool(r["Measure Hidden"])
+                     mDF = r["Measure Display Folder"]
+                     mDesc = r["Measure Description"]
+                     mFS = r["Format String"]
+
+                     try:
+                         tom.model.Tables[tName].Measures[mName]
+                     except Exception:
+                         tom.add_measure(
+                             table_name=tName,
+                             measure_name=mName,
+                             expression=mExpr,
+                             hidden=mHidden,
+                             display_folder=mDF,
+                             description=mDesc,
+                             format_string=mFS,
+                         )
+                         print(
+                             f"{icons.green_dot} The '{mName}' measure has been added."
+                         )
+
+                 for cgName in dfCI["Calculation Group Name"].unique():
+                     isHidden = bool(
+                         dfCI.loc[
+                             (dfCI["Calculation Group Name"] == cgName), "Hidden"
+                         ].iloc[0]
+                     )
+                     prec = int(
+                         dfCI.loc[
+                             (dfCI["Calculation Group Name"] == cgName), "Precedence"
+                         ].iloc[0]
+                     )
+                     desc = dfCI.loc[
+                         (dfCI["Calculation Group Name"] == cgName), "Description"
+                     ].iloc[0]
+
+                     try:
+                         tom.model.Tables[cgName]
+                     except Exception:
+                         tom.add_calculation_group(
+                             name=cgName,
+                             description=desc,
+                             precedence=prec,
+                             hidden=isHidden,
+                         )
+                         print(
+                             f"{icons.green_dot} The '{cgName}' calculation group has been added."
+                         )
+                     tom.model.DiscourageImplicitMeasures = True
+
+                     print(
+                         f"\n{icons.in_progress} Updating calculation group column name..."
+                     )
+                     dfC_filt = dfC[
+                         (dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)
+                     ]
+                     colName = dfC_filt["Column Name"].iloc[0]
+                     tom.model.Tables[cgName].Columns["Name"].Name = colName
+
+                     calcItems = dfCI.loc[
+                         dfCI["Calculation Group Name"] == cgName,
+                         "Calculation Item Name",
+                     ].unique()
+
+                     print(f"\n{icons.in_progress} Creating calculation items...")
+                     for calcItem in calcItems:
+                         ordinal = int(
+                             dfCI.loc[
+                                 (dfCI["Calculation Group Name"] == cgName)
+                                 & (dfCI["Calculation Item Name"] == calcItem),
+                                 "Ordinal",
+                             ].iloc[0]
+                         )
+                         expr = dfCI.loc[
+                             (dfCI["Calculation Group Name"] == cgName)
+                             & (dfCI["Calculation Item Name"] == calcItem),
+                             "Expression",
+                         ].iloc[0]
+                         fse = dfCI.loc[
+                             (dfCI["Calculation Group Name"] == cgName)
+                             & (dfCI["Calculation Item Name"] == calcItem),
+                             "Format String Expression",
+                         ].iloc[0]
+                         try:
+                             tom.model.Tables[cgName].CalculationGroup.CalculationItems[
+                                 calcItem
+                             ]
+                         except Exception:
+                             tom.add_calculation_item(
+                                 table_name=cgName,
+                                 calculation_item_name=calcItem,
+                                 expression=expr,
+                                 format_string_expression=fse,
+                                 ordinal=ordinal,
+                             )
+                             print(
+                                 f"{icons.green_dot} The '{calcItem}' calculation item has been added to the '{cgName}' calculation group."
+                             )
+
+                 print(f"\n{icons.in_progress} Creating relationships...")
+                 for index, row in dfR.iterrows():
+                     fromTable = row["From Table"]
+                     fromColumn = row["From Column"]
+                     toTable = row["To Table"]
+                     toColumn = row["To Column"]
+                     isActive = row["Active"]
+                     cfb = row["Cross Filtering Behavior"]
+                     sfb = row["Security Filtering Behavior"]
+                     rori = row["Rely On Referential Integrity"]
+                     mult = row["Multiplicity"]
+
+                     # Map the multiplicity string (e.g. 'm:1') to cardinalities
+                     card_mapping = {"m": "Many", "1": "One", "0": "None"}
+
+                     fromCard = card_mapping.get(mult[0])
+                     toCard = card_mapping.get(mult[-1])
+
+                     relName = create_relationship_name(
+                         fromTable, fromColumn, toTable, toColumn
+                     )
+
+                     # Look up the data types of the relationship columns in the
+                     # source model: Direct Lake supports neither relationships on
+                     # DateTime columns nor relationships between columns of
+                     # different data types.
+                     fromDataType = dfC.loc[
+                         (dfC["Table Name"] == fromTable)
+                         & (dfC["Column Name"] == fromColumn),
+                         "Data Type",
+                     ].iloc[0]
+                     toDataType = dfC.loc[
+                         (dfC["Table Name"] == toTable)
+                         & (dfC["Column Name"] == toColumn),
+                         "Data Type",
+                     ].iloc[0]
+
+                     if any(
+                         r.FromTable.Name == fromTable
+                         and r.FromColumn.Name == fromColumn
+                         and r.ToTable.Name == toTable
+                         and r.ToColumn.Name == toColumn
+                         for r in tom.model.Relationships
+                     ):
+                         print(
+                             f"{icons.yellow_dot} {relName} already exists as a relationship in the semantic model."
+                         )
+                     elif isDirectLake and "DateTime" in (fromDataType, toDataType):
+                         print(
+                             f"{icons.yellow_dot} {relName} was not created since relationships based on DateTime columns are not supported."
+                         )
+                     elif isDirectLake and fromDataType != toDataType:
+                         print(
+                             f"{icons.yellow_dot} {relName} was not created since columns used in a relationship must have the same data type."
+                         )
+                     else:
+                         try:
+                             tom.add_relationship(
+                                 from_table=fromTable,
+                                 from_column=fromColumn,
+                                 to_table=toTable,
+                                 to_column=toColumn,
+                                 from_cardinality=fromCard,
+                                 to_cardinality=toCard,
+                                 cross_filtering_behavior=cfb,
+                                 security_filtering_behavior=sfb,
+                                 rely_on_referential_integrity=rori,
+                                 is_active=isActive,
+                             )
+                             print(
+                                 f"{icons.green_dot} The {relName} relationship has been added."
+                             )
+                         except Exception:
+                             print(
+                                 f"{icons.red_dot} The {relName} relationship was not added."
+                             )
+
+                 print(f"\n{icons.in_progress} Creating roles...")
+                 for index, row in dfRole.iterrows():
+                     roleName = row["Role"]
+                     roleDesc = row["Description"]
+                     modPerm = row["Model Permission"]
+
+                     try:
+                         tom.model.Roles[roleName]
+                     except Exception:
+                         tom.add_role(
+                             role_name=roleName,
+                             model_permission=modPerm,
+                             description=roleDesc,
+                         )
+                         print(
+                             f"{icons.green_dot} The '{roleName}' role has been added."
+                         )
+
+                 print(f"\n{icons.in_progress} Creating row level security...")
+                 for index, row in dfRLS.iterrows():
+                     roleName = row["Role"]
+                     tName = row["Table"]
+                     expr = row["Filter Expression"]
+
+                     try:
+                         tom.set_rls(
+                             role_name=roleName, table_name=tName, filter_expression=expr
+                         )
+                         print(
+                             f"{icons.green_dot} Row level security for the '{tName}' table within the '{roleName}' role has been set."
+                         )
+                     except Exception:
+                         print(
+                             f"{icons.red_dot} Row level security for the '{tName}' table within the '{roleName}' role was not set."
+                         )
+
+                 print(f"\n{icons.in_progress} Creating perspectives...")
+                 for pName in dfP["Perspective Name"].unique():
+                     try:
+                         tom.model.Perspectives[pName]
+                     except Exception:
+                         tom.add_perspective(perspective_name=pName)
+                         print(
+                             f"{icons.green_dot} The '{pName}' perspective has been added."
+                         )
+
+                 print(f"\n{icons.in_progress} Adding objects to perspectives...")
+                 for index, row in dfP.iterrows():
+                     pName = row["Perspective Name"]
+                     tName = row["Table Name"]
+                     oName = row["Object Name"]
+                     oType = row["Object Type"]
+
+                     try:
+                         if oType == "Table":
+                             tom.add_to_perspective(
+                                 object=tom.model.Tables[tName], perspective_name=pName
+                             )
+                         elif oType == "Column":
+                             tom.add_to_perspective(
+                                 object=tom.model.Tables[tName].Columns[oName],
+                                 perspective_name=pName,
+                             )
+                         elif oType == "Measure":
+                             tom.add_to_perspective(
+                                 object=tom.model.Tables[tName].Measures[oName],
+                                 perspective_name=pName,
+                             )
+                         elif oType == "Hierarchy":
+                             tom.add_to_perspective(
+                                 object=tom.model.Tables[tName].Hierarchies[oName],
+                                 perspective_name=pName,
+                             )
+                     except Exception:
+                         pass
+
+                 print(f"\n{icons.in_progress} Creating translation languages...")
+                 for trName in dfTranslation["Culture Name"].unique():
+                     try:
+                         tom.model.Cultures[trName]
+                     except Exception:
+                         tom.add_translation(trName)
+                         print(
+                             f"{icons.green_dot} The '{trName}' translation language has been added."
+                         )
+
+                 print(f"\n{icons.in_progress} Creating translation values...")
+                 for index, row in dfTranslation.iterrows():
+                     trName = row["Culture Name"]
+                     tName = row["Table Name"]
+                     oName = row["Object Name"]
+                     oType = row["Object Type"]
+                     translation = row["Translation"]
+                     prop = row["Property"]
+
+                     if prop == "Caption":
+                         prop = "Name"
+                     elif prop == "DisplayFolder":
+                         prop = "Display Folder"
+
+                     try:
+                         if oType == "Table":
+                             tom.set_translation(
+                                 object=tom.model.Tables[tName],
+                                 language=trName,
+                                 property=prop,
+                                 value=translation,
+                             )
+                         elif oType == "Column":
+                             tom.set_translation(
+                                 object=tom.model.Tables[tName].Columns[oName],
+                                 language=trName,
+                                 property=prop,
+                                 value=translation,
+                             )
+                         elif oType == "Measure":
+                             tom.set_translation(
+                                 object=tom.model.Tables[tName].Measures[oName],
+                                 language=trName,
+                                 property=prop,
+                                 value=translation,
+                             )
+                         elif oType == "Hierarchy":
+                             tom.set_translation(
+                                 object=tom.model.Tables[tName].Hierarchies[oName],
+                                 language=trName,
+                                 property=prop,
+                                 value=translation,
+                             )
+                         elif oType == "Level":
+                             # A level object is named 'Hierarchy Name'[Level Name];
+                             # extract both parts from the object name.
+                             pattern = r"\[([^]]+)\]"
+                             matches = re.findall(pattern, oName)
+                             lName = matches[0]
+
+                             pattern = r"'([^']+)'"
+                             matches = re.findall(pattern, oName)
+                             hName = matches[0]
+                             tom.set_translation(
+                                 object=tom.model.Tables[tName]
+                                 .Hierarchies[hName]
+                                 .Levels[lName],
+                                 language=trName,
+                                 property=prop,
+                                 value=translation,
+                             )
+                     except Exception:
+                         pass
+
+                 print(
+                     f"\n{icons.green_dot} Migration of objects from '{dataset}' -> '{new_dataset}' is complete."
+                 )
+
+         except Exception:
+             if datetime.datetime.now() - start_time > timeout:
+                 break
+             time.sleep(1)
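
And a matching sketch for the second function, again with placeholder names and again assuming the sempy_labs.migration re-export:

    from sempy_labs.migration import migrate_model_objects_to_semantic_model

    migrate_model_objects_to_semantic_model(
        dataset="SalesModel",                # source import/DirectQuery model
        new_dataset="SalesModelDirectLake",  # target Direct Lake model
        workspace="Analytics",
        new_dataset_workspace="Analytics",
    )

Both new files wrap their work in the same retry pattern: writes through connect_semantic_model are retried once per second for up to a minute, which papers over the delay before a freshly created model accepts write connections. If the timeout elapses, the loop breaks without reporting an error, so callers may want to verify the outcome afterwards, for example with the migration_validation module also added in this release.
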