semantic-link-labs 0.6.0__py3-none-any.whl → 0.7.1__py3-none-any.whl
This diff shows the content of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in those registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic.
- semantic_link_labs-0.7.1.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.1.dist-info/RECORD +111 -0
- {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +26 -2
- sempy_labs/_ai.py +3 -65
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +9 -4
- sempy_labs/_generate_semantic_model.py +30 -56
- sempy_labs/_helper_functions.py +361 -14
- sempy_labs/_icons.py +10 -1
- sempy_labs/_list_functions.py +539 -260
- sempy_labs/_model_bpa.py +194 -18
- sempy_labs/_model_bpa_bulk.py +367 -0
- sempy_labs/_model_bpa_rules.py +19 -8
- sempy_labs/_model_dependencies.py +12 -10
- sempy_labs/_one_lake_integration.py +7 -7
- sempy_labs/_query_scale_out.py +61 -96
- sempy_labs/_refresh_semantic_model.py +7 -0
- sempy_labs/_translations.py +154 -1
- sempy_labs/_vertipaq.py +103 -90
- sempy_labs/directlake/__init__.py +5 -1
- sempy_labs/directlake/_directlake_schema_compare.py +27 -31
- sempy_labs/directlake/_directlake_schema_sync.py +55 -66
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
- sempy_labs/directlake/_get_shared_expression.py +1 -1
- sempy_labs/directlake/_guardrails.py +17 -13
- sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
- sempy_labs/directlake/_warm_cache.py +1 -1
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
- sempy_labs/lakehouse/_lakehouse.py +66 -9
- sempy_labs/lakehouse/_shortcuts.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +174 -182
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
- sempy_labs/migration/_refresh_calc_tables.py +92 -101
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +255 -139
- sempy_labs/report/_report_functions.py +26 -33
- sempy_labs/report/_report_rebind.py +31 -26
- sempy_labs/tom/_model.py +75 -58
- semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
- sempy_labs/directlake/_fallback.py +0 -60
- {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/top_level.txt +0 -0
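The most significant rewrite in this release is `sempy_labs/migration/_migrate_model_objects_to_semantic_model.py`, expanded below: the 0.6.0 hand-rolled `datetime`/`while not success` polling loop is replaced by a `retry` decorator imported from `sempy_labs._helper_functions`, applied to small `dyn_connect()` helpers that verify both semantic models are reachable before any objects are migrated. The decorator's implementation is not part of this diff; the following is only a minimal sketch of how such a decorator could behave, assuming the `sleep_time=` and `timeout_error_message=` keyword arguments seen at the call sites and a hypothetical one-minute deadline. It is not the library's actual code.

import time
from functools import wraps


def retry(sleep_time: int, timeout_error_message: str):
    # Hypothetical sketch matching the call sites in the diff below;
    # the real sempy_labs._helper_functions.retry may differ.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            deadline = time.time() + 60  # assumed one-minute budget, per the error message
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if time.time() >= deadline:
                        raise TimeoutError(timeout_error_message)
                    time.sleep(sleep_time)
        return wrapper
    return decorator


@retry(sleep_time=1, timeout_error_message="Function timed out after 1 minute")
def dyn_connect():
    # Keep probing the target semantic model until the connection succeeds.
    ...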
--- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py (0.6.0)
+++ sempy_labs/migration/_migrate_model_objects_to_semantic_model.py (0.7.1)
@@ -1,10 +1,12 @@
 import sempy
 import sempy.fabric as fabric
 import re
-import datetime
-import time
 from sempy_labs._list_functions import list_tables
-from sempy_labs._helper_functions import
+from sempy_labs._helper_functions import (
+    create_relationship_name,
+    retry,
+    format_dax_object_name,
+)
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
@@ -49,7 +51,6 @@ def migrate_model_objects_to_semantic_model(
     dfT = list_tables(dataset, workspace)
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
-    dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
     dfRole = fabric.get_roles(dataset=dataset, workspace=workspace)
     dfRLS = fabric.get_row_level_security_permissions(
         dataset=dataset, workspace=workspace
@@ -64,459 +65,474 @@ def migrate_model_objects_to_semantic_model(
     dfP_fp = dfP_cc[dfP_cc["Query"].str.contains("NAMEOF")]

     print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
-start_time = datetime.datetime.now()
-timeout = datetime.timedelta(minutes=1)
-success = False
-
-while not success:
-try:
-with connect_semantic_model(
-dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
-) as tom:
-success = True
-
-isDirectLake = any(
-str(p.Mode) == "DirectLake"
-for t in tom.model.Tables
-for p in t.Partitions
-)

-
-
-
-
-
-
-
-
-].iloc[0]
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect():
+        with connect_semantic_model(
+            dataset=new_dataset, readonly=True, workspace=new_dataset_workspace
+        ) as tom:

-
-f"{icons.green_dot} The '{t.Name}' table's properties have been updated."
-)
+            tom.model

-
-for t in tom.model.Tables:
-if (
-t.Name not in dfP_fp["Table Name"].values
-):  # do not include field parameters
-dfT_filtered = dfT[dfT["Name"] == t.Name]
-tType = dfT_filtered["Type"].iloc[0]
-for c in t.Columns:
-if not c.Name.startswith("RowNumber-"):
-dfC_filt = dfC[
-(dfC["Table Name"] == t.Name)
-& (dfC["Column Name"] == c.Name)
-]
-cName = dfC_filt["Column Name"].iloc[0]
-c.Name = cName
-if tType == "Table":
-c.SourceColumn = cName.replace(" ", "_")
-c.IsHidden = bool(dfC_filt["Hidden"].iloc[0])
-c.DataType = System.Enum.Parse(
-TOM.DataType, dfC_filt["Data Type"].iloc[0]
-)
-c.DisplayFolder = dfC_filt["Display Folder"].iloc[0]
-c.FormatString = dfC_filt["Format String"].iloc[0]
-c.SummarizeBy = System.Enum.Parse(
-TOM.AggregateFunction,
-dfC_filt["Summarize By"].iloc[0],
-)
-c.DataCategory = dfC_filt["Data Category"].iloc[0]
-c.IsKey = bool(dfC_filt["Key"].iloc[0])
-sbc = dfC_filt["Sort By Column"].iloc[0]
-
-if sbc is not None:
-if any(
-o.Name == sbc and o.Parent.Name == c.Parent.Name
-for o in tom.all_columns()
-):
-c.SortByColumn = tom.model.Tables[
-t.Name
-].Columns[sbc]
-else:
-print(
-f"{icons.red_dot} Failed to create '{sbc}' as a Sort By Column for the '{c.Name}' in the '{t.Name}' table."
-)
-print(
-f"{icons.green_dot} The '{t.Name}'[{c.Name}] column's properties have been updated."
-)
+    dyn_connect()

-
-
-
-
-
-
-
-
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect2():
+        with connect_semantic_model(
+            dataset=dataset, readonly=True, workspace=workspace
+        ) as tom:
+
+            tom.model
+
+    dyn_connect2()
+
+    with connect_semantic_model(
+        dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
+    ) as tom:
+
+        isDirectLake = tom.is_direct_lake()
+
+        print(f"\n{icons.in_progress} Updating table properties...")
+        for t in tom.model.Tables:
+            t.IsHidden = bool(dfT.loc[dfT["Name"] == t.Name, "Hidden"].iloc[0])
+            t.Description = dfT.loc[dfT["Name"] == t.Name, "Description"].iloc[0]
+            t.DataCategory = dfT.loc[dfT["Name"] == t.Name, "Data Category"].iloc[0]
+
+            print(
+                f"{icons.green_dot} The '{t.Name}' table's properties have been updated."
+            )
+
+        print(f"\n{icons.in_progress} Updating column properties...")
+        for t in tom.model.Tables:
+            if (
+                t.Name not in dfP_fp["Table Name"].values
+            ):  # do not include field parameters
+                dfT_filtered = dfT[dfT["Name"] == t.Name]
+                tType = dfT_filtered["Type"].iloc[0]
+                for c in t.Columns:
+                    if not c.Name.startswith("RowNumber-"):
+                        dfC_filt = dfC[
+                            (dfC["Table Name"] == t.Name)
+                            & (dfC["Column Name"] == c.Name)
                         ]
-
-
-
-
-
-
-
-hName = r["Hierarchy Name"]
-hDesc = r["Hierarchy Description"]
-hHid = bool(r["Hierarchy Hidden"])
-cols = r["Column Name"]
-lvls = r["Level Name"]
-
-if not any(
-t.Name == tName and h.Name == hName
-for t in tom.model.Tables
-for h in t.Hierarchies
-):
-tom.add_hierarchy(
-table_name=tName,
-hierarchy_name=hName,
-hierarchy_description=hDesc,
-hierarchy_hidden=hHid,
-columns=cols,
-levels=lvls,
+                        cName = dfC_filt["Column Name"].iloc[0]
+                        c.Name = cName
+                        # if tType == "Table":
+                        #    c.SourceColumn = cName.replace(" ", "_")
+                        c.IsHidden = bool(dfC_filt["Hidden"].iloc[0])
+                        c.DataType = System.Enum.Parse(
+                            TOM.DataType, dfC_filt["Data Type"].iloc[0]
                        )
-
-
-
-
-
-for i, r in dfM.iterrows():
-tName = r["Table Name"]
-mName = r["Measure Name"]
-mExpr = r["Measure Expression"]
-mHidden = bool(r["Measure Hidden"])
-mDF = r["Measure Display Folder"]
-mDesc = r["Measure Description"]
-mFS = r["Format String"]
-
-if not any(m.Name == mName for m in tom.all_measures()):
-tom.add_measure(
-table_name=tName,
-measure_name=mName,
-expression=mExpr,
-hidden=mHidden,
-display_folder=mDF,
-description=mDesc,
-format_string=mFS,
+                        c.DisplayFolder = dfC_filt["Display Folder"].iloc[0]
+                        c.FormatString = dfC_filt["Format String"].iloc[0]
+                        c.SummarizeBy = System.Enum.Parse(
+                            TOM.AggregateFunction,
+                            dfC_filt["Summarize By"].iloc[0],
                        )
+                        c.DataCategory = dfC_filt["Data Category"].iloc[0]
+                        c.IsKey = bool(dfC_filt["Key"].iloc[0])
+                        sbc = dfC_filt["Sort By Column"].iloc[0]
+
+                        if sbc is not None:
+                            if any(
+                                o.Name == sbc and o.Parent.Name == c.Parent.Name
+                                for o in tom.all_columns()
+                            ):
+                                c.SortByColumn = tom.model.Tables[t.Name].Columns[sbc]
+                            else:
+                                print(
+                                    f"{icons.red_dot} Failed to create '{sbc}' as a Sort By Column for the '{c.Name}' in the '{t.Name}' table."
+                                )
                        print(
-f"{icons.green_dot} The '{
+                            f"{icons.green_dot} The '{t.Name}'[{c.Name}] column's properties have been updated."
                        )
-print(f"\n{icons.in_progress} Creating calculation groups...")
-for cgName in dfCI["Calculation Group Name"].unique():

-
-
-
-
+        print(f"\n{icons.in_progress} Creating hierarchies...")
+        dfH_grouped = (
+            dfH.groupby(
+                [
+                    "Table Name",
+                    "Hierarchy Name",
+                    "Hierarchy Hidden",
+                    "Hierarchy Description",
+                ]
+            )
+            .agg({"Level Name": list, "Column Name": list})
+            .reset_index()
+        )
+
+        for i, r in dfH_grouped.iterrows():
+            tName = r["Table Name"]
+            hName = r["Hierarchy Name"]
+            hDesc = r["Hierarchy Description"]
+            hHid = bool(r["Hierarchy Hidden"])
+            cols = r["Column Name"]
+            lvls = r["Level Name"]
+
+            missing_columns = []
+            for col in cols:
+                if not any(
+                    c.Name == col
+                    for t in tom.model.Tables
+                    if t.Name == tName
+                    for c in t.Columns
+                ):
+                    missing_columns.append(col)
+
+            if any(
+                t.Name == tName and h.Name == hName
+                for t in tom.model.Tables
+                for h in t.Hierarchies
+            ):
+                print(
+                    f"{icons.warning} The '{hName}' hierarchy within the '{tName}' table already exists."
+                )
+            elif len(missing_columns) > 0:
+                print(
+                    f"{icons.red_dot} The '{hName}' hierarchy within the '{tName}' table cannot be created as the {missing_columns} column)s) do not exist."
+                )
+            else:
+                tom.add_hierarchy(
+                    table_name=tName,
+                    hierarchy_name=hName,
+                    hierarchy_description=hDesc,
+                    hierarchy_hidden=hHid,
+                    columns=cols,
+                    levels=lvls,
+                )
+                print(f"{icons.green_dot} The '{hName}' hierarchy has been added.")
+
+        print(f"\n{icons.in_progress} Creating measures...")
+        for i, r in dfM.iterrows():
+            tName = r["Table Name"]
+            mName = r["Measure Name"]
+            mExpr = r["Measure Expression"]
+            mHidden = bool(r["Measure Hidden"])
+            mDF = r["Measure Display Folder"]
+            mDesc = r["Measure Description"]
+            mFS = r["Format String"]
+
+            if not any(m.Name == mName for m in tom.all_measures()):
+                tom.add_measure(
+                    table_name=tName,
+                    measure_name=mName,
+                    expression=mExpr,
+                    hidden=mHidden,
+                    display_folder=mDF,
+                    description=mDesc,
+                    format_string=mFS,
+                )
+                print(f"{icons.green_dot} The '{mName}' measure has been added.")
+        print(f"\n{icons.in_progress} Creating calculation groups...")
+        for cgName in dfCI["Calculation Group Name"].unique():
+
+            isHidden = bool(
+                dfCI.loc[(dfCI["Calculation Group Name"] == cgName), "Hidden"].iloc[0]
+            )
+            prec = int(
+                dfCI.loc[(dfCI["Calculation Group Name"] == cgName), "Precedence"].iloc[
+                    0
+                ]
+            )
+            desc = dfCI.loc[
+                (dfCI["Calculation Group Name"] == cgName), "Description"
+            ].iloc[0]
+
+            if not any(t.Name == cgName for t in tom.model.Tables):
+                tom.add_calculation_group(
+                    name=cgName,
+                    description=desc,
+                    precedence=prec,
+                    hidden=isHidden,
+                )
+                print(
+                    f"{icons.green_dot} The '{cgName}' calculation group has been added."
+                )
+                tom.model.DiscourageImplicitMeasures = True
+
+            # print(
+            #    f"\n{icons.in_progress} Updating calculation group column names..."
+            # )
+            dfC_filt = dfC[(dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)]
+            colName = dfC_filt["Column Name"].iloc[0]
+            tom.model.Tables[cgName].Columns["Name"].Name = colName
+
+            calcItems = dfCI.loc[
+                dfCI["Calculation Group Name"] == cgName,
+                "Calculation Item Name",
+            ].unique()
+
+            print(f"\n{icons.in_progress} Creating calculation items...")
+            for calcItem in calcItems:
+                ordinal = int(
+                    dfCI.loc[
+                        (dfCI["Calculation Group Name"] == cgName)
+                        & (dfCI["Calculation Item Name"] == calcItem),
+                        "Ordinal",
+                    ].iloc[0]
+                )
+                expr = dfCI.loc[
+                    (dfCI["Calculation Group Name"] == cgName)
+                    & (dfCI["Calculation Item Name"] == calcItem),
+                    "Expression",
+                ].iloc[0]
+                fse = dfCI.loc[
+                    (dfCI["Calculation Group Name"] == cgName)
+                    & (dfCI["Calculation Item Name"] == calcItem),
+                    "Format String Expression",
+                ].iloc[0]
+
+                if not any(
+                    ci.CalculationGroup.Parent.Name == cgName and ci.Name == calcItem
+                    for ci in tom.all_calculation_items()
+                ):
+                    tom.add_calculation_item(
+                        table_name=cgName,
+                        calculation_item_name=calcItem,
+                        expression=expr,
+                        format_string_expression=fse,
+                        ordinal=ordinal,
                    )
-
-
-(dfCI["Calculation Group Name"] == cgName), "Precedence"
-].iloc[0]
+                    print(
+                        f"{icons.green_dot} The '{calcItem}' has been added to the '{cgName}' calculation group."
                    )
-desc = dfCI.loc[
-(dfCI["Calculation Group Name"] == cgName), "Description"
-].iloc[0]

-
-
-
-
-precedence=prec,
-hidden=isHidden,
-)
-print(
-f"{icons.green_dot} The '{cgName}' calculation group has been added."
-)
-tom.model.DiscourageImplicitMeasures = True
+        print(f"\n{icons.in_progress} Creating relationships...")
+        with connect_semantic_model(
+            dataset=dataset, readonly=True, workspace=workspace
+        ) as tom_old:

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-for calcItem in calcItems:
-ordinal = int(
-dfCI.loc[
-(dfCI["Calculation Group Name"] == cgName)
-& (dfCI["Calculation Item Name"] == calcItem),
-"Ordinal",
-].iloc[0]
-)
-expr = dfCI.loc[
-(dfCI["Calculation Group Name"] == cgName)
-& (dfCI["Calculation Item Name"] == calcItem),
-"Expression",
-].iloc[0]
-fse = dfCI.loc[
-(dfCI["Calculation Group Name"] == cgName)
-& (dfCI["Calculation Item Name"] == calcItem),
-"Format String Expression",
-].iloc[0]
-
-if not any(
-ci.CalculationGroup.Parent.Name == cgName
-and ci.Name == calcItem
-for ci in tom.all_calculation_items()
-):
-tom.add_calculation_item(
-table_name=cgName,
-calculation_item_name=calcItem,
-expression=expr,
-format_string_expression=fse,
-ordinal=ordinal,
-)
-print(
-f"{icons.green_dot} The '{calcItem}' has been added to the '{cgName}' calculation group."
-)
-
-print(f"\n{icons.in_progress} Creating relationships...")
-for index, row in dfR.iterrows():
-fromTable = row["From Table"]
-fromColumn = row["From Column"]
-toTable = row["To Table"]
-toColumn = row["To Column"]
-isActive = row["Active"]
-cfb = row["Cross Filtering Behavior"]
-sfb = row["Security Filtering Behavior"]
-rori = row["Rely On Referential Integrity"]
-mult = row["Multiplicity"]
-
-card_mapping = {"m": "Many", "1": "One", "0": "None"}
-
-fromCard = card_mapping.get(mult[0])
-toCard = card_mapping.get(mult[-1])
-
-relName = create_relationship_name(
-fromTable, fromColumn, toTable, toColumn
+            for r in tom_old.model.Relationships:
+                relName = create_relationship_name(
+                    r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name
+                )
+
+                # Relationship already exists
+                if any(
+                    rel.FromTable.Name == r.FromTable.Name
+                    and rel.FromColumn.Name == r.FromColumn.Name
+                    and rel.ToTable.Name == r.ToTable.Name
+                    and rel.ToColumn.Name == r.ToColumn.Name
+                    for rel in tom.model.Relationships
+                ):
+                    print(
+                        f"{icons.warning} The {relName} relationship was not created as it already exists in the '{new_dataset}' semantic model within the '{new_dataset_workspace}' workspace."
                    )

-
-
-
-
-
-
-
-
-
+                # Direct Lake with incompatible column data types
+                elif isDirectLake and r.FromColumn.DataType != r.ToColumn.DataType:
+                    print(
+                        f"{icons.warning} The {relName} relationship was not created as Direct Lake does not support relationships based on columns with different data types."
+                    )
+                # Direct Lake using DateTime columns
+                elif isDirectLake and (
+                    r.FromColumn.DataType == TOM.DataType.DateTime
+                    or r.ToColumn.DataType == TOM.DataType.DateTime
+                ):
+                    print(
+                        f"{icons.red_dot} The {relName} relationship was not created as Direct Lake does not support relationships based on columns of DateTime data type."
+                    )
+                # Columns do not exist in the new semantic model
+                elif not any(
+                    c.Name == r.FromColumn.Name and c.Parent.Name == r.FromTable.Name
+                    for c in tom.all_columns()
+                ) or not any(
+                    c.Name == r.ToColumn.Name and c.Parent.Name == r.ToTable.Name
+                    for c in tom.all_columns()
+                ):
+                    # Direct lake and based on calculated column
+                    if isDirectLake and (
+                        any(
+                            c.Name == r.FromColumn.Name
+                            and c.Parent.Name == r.FromTable.Name
+                            for c in tom_old.all_calculated_columns()
                        )
-
-
-
-
-and r.ToColumn.Name == toColumn
-and (
-r.FromColumn.DataType == "DateTime"
-or r.ToColumn.DataType == "DateTime"
+                        or any(
+                            c.Name == r.ToColumn.Name
+                            and c.Parent.Name == r.ToTable.Name
+                            for c in tom_old.all_calculated_columns()
                        )
-for r in tom.model.Relationships
                    ):
                        print(
-f"{icons.
+                            f"{icons.red_dot} The {relName} relationship was not created as the necssary column(s) do not exist. This is due to Direct Lake not supporting calculated columns."
                        )
-elif
-
-and
-
-and r.ToColumn.Name == toColumn
-and (r.FromColumn.DataType != r.ToColumn.DataType)
-for r in tom.model.Relationships
+                    elif not any(
+                        c.Name == r.FromColumn.Name
+                        and c.Parent.Name == r.FromTable.Name
+                        for c in tom.all_columns()
                    ):
                        print(
-f"{icons.
+                            f"{icons.red_dot} The {relName} relationship cannot be created because the {format_dax_object_name(r.FromTable.Name, r.FromColumn.Name)} column is not available in the '{new_dataset}' semantic model within the '{new_dataset_workspace}' workspace."
                        )
-
-
-
-
-and r.ToTable.Name == toTable
-and r.ToColumn.Name == toColumn
-for r in tom.model.Relationships
-):
-tom.add_relationship(
-from_table=fromTable,
-from_column=fromColumn,
-to_table=toTable,
-to_column=toColumn,
-from_cardinality=fromCard,
-to_cardinality=toCard,
-cross_filtering_behavior=cfb,
-security_filtering_behavior=sfb,
-rely_on_referential_integrity=rori,
-is_active=isActive,
-)
-print(
-f"{icons.green_dot} The {relName} relationship has been added."
-)
-else:
-print(
-f"{icons.red_dot} The {relName} relationship was not added."
-)
-
-print(f"\n{icons.in_progress} Creating roles...")
-for index, row in dfRole.iterrows():
-roleName = row["Role"]
-roleDesc = row["Description"]
-modPerm = row["Model Permission"]
-
-if not any(r.Name == roleName for r in tom.model.Roles):
-tom.add_role(
-role_name=roleName,
-model_permission=modPerm,
-description=roleDesc,
-)
-print(
-f"{icons.green_dot} The '{roleName}' role has been added."
-)
-
-print(f"\n{icons.in_progress} Creating row level security...")
-for index, row in dfRLS.iterrows():
-roleName = row["Role"]
-tName = row["Table"]
-expr = row["Filter Expression"]
-
-if any(t.Name == tName for t in tom.model.Tables):
-tom.set_rls(
-role_name=roleName, table_name=tName, filter_expression=expr
-)
-print(
-f"{icons.green_dot} Row level security for the '{tName}' table within the '{roleName}' role has been set."
-)
-else:
+                    elif not any(
+                        c.Name == r.ToColumn.Name and c.Parent.Name == r.ToTable.Name
+                        for c in tom.all_columns()
+                    ):
                        print(
-f"{icons.red_dot}
+                            f"{icons.red_dot} The {relName} relationship cannot be created because the {format_dax_object_name(r.ToTable.Name, r.ToColumn.Name)} column is not available in the '{new_dataset}' semantic model within the '{new_dataset_workspace}' workspace."
                        )
+                else:
+                    tom.add_relationship(
+                        from_table=r.FromTable.Name,
+                        from_column=r.FromColumn.Name,
+                        to_table=r.ToTable.Name,
+                        to_column=r.ToColumn.Name,
+                        from_cardinality=str(r.FromCardinality),
+                        to_cardinality=str(r.ToCardinality),
+                        cross_filtering_behavior=str(r.CrossFilteringBehavior),
+                        security_filtering_behavior=str(r.SecurityFilteringBehavior),
+                        rely_on_referential_integrity=r.RelyOnReferentialIntegrity,
+                        is_active=r.IsActive,
+                    )
+                    print(
+                        f"{icons.green_dot} The {relName} relationship has been added."
+                    )

-
-
+        print(f"\n{icons.in_progress} Creating roles...")
+        for index, row in dfRole.iterrows():
+            roleName = row["Role"]
+            roleDesc = row["Description"]
+            modPerm = row["Model Permission"]
+
+            if not any(r.Name == roleName for r in tom.model.Roles):
+                tom.add_role(
+                    role_name=roleName,
+                    model_permission=modPerm,
+                    description=roleDesc,
+                )
+                print(f"{icons.green_dot} The '{roleName}' role has been added.")

-
-
-
-
-
+        print(f"\n{icons.in_progress} Creating row level security...")
+        for index, row in dfRLS.iterrows():
+            roleName = row["Role"]
+            tName = row["Table"]
+            expr = row["Filter Expression"]

-
-
-
-
-
-
-
-
-
-
-
-object=tom.model.Tables[tName], perspective_name=pName
-)
-elif oType == "Column":
-tom.add_to_perspective(
-object=tom.model.Tables[tName].Columns[oName],
-perspective_name=pName,
-)
-elif oType == "Measure":
-tom.add_to_perspective(
-object=tom.model.Tables[tName].Measures[oName],
-perspective_name=pName,
-)
-elif oType == "Hierarchy":
-tom.add_to_perspective(
-object=tom.model.Tables[tName].Hierarchies[oName],
-perspective_name=pName,
-)
-except Exception:
-pass
-
-print(f"\n{icons.in_progress} Creating translation languages...")
-for trName in dfTranslation["Culture Name"].unique():
-if not any(c.Name == trName for c in tom.model.Cultures):
-tom.add_translation(trName)
-print(
-f"{icons.green_dot} The '{trName}' translation language has been added."
-)
+            if any(t.Name == tName for t in tom.model.Tables):
+                tom.set_rls(
+                    role_name=roleName, table_name=tName, filter_expression=expr
+                )
+                print(
+                    f"{icons.green_dot} Row level security for the '{tName}' table within the '{roleName}' role has been set."
+                )
+            else:
+                print(
+                    f"{icons.red_dot} Row level security for the '{tName}' table within the '{roleName}' role was not set."
+                )

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-tom.set_translation(
-object=tom.model.Tables[tName].Hierarchies[oName],
-language=trName,
-property=prop,
-value=translation,
-)
-elif oType == "Level":
-
-pattern = r"\[([^]]+)\]"
-matches = re.findall(pattern, oName)
-lName = matches[0]
-
-pattern = r"'([^']+)'"
-matches = re.findall(pattern, oName)
-hName = matches[0]
-tom.set_translation(
-object=tom.model.Tables[tName]
-.Hierarchies[hName]
-.Levels[lName],
-language=trName,
-property=prop,
-value=translation,
-)
-except Exception:
-pass
+        print(f"\n{icons.in_progress} Creating perspectives...")
+        for pName in dfP["Perspective Name"].unique():
+
+            if not any(p.Name == pName for p in tom.model.Perspectives):
+                tom.add_perspective(perspective_name=pName)
+                print(f"{icons.green_dot} The '{pName}' perspective has been added.")
+
+        print(f"\n{icons.in_progress} Adding objects to perspectives...")
+        for index, row in dfP.iterrows():
+            pName = row["Perspective Name"]
+            tName = row["Table Name"]
+            oName = row["Object Name"]
+            oType = row["Object Type"]
+            tType = dfT.loc[(dfT["Name"] == tName), "Type"].iloc[0]
+
+            try:
+                if oType == "Table":
+                    tom.add_to_perspective(
+                        object=tom.model.Tables[tName], perspective_name=pName
+                    )
+                elif oType == "Column":
+                    tom.add_to_perspective(
+                        object=tom.model.Tables[tName].Columns[oName],
+                        perspective_name=pName,
+                    )
+                elif oType == "Measure":
+                    tom.add_to_perspective(
+                        object=tom.model.Tables[tName].Measures[oName],
+                        perspective_name=pName,
+                    )
+                elif oType == "Hierarchy":
+                    tom.add_to_perspective(
+                        object=tom.model.Tables[tName].Hierarchies[oName],
+                        perspective_name=pName,
+                    )
+            except Exception:
+                pass

+        print(f"\n{icons.in_progress} Creating translation languages...")
+        for trName in dfTranslation["Culture Name"].unique():
+            if not any(c.Name == trName for c in tom.model.Cultures):
+                tom.add_translation(trName)
                print(
-f"
+                    f"{icons.green_dot} The '{trName}' translation language has been added."
                )

-
-
-
-
+        print(f"\n{icons.in_progress} Creating translation values...")
+        for index, row in dfTranslation.iterrows():
+            trName = row["Culture Name"]
+            tName = row["Table Name"]
+            oName = row["Object Name"]
+            oType = row["Object Type"]
+            translation = row["Translation"]
+            prop = row["Property"]
+
+            if prop == "Caption":
+                prop = "Name"
+            elif prop == "DisplayFolder":
+                prop = "Display Folder"
+
+            try:
+                if oType == "Table":
+                    tom.set_translation(
+                        object=tom.model.Tables[tName],
+                        language=trName,
+                        property=prop,
+                        value=translation,
+                    )
+                elif oType == "Column":
+                    tom.set_translation(
+                        object=tom.model.Tables[tName].Columns[oName],
+                        language=trName,
+                        property=prop,
+                        value=translation,
+                    )
+                elif oType == "Measure":
+                    tom.set_translation(
+                        object=tom.model.Tables[tName].Measures[oName],
+                        language=trName,
+                        property=prop,
+                        value=translation,
+                    )
+                elif oType == "Hierarchy":
+                    tom.set_translation(
+                        object=tom.model.Tables[tName].Hierarchies[oName],
+                        language=trName,
+                        property=prop,
+                        value=translation,
+                    )
+                elif oType == "Level":
+
+                    pattern = r"\[([^]]+)\]"
+                    matches = re.findall(pattern, oName)
+                    lName = matches[0]
+
+                    pattern = r"'([^']+)'"
+                    matches = re.findall(pattern, oName)
+                    hName = matches[0]
+                    tom.set_translation(
+                        object=tom.model.Tables[tName].Hierarchies[hName].Levels[lName],
+                        language=trName,
+                        property=prop,
+                        value=translation,
+                    )
+            except Exception:
+                pass
+
+    print(
+        f"\n{icons.green_dot} Migration of objects from '{dataset}' -> '{new_dataset}' is complete."
+    )