semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
Potentially problematic release: this version of semantic-link-labs was flagged by the registry's scanner.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
--- a/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
+++ b/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
@@ -1,13 +1,13 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import datetime
+import datetime
+import time
 from sempy_labs._list_functions import list_tables
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
-from sempy_labs._helper_functions import resolve_lakehouse_name
+from sempy_labs._helper_functions import resolve_lakehouse_name, retry
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
-from typing import
+from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 
@@ -61,105 +61,99 @@ def migrate_tables_columns_to_semantic_model(
     lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
     # Check that lakehouse is attached to the notebook
-
-
-
-
-        shEx = get_shared_expression(lakehouse, lakehouse_workspace)
-
-        dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
-        dfT = list_tables(dataset, workspace)
-        dfT.rename(columns={"Type": "Table Type"}, inplace=True)
-        dfC = pd.merge(
-            dfC,
-            dfT[["Name", "Table Type"]],
-            left_on="Table Name",
-            right_on="Name",
-            how="left",
-        )
-        dfT_filt = dfT[dfT["Table Type"] == "Table"]
-        dfC_filt = dfC[
-            (dfC["Table Type"] == "Table")
-            & ~(dfC["Column Name"].str.startswith("RowNumber-"))
-            & (dfC["Type"] != "Calculated")
-        ]
-
-        print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
-        start_time = datetime.datetime.now()
-        timeout = datetime.timedelta(minutes=1)
-        success = False
-
-        while not success:
-            try:
-                with connect_semantic_model(
-                    dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
-                ) as tom:
-                    success = True
-                    try:
-                        tom.model.Expressions["DatabaseQuery"]
-                    except:
-                        tom.add_expression("DatabaseQuery", expression=shEx)
-                        print(
-                            f"{icons.green_dot} The 'DatabaseQuery' expression has been added."
-                        )
-
-                    for i, r in dfT_filt.iterrows():
-                        tName = r["Name"]
-                        tDC = r["Data Category"]
-                        tHid = bool(r["Hidden"])
-                        tDesc = r["Description"]
-
-                        try:
-                            tom.model.Tables[tName]
-                        except:
-                            tom.add_table(
-                                name=tName,
-                                description=tDesc,
-                                data_category=tDC,
-                                hidden=tHid,
-                            )
-                            tom.add_entity_partition(
-                                table_name=tName, entity_name=tName.replace(" ", "_")
-                            )
-                            print(
-                                f"{icons.green_dot} The '{tName}' table has been added."
-                            )
-
-                    for i, r in dfC_filt.iterrows():
-                        tName = r["Table Name"]
-                        cName = r["Column Name"]
-                        scName = r["Source"].replace(" ", "_")
-                        cHid = bool(r["Hidden"])
-                        cDataType = r["Data Type"]
-
-                        try:
-                            tom.model.Tables[tName].Columns[cName]
-                        except:
-                            tom.add_data_column(
-                                table_name=tName,
-                                column_name=cName,
-                                source_column=scName,
-                                hidden=cHid,
-                                data_type=cDataType,
-                            )
-                            print(
-                                f"{icons.green_dot} The '{tName}'[{cName}] column has been added."
-                            )
-
-                    print(
-                        f"\n{icons.green_dot} All regular tables and columns have been added to the '{new_dataset}' semantic model."
-                    )
-            except Exception as e:
-                if datetime.datetime.now() - start_time > timeout:
-                    break
-                time.sleep(1)
-    else:
-        print(
-            f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook or specify the lakehouse/lakehouse_workspace parameters."
-        )
-        print(
+    if not lakehouse_attached() and (lakehouse is None and lakehouse_workspace is None):
+        raise ValueError(
+            f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook"
+            f" or specify the lakehouse/lakehouse_workspace parameters."
             "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
+            "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
         )
+    shEx = get_shared_expression(lakehouse, lakehouse_workspace)
+
+    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfT = list_tables(dataset, workspace)
+    dfT.rename(columns={"Type": "Table Type"}, inplace=True)
+    dfC = pd.merge(
+        dfC,
+        dfT[["Name", "Table Type"]],
+        left_on="Table Name",
+        right_on="Name",
+        how="left",
+    )
+    dfT_filt = dfT[dfT["Table Type"] == "Table"]
+    dfC_filt = dfC[
+        (dfC["Table Type"] == "Table")
+        & ~(dfC["Column Name"].str.startswith("RowNumber-"))
+        & (dfC["Type"] != "Calculated")
+    ]
+
+    print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
+
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect():
+        with connect_semantic_model(
+            dataset=new_dataset, readonly=True, workspace=new_dataset_workspace
+        ) as tom:
+
+            tom.model
+
+    dyn_connect()
+
+    with connect_semantic_model(
+        dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
+    ) as tom:
+        if not any(e.Name == "DatabaseQuery" for e in tom.model.Expressions):
+            tom.add_expression("DatabaseQuery", expression=shEx)
+            print(f"{icons.green_dot} The 'DatabaseQuery' expression has been added.")
+
+        for i, r in dfT_filt.iterrows():
+            tName = r["Name"]
+            tDC = r["Data Category"]
+            tHid = bool(r["Hidden"])
+            tDesc = r["Description"]
+            ent_name = tName.replace(" ", "_")
+            for char in icons.special_characters:
+                ent_name = ent_name.replace(char, "")
+
+            if not any(t.Name == tName for t in tom.model.Tables):
+                tom.add_table(
+                    name=tName,
+                    description=tDesc,
+                    data_category=tDC,
+                    hidden=tHid,
+                )
+                tom.add_entity_partition(table_name=tName, entity_name=ent_name)
+                print(f"{icons.green_dot} The '{tName}' table has been added.")
+
+        for i, r in dfC_filt.iterrows():
+            tName = r["Table Name"]
+            cName = r["Column Name"]
+            scName = r["Source"].replace(" ", "_")
+            cHid = bool(r["Hidden"])
+            cDataType = r["Data Type"]
+            for char in icons.special_characters:
+                scName = scName.replace(char, "")
+
+            if scName.endswith("_"):
+                scName = scName[:-1]
+
+            if not any(
+                c.Name == cName and c.Parent.Name == tName for c in tom.all_columns()
+            ):
+                tom.add_data_column(
+                    table_name=tName,
+                    column_name=cName,
+                    source_column=scName,
+                    hidden=cHid,
+                    data_type=cDataType,
+                )
+                print(
+                    f"{icons.green_dot} The '{tName}'[{cName}] column has been added."
+                )
+
         print(
-            "\
+            f"\n{icons.green_dot} All regular tables and columns have been added to the '{new_dataset}' semantic model."
         )
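Both migration hunks in this view replace the hand-rolled `while not success:` / `time.sleep(1)` polling loop with a `retry` decorator imported from `sempy_labs._helper_functions` (that module's diff, +371 -20 above, is not rendered here). Below is a minimal sketch of a decorator compatible with these call sites; the one-minute deadline is inferred from the timeout message at the call sites, and everything else is an assumption rather than the library's actual implementation:

```python
import datetime
import time
from functools import wraps


def retry(sleep_time: int, timeout_error_message: str):
    """Hypothetical sketch of sempy_labs._helper_functions.retry."""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # The "timed out after 1 minute" message at the call sites
            # suggests a fixed one-minute deadline.
            deadline = datetime.datetime.now() + datetime.timedelta(minutes=1)
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if datetime.datetime.now() > deadline:
                        raise TimeoutError(timeout_error_message)
                    time.sleep(sleep_time)  # back off before retrying

        return wrapper

    return decorator
```

The decorated `dyn_connect()` simply opens a read-only TOM connection and touches `tom.model`, so it keeps retrying until the freshly created dataset is actually reachable before any write connection is opened.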
--- a/sempy_labs/migration/_migration_validation.py
+++ b/sempy_labs/migration/_migration_validation.py
@@ -1,10 +1,10 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from typing import
+from typing import Optional
 from sempy_labs._list_functions import list_semantic_model_objects
 from sempy._utils._log import log
 
+
 @log
 def migration_validation(
     dataset: str,
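The rewritten bodies above and below also drop bare `try`/`except` existence probes against TOM collections (e.g. `tom.model.Tables[tName]`) in favor of explicit `any(...)` checks. A self-contained toy contrast of the two idioms; `Table` here is a stand-in dataclass, not the TOM type:

```python
from dataclasses import dataclass


@dataclass
class Table:
    Name: str


tables = [Table("Sales"), Table("Date")]
tName = "Customer"

# 0.5.0 style: exception-driven membership test; a bare except would
# also swallow unrelated failures.
try:
    next(t for t in tables if t.Name == tName)
except StopIteration:
    print(f"'{tName}' is missing; add it")

# 0.7.0 style: explicit membership test, no exception control flow.
if not any(t.Name == tName for t in tables):
    print(f"'{tName}' is missing; add it")
```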
--- a/sempy_labs/migration/_refresh_calc_tables.py
+++ b/sempy_labs/migration/_refresh_calc_tables.py
@@ -1,10 +1,10 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import re
+import re
+from sempy_labs._helper_functions import retry
 from pyspark.sql import SparkSession
 from sempy_labs.tom import connect_semantic_model
-from typing import
+from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 
@@ -25,105 +25,99 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
     """
 
     spark = SparkSession.builder.getOrCreate()
-
-    start_time = datetime.datetime.now()
-    timeout = datetime.timedelta(minutes=1)
-    success = False
-
     workspace = fabric.resolve_workspace_name(workspace)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect():
+        with connect_semantic_model(
+            dataset=dataset, readonly=True, workspace=workspace
+        ) as tom:
+
+            tom.model
+
+    dyn_connect()
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+        for a in tom.model.Annotations:
+            if any(a.Name == t.Name for t in tom.model.Tables):
+                tName = a.Name
+                query = a.Value
+
+                if not query.startswith("EVALUATE"):
+                    daxquery = "EVALUATE \n" + query
+                else:
+                    daxquery = query
+
+                try:
+                    df = fabric.evaluate_dax(
+                        dataset=dataset,
+                        dax_string=daxquery,
+                        workspace=workspace,
+                    )
+
+                    # Update column names for non-field parameters
+                    if query.find("NAMEOF") == -1:
+                        for old_column_name in df.columns:
+                            pattern = r"\[([^\]]+)\]"
+
+                            matches = re.findall(pattern, old_column_name)
+                            new_column_name = matches[0]
+                            new_column_name = new_column_name.replace(" ", "")
+
+                            df.rename(
+                                columns={old_column_name: new_column_name},
+                                inplace=True,
                             )
 
-                            # Update
-
-
-
-
-
-                                    new_column_name = matches[0]
-                                    new_column_name = new_column_name.replace(" ", "")
-
-                                    df.rename(
-                                        columns={old_column_name: new_column_name},
-                                        inplace=True,
-                                    )
-
-                                    # Update data types for lakehouse columns
-                                    dataType = next(
-                                        str(c.DataType)
-                                        for c in tom.all_columns()
-                                        if c.Parent.Name == tName
-                                        and c.SourceColumn == new_column_name
-                                    )
-                                    # dfC_type = dfC[(dfC['Table Name'] == tName) & (dfC['Source'] == new_column_name)]
-                                    # dataType = dfC_type['Data Type'].iloc[0]
-
-                                    if dataType == "Int64":
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(int)
-                                    elif dataType in ["Decimal", "Double"]:
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(float)
-                                    elif dataType == "Boolean":
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(bool)
-                                    elif dataType == "DateTime":
-                                        df[new_column_name] = pd.to_datetime(
-                                            df[new_column_name]
-                                        )
-                                    else:
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(str)
-                            # else:
-                            # second_column_name = df.columns[1]
-                            # third_column_name = df.columns[2]
-                            # df[third_column_name] = df[third_column_name].astype(int)
-
-                            # Remove calc columns from field parameters
-                            # mask = df[second_column_name].isin(dfC_filt['Full Column Name'])
-                            # df = df[~mask]
-
-                            delta_table_name = tName.replace(" ", "_")
-                            print(
-                                f"{icons.in_progress} Refresh of the '{delta_table_name}' table within the lakehouse is in progress..."
+                            # Update data types for lakehouse columns
+                            dataType = next(
+                                str(c.DataType)
+                                for c in tom.all_columns()
+                                if c.Parent.Name == tName
+                                and c.SourceColumn == new_column_name
                             )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                            # dfC_type = dfC[(dfC['Table Name'] == tName) & (dfC['Source'] == new_column_name)]
+                            # dataType = dfC_type['Data Type'].iloc[0]
+
+                            if dataType == "Int64":
+                                df[new_column_name] = df[new_column_name].astype(int)
+                            elif dataType in ["Decimal", "Double"]:
+                                df[new_column_name] = df[new_column_name].astype(float)
+                            elif dataType == "Boolean":
+                                df[new_column_name] = df[new_column_name].astype(bool)
+                            elif dataType == "DateTime":
+                                df[new_column_name] = pd.to_datetime(
+                                    df[new_column_name]
+                                )
+                            else:
+                                df[new_column_name] = df[new_column_name].astype(str)
+                    # else:
+                    # second_column_name = df.columns[1]
+                    # third_column_name = df.columns[2]
+                    # df[third_column_name] = df[third_column_name].astype(int)
+
+                    # Remove calc columns from field parameters
+                    # mask = df[second_column_name].isin(dfC_filt['Full Column Name'])
+                    # df = df[~mask]
+
+                    delta_table_name = tName.replace(" ", "_")
+                    print(
+                        f"{icons.in_progress} Refresh of the '{delta_table_name}' table within the lakehouse is in progress..."
+                    )
+
+                    spark_df = spark.createDataFrame(df)
+                    spark_df.write.mode("overwrite").format("delta").saveAsTable(
+                        delta_table_name
+                    )
+                    print(
+                        f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
+                    )
+                except Exception as e:
+                    raise ValueError(
+                        f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
+                    ) from e
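End to end, the new `refresh_calc_tables` flow is: confirm the model is reachable (via `retry`), find model annotations whose names match a table, evaluate each stored DAX query, normalize the resulting column names and data types, and overwrite the matching delta table in the attached lakehouse. A hedged usage sketch, assuming the function is re-exported from `sempy_labs.migration` as the package layout suggests and that a lakehouse is attached to the notebook; the dataset and workspace names are illustrative:

```python
from sempy_labs.migration import refresh_calc_tables

# Re-materialize the calculated tables of a migrated Direct Lake model
# as delta tables in the attached lakehouse.
refresh_calc_tables(dataset="AdventureWorks", workspace="Sales Workspace")
```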