semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/_clear_cache.py
CHANGED
```diff
@@ -1,7 +1,6 @@
-import sempy
 import sempy.fabric as fabric
-from ._helper_functions import resolve_dataset_id
-from typing import
+from ._helper_functions import resolve_dataset_id, is_default_semantic_model
+from typing import Optional
 import sempy_labs._icons as icons
 
 
@@ -21,14 +20,19 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
     """
 
     workspace = fabric.resolve_workspace_name(workspace)
+    if is_default_semantic_model(dataset=dataset, workspace=workspace):
+        raise ValueError(
+            f"{icons.red_dot} Cannot run XMLA operations against a default semantic model. Please choose a different semantic model. "
+            "See here for more information: https://learn.microsoft.com/fabric/data-warehouse/semantic-models"
+        )
 
-
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     xmla = f"""
-        <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
-            <Object>
-                <DatabaseID>{
-            </Object>
+            <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
+                <Object>
+                    <DatabaseID>{dataset_id}</DatabaseID>
+                </Object>
         </ClearCache>
     """
     fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
```
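Usage is unchanged apart from the new guard; a short sketch (the dataset and workspace names are illustrative, and the import assumes the package's flat namespace):

```python
from sempy_labs import clear_cache

# Sends a ClearCache XMLA command built from the model's database ID.
# As of 0.7.0 this raises ValueError first if the target is a default
# (auto-generated) semantic model, since XMLA operations are not
# supported against those.
clear_cache(dataset="AdventureWorks", workspace="Sales")
```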
sempy_labs/_connections.py
CHANGED
```diff
@@ -1,8 +1,6 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from
-import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_connection_cloud(
@@ -56,29 +54,32 @@ def create_connection_cloud(
         },
     }
 
-    response = client.post(
-    if response.status_code
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    return df
 
 
 def create_connection_on_prem(
@@ -131,30 +132,33 @@ def create_connection_on_prem(
         },
     }
 
-    response = client.post(
-    if response.status_code
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Gateway ID": o.get("gatewayId"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    return df
 
 
 def create_connection_vnet(
@@ -209,27 +213,30 @@ def create_connection_vnet(
         },
     }
 
-    response = client.post(
-    if response.status_code
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Gateway ID": o.get("gatewayId"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+    return df
```
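All three functions now share the same response-handling shape: non-200 responses raise `FabricHTTPException`, and the nested connection JSON is flattened with chained `.get()` calls (so missing keys become `None` rather than raising) before being appended as a DataFrame row. A minimal, self-contained sketch of that pattern; the helper name below is hypothetical:

```python
import pandas as pd

def connection_row(o: dict) -> dict:
    # Hypothetical helper mirroring the flattening in each create_connection_*
    # function: chained .get() calls tolerate absent keys in the response JSON.
    details = o.get("connectionDetails", {})
    creds = o.get("credentialDetails", {})
    return {
        "Connection Id": o.get("id"),
        "Connection Name": o.get("name"),
        "Connectivity Type": o.get("connectivityType"),
        "Connection Type": details.get("type"),
        "Connection Path": details.get("path"),
        "Privacy Level": o.get("privacyLevel"),
        "Credential Type": creds.get("credentialType"),
        "Skip Test Connection": creds.get("skipTestConnection"),
    }

# With a partial payload, absent fields surface as None/False, not KeyError.
df = pd.DataFrame([connection_row({"id": "1234", "connectivityType": "ShareableCloud"})])
df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
print(df)
```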
sempy_labs/_dax.py
CHANGED
```diff
@@ -1,8 +1,10 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import
-
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_workspace_name_and_id,
+)
+from typing import Optional
 from sempy._utils._log import log
 
 
@@ -40,17 +42,13 @@ def evaluate_dax_impersonation(
 
     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group
 
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     request_body = {
         "queries": [{"query": dax_query}],
-        "impersonatedUserName": user_name
+        "impersonatedUserName": user_name,
     }
 
     client = fabric.PowerBIRestClient()
```
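For reference, the REST call this function assembles is the Power BI "Execute Queries In Group" endpoint linked in the code comment. A minimal sketch, with illustrative IDs; the endpoint path and response shape come from those REST docs, not from this diff:

```python
import sempy.fabric as fabric

workspace_id = "aaaaaaaa-0000-1111-2222-bbbbbbbbbbbb"  # illustrative
dataset_id = "cccccccc-3333-4444-5555-dddddddddddd"    # illustrative

request_body = {
    "queries": [{"query": 'EVALUATE ROW("Rows", COUNTROWS(Sales))'}],
    # Identity under which the DAX runs (e.g. for testing row-level security).
    "impersonatedUserName": "user@contoso.com",
}

client = fabric.PowerBIRestClient()
response = client.post(
    f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/executeQueries",
    json=request_body,
)
# Per the documented response shape: one result per query, tables of rows.
rows = response.json()["results"][0]["tables"][0]["rows"]
```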
sempy_labs/_generate_semantic_model.py
CHANGED
```diff
@@ -1,11 +1,15 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import json
-
+import json
+import os
+from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
+    resolve_dataset_id,
+    _conv_b64,
+    _decode_b64,
+    lro,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
```
```diff
@@ -31,13 +35,14 @@ def create_blank_semantic_model(
     or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-
-    workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)
 
     min_compat = 1500
 
     if compatibility_level < min_compat:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} Compatiblity level must be at least {min_compat}."
+        )
 
     tmsl = f"""
     {{
```
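The TMSL script itself is truncated in this diff. For orientation only, a blank-model `createOrReplace` command has roughly this shape; the exact script the library emits is not shown, and `fabric.execute_tmsl` is the usual execution path for such scripts:

```python
import json

dataset = "MyBlankModel"        # illustrative name
compatibility_level = 1500      # must satisfy the min_compat guard above

# Approximate shape of a TMSL createOrReplace payload for an empty model
# (illustrative, not the library's actual script).
tmsl = {
    "createOrReplace": {
        "object": {"database": dataset},
        "database": {
            "name": dataset,
            "compatibilityLevel": compatibility_level,
            "model": {"culture": "en-US"},
        },
    }
}
print(json.dumps(tmsl, indent=2))
# fabric.execute_tmsl(script=json.dumps(tmsl), workspace=workspace)
```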
```diff
@@ -84,30 +89,22 @@ def create_semantic_model_from_bim(
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    dfI = fabric.list_items(workspace=workspace, type=objectType)
+    dfI = fabric.list_items(workspace=workspace, type="SemanticModel")
     dfI_filt = dfI[(dfI["Display Name"] == dataset)]
 
     if len(dfI_filt) > 0:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
+        )
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
 
-    loadJson = json.dumps(file)
-    f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
-
-    return f
-
-    payloadPBIDefinition = conv_b64(defPBIDataset)
-    payloadBim = conv_b64(bim_file)
+    payloadPBIDefinition = _conv_b64(defPBIDataset)
+    payloadBim = _conv_b64(bim_file)
 
     request_body = {
         "displayName": dataset,
-        "type": objectType,
         "definition": {
             "parts": [
                 {
```
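The inline `conv_b64` logic removed above (`json.dumps` followed by base64 encoding) moved into shared `_conv_b64`/`_decode_b64` helpers in `_helper_functions.py`. Their actual bodies are not part of this diff; a sketch consistent with the removed inline code and the `_decode_b64(payload)` call site below:

```python
import base64
import json

def _conv_b64(file):
    # Serialize a dict to JSON, then base64-encode it for use as an
    # item-definition "payload" part (mirrors the removed inline helper).
    loadJson = json.dumps(file)
    return base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")

def _decode_b64(file, format: str = "utf-8"):
    # Assumed inverse: decode a base64 payload part back to text.
    return base64.b64decode(file).decode(format)
```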
```diff
@@ -124,83 +121,82 @@ def create_semantic_model_from_bim(
         },
     }
 
-    response = client.post(
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/semanticModels",
+        json=request_body,
+    )
 
-    elif response.status_code == 202:
-        operationId = response.headers["x-ms-operation-id"]
-        response = client.get(f"/v1/operations/{operationId}")
-        response_body = json.loads(response.content)
-        while response_body["status"] != "Succeeded":
-            time.sleep(3)
-            response = client.get(f"/v1/operations/{operationId}")
-            response_body = json.loads(response.content)
-        response = client.get(f"/v1/operations/{operationId}/result")
-        print(
-            f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
-        )
-    print(response.json())
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+    )
 
 
 def deploy_semantic_model(
-
+    source_dataset: str,
+    source_workspace: Optional[str] = None,
+    target_dataset: Optional[str] = None,
+    target_workspace: Optional[str] = None,
+    refresh_target_dataset: Optional[bool] = True,
 ):
     """
     Deploys a semantic model based on an existing semantic model.
 
     Parameters
     ----------
-
+    source_dataset : str
         Name of the semantic model to deploy.
-
-        Name of the new semantic model to be created.
-    workspace : str, default=None
+    source_workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
+    target_dataset: str
+        Name of the new semantic model to be created.
+    target_workspace : str, default=None
         The Fabric workspace name in which the new semantic model will be deployed.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    refresh_target_dataset : bool, default=True
+        If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
 
     Returns
     -------
 
     """
 
-
+    from sempy_labs import refresh_semantic_model
 
-    new_dataset_workspace = workspace
+    source_workspace = fabric.resolve_workspace_name(source_workspace)
 
-    if
+    if target_workspace is None:
+        target_workspace = source_workspace
 
-    if
+    if target_dataset is None:
+        target_dataset = source_dataset
+
+    if target_dataset == source_dataset and target_workspace == source_workspace:
+        raise ValueError(
+            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
+            f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )
-        return
 
-    bim = get_semantic_model_bim(dataset=
+    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
 
     create_semantic_model_from_bim(
-        dataset=
+        dataset=target_dataset, bim_file=bim, workspace=target_workspace
     )
 
+    if refresh_target_dataset:
+        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
+
 
 def get_semantic_model_bim(
     dataset: str,
     workspace: Optional[str] = None,
     save_to_file_name: Optional[str] = None,
-
+    lakehouse_workspace: Optional[str] = None,
+) -> dict:
     """
     Extracts the Model.bim file for a given semantic model.
```
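The repeated inline polling removed above is now centralized in the `lro` helper imported from `_helper_functions`. Its implementation is not part of this diff; based on the removed code and the two call sites (`lro(client, response, status_codes=[201, 202])` here and `lro(client, response).json()` below), a plausible reconstruction:

```python
import json
import time
from sempy.fabric.exceptions import FabricHTTPException

def lro(client, response, status_codes=(200, 202)):
    # Plausible sketch, not the library's actual code: validate the initial
    # status, then poll the Fabric long-running-operation endpoints.
    if response.status_code not in status_codes:
        raise FabricHTTPException(response)
    if response.status_code == 202:
        # Async path: poll /v1/operations/{id} until the operation succeeds,
        # then fetch its result (same flow as the inline code this replaced).
        operation_id = response.headers["x-ms-operation-id"]
        response = client.get(f"/v1/operations/{operation_id}")
        while json.loads(response.content)["status"] != "Succeeded":
            time.sleep(3)
            response = client.get(f"/v1/operations/{operation_id}")
        response = client.get(f"/v1/operations/{operation_id}/result")
    return response
```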
```diff
@@ -209,66 +205,55 @@ def get_semantic_model_bim(
     dataset : str
         Name of the semantic model.
     workspace : str, default=None
-        The Fabric workspace name.
+        The Fabric workspace name in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
         If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
+    lakehouse_workspace : str, default=None
+        The Fabric workspace name in which the lakehouse attached to the workspace resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
 
     Returns
     -------
-
+    dict
         The Model.bim file for the semantic model.
     """
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-
+    fmt = "TMSL"
     client = fabric.FabricRestClient()
-
-    itemListFilt = itemList[(itemList["Display Name"] == dataset)]
-    itemId = itemListFilt["Id"].iloc[0]
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
     response = client.post(
-        f"/v1/workspaces/{workspace_id}/
+        f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={fmt}",
     )
-
-    res = response.json()
-    elif response.status_code == 202:
-        operationId = response.headers["x-ms-operation-id"]
-        response = client.get(f"/v1/operations/{operationId}")
-        response_body = json.loads(response.content)
-        while response_body["status"] != "Succeeded":
-            time.sleep(3)
-            response = client.get(f"/v1/operations/{operationId}")
-            response_body = json.loads(response.content)
-        response = client.get(f"/v1/operations/{operationId}/result")
-        res = response.json()
-    df_items = pd.json_normalize(res["definition"]["parts"])
+    result = lro(client, response).json()
+    df_items = pd.json_normalize(result["definition"]["parts"])
     df_items_filt = df_items[df_items["path"] == "model.bim"]
     payload = df_items_filt["payload"].iloc[0]
-    bimFile =
+    bimFile = _decode_b64(payload)
     bimJson = json.loads(bimFile)
 
     if save_to_file_name is not None:
-
-            f"In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+        if not lakehouse_attached():
+            raise ValueError(
+                f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )
-            return
 
         lakehouse_id = fabric.get_lakehouse_id()
-
+        lake_workspace = fabric.resolve_workspace_name()
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
         folderPath = "/lakehouse/default/Files"
         fileExt = ".bim"
         if not save_to_file_name.endswith(fileExt):
-            save_to_file_name = save_to_file_name
+            save_to_file_name = f"{save_to_file_name}{fileExt}"
         filePath = os.path.join(folderPath, save_to_file_name)
         with open(filePath, "w") as json_file:
             json.dump(bimJson, json_file, indent=4)
         print(
-            f"The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+            f"{icons.green_dot} The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )
 
     return bimJson
```