semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/report/_report_functions.py

@@ -1,7 +1,10 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import json
+import json
+import os
+import time
+import base64
+import copy
 from anytree import Node, RenderTree
 from powerbiclient import Report
 from synapse.ml.services import Translate
@@ -16,17 +19,20 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     language_validate,
     resolve_workspace_name_and_id,
+    lro,
+    _decode_b64,
 )
-from typing import
+from typing import List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException


 def get_report_json(
     report: str,
     workspace: Optional[str] = None,
     save_to_file_name: Optional[str] = None,
-) ->
+) -> dict:
     """
     Gets the report.json file content of a Power BI report.

@@ -35,7 +41,7 @@ def get_report_json(
     report : str
         Name of the Power BI report.
     workspace : str, default=None
-        The Fabric workspace name.
+        The Fabric workspace name in which the report exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
@@ -43,50 +49,47 @@ def get_report_json(

     Returns
     -------
-
+    dict
         The report.json file for a given Power BI report.
     """

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    report_id = resolve_report_id(report=report, workspace=workspace)
+    fmt = "PBIR-Legacy"

     client = fabric.FabricRestClient()
-
-    dfI = fabric.list_items(workspace=workspace, type="Report")
-    dfI_filt = dfI[(dfI["Display Name"] == report)]
-
-    if len(dfI_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
-
-    itemId = dfI_filt["Id"].iloc[0]
     response = client.post(
-        f"/v1/workspaces/{workspace_id}/
+        f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition?format={fmt}"
     )
-
+
+    result = lro(client, response).json()
+    df_items = pd.json_normalize(result["definition"]["parts"])
     df_items_filt = df_items[df_items["path"] == "report.json"]
     payload = df_items_filt["payload"].iloc[0]
-
-
-    reportJson = json.loads(reportFile)
+    report_file = _decode_b64(payload)
+    report_json = json.loads(report_file)

     if save_to_file_name is not None:
-
-
-
+        if not lakehouse_attached():
+            raise ValueError(
+                f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+            )

         lakehouse_id = fabric.get_lakehouse_id()
-
+        lake_workspace = fabric.resolve_workspace_name()
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
         folderPath = "/lakehouse/default/Files"
         fileExt = ".json"
         if not save_to_file_name.endswith(fileExt):
-            save_to_file_name = save_to_file_name
+            save_to_file_name = f"{save_to_file_name}{fileExt}"
         filePath = os.path.join(folderPath, save_to_file_name)
         with open(filePath, "w") as json_file:
-            json.dump(
+            json.dump(report_json, json_file, indent=4)
         print(
             f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )

-    return
+    return report_json


 def report_dependency_tree(workspace: Optional[str] = None):
@@ -124,7 +127,7 @@ def report_dependency_tree(workspace: Optional[str] = None):
     node_dict = {}
     rootNode = Node(workspace)
     node_dict[workspace] = rootNode
-    rootNode.custom_property = workspace_icon
+    rootNode.custom_property = f"{workspace_icon} "

     for i, r in dfR.iterrows():
         datasetName = r["Dataset Name"]
@@ -133,10 +136,10 @@ def report_dependency_tree(workspace: Optional[str] = None):
         if parentNode is None:
             parentNode = Node(datasetName, parent=rootNode)
             node_dict[datasetName] = parentNode
-            parentNode.custom_property = dataset_icon
+            parentNode.custom_property = f"{dataset_icon} "

         child_node = Node(reportName, parent=parentNode)
-        child_node.custom_property = report_icon
+        child_node.custom_property = f"{report_icon} "

     # Print the tree structure
     for pre, _, node in RenderTree(node_dict[workspace]):
@@ -182,10 +185,10 @@ def export_report(

     # https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group

-
-
-
-
+    if not lakehouse_attached():
+        raise ValueError(
+            f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+        )

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

@@ -195,10 +198,14 @@ def export_report(
         visual_name = [visual_name]

     if bookmark_name is not None and (page_name is not None or visual_name is not None):
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
+        )

     if visual_name is not None and page_name is None:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
+        )

     validFormats = {
         "ACCESSIBLEPDF": ".pdf",
@@ -221,12 +228,14 @@ def export_report(

     fileExt = validFormats.get(export_format)
     if fileExt is None:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
+        )

     if file_name is None:
-        file_name = report
+        file_name = f"{report}{fileExt}"
     else:
-        file_name = file_name
+        file_name = f"{file_name}{fileExt}"

     folderPath = "/lakehouse/default/Files"
     filePath = os.path.join(folderPath, file_name)
@@ -238,7 +247,9 @@ def export_report(
     ]

     if len(dfI_filt) == 0:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
+        )

     reportType = dfI_filt["Type"].iloc[0]

@@ -259,15 +270,21 @@ def export_report(
     ]

     if reportType == "Report" and export_format in paginatedOnly:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
+        )

     if reportType == "PaginatedReport" and export_format in pbiOnly:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
+        )

     if reportType == "PaginatedReport" and (
         bookmark_name is not None or page_name is not None or visual_name is not None
     ):
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
+        )

     reportId = dfI_filt["Id"].iloc[0]
     client = fabric.PowerBIRestClient()
@@ -304,14 +321,18 @@ def export_report(
             for page in page_name:
                 dfPage_filt = dfPage[dfPage["Page ID"] == page]
                 if len(dfPage_filt) == 0:
-                    raise ValueError(
+                    raise ValueError(
+                        f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace."
+                    )

                 page_dict = {"pageName": page}
                 request_body["powerBIReportConfiguration"]["pages"].append(page_dict)

     elif page_name is not None and visual_name is not None:
         if len(page_name) != len(visual_name):
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
+            )

         if reportType == "Report":
             request_body = {"format": export_format, "powerBIReportConfiguration": {}}
@@ -324,7 +345,9 @@ def export_report(
                     (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
                 ]
                 if len(dfVisual_filt) == 0:
-                    raise ValueError(
+                    raise ValueError(
+                        f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace."
+                    )

                 page_dict = {"pageName": page, "visualName": visual}
                 request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
@@ -359,7 +382,9 @@ def export_report(
         )
         response_body = json.loads(response.content)
         if response_body["status"] == "Failed":
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
+            )
         else:
             response = client.get(
                 f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
@@ -411,7 +436,9 @@ def clone_report(
     dfI_filt = dfI[(dfI["Display Name"] == report)]

     if len(dfI_filt) == 0:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
+        )

     reportId = resolve_report_id(report, workspace)

@@ -423,7 +450,9 @@ def clone_report(
         dfW_filt = dfW[dfW["Name"] == target_workspace]

         if len(dfW_filt) == 0:
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} The '{workspace}' is not a valid workspace."
+            )

         target_workspace_id = dfW_filt["Id"].iloc[0]

@@ -439,7 +468,9 @@ def clone_report(
         dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]

         if len(dfD_filt) == 0:
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace."
+            )

         target_dataset_id = dfD_filt["Dataset Id"].iloc[0]

@@ -462,12 +493,12 @@ def clone_report(
         f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone", json=request_body
     )

-    if response.status_code
-
-
-
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the"
+        f" '{target_workspace}' workspace using the '{target_dataset}' semantic model."
+    )


 def launch_report(report: str, workspace: Optional[str] = None):
@@ -544,7 +575,7 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
            pageH = pageConfigJson["visibility"]
            if pageH == 1:
                pageHidden = True
-        except:
+        except Exception:
            pass

        new_data = {
@@ -605,7 +636,7 @@ def list_report_visuals(report: str, workspace: Optional[str] = None):
                    "properties"
                ]["text"]["expr"]["Literal"]["Value"]
                title = title[1:-1]
-            except:
+            except Exception:
                title = ""

            new_data = {
@@ -673,7 +704,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
                    ][vc]["singleVisual"]["display"]["mode"]
                    if hidden == "hidden":
                        vHidden = True
-                except:
+                except Exception:
                    pass

                new_data = {
@@ -701,7 +732,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):

        return df

-    except:
+    except Exception:
        print(
            f"The '{report}' report within the '{workspace}' workspace has no bookmarks."
        )
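For orientation, here is a minimal usage sketch of the updated get_report_json based on the 0.7.0 signature shown above. The report and workspace names are placeholders, and the import path assumes the function is re-exported from sempy_labs.report as listed in the package contents.

# Minimal sketch; "Sales Report" and "Sales Workspace" are placeholder names.
from sempy_labs.report import get_report_json

report_json = get_report_json(
    report="Sales Report",
    workspace="Sales Workspace",       # optional; None resolves the workspace automatically
    save_to_file_name="sales_report",  # optional; 0.7.0 appends ".json" and writes to the attached lakehouse
)
print(list(report_json.keys()))        # 0.7.0 returns the parsed report.json as a dict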
sempy_labs/report/_report_rebind.py

@@ -1,9 +1,10 @@
-import sempy
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import resolve_dataset_id, resolve_report_id
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
+

 @log
 def report_rebind(
@@ -47,7 +48,7 @@ def report_rebind(

     if isinstance(report, str):
         report = [report]
-
+
     for rpt in report:
         reportId = resolve_report_id(report=rpt, workspace=report_workspace)
         datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
@@ -60,12 +61,12 @@ def report_rebind(
             json=request_body,
         )

-        if response.status_code
-
-
-
-
-
+        if response.status_code != 200:
+            raise FabricHTTPException(response)
+        print(
+            f"{icons.green_dot} The '{rpt}' report has been successfully rebinded to the '{dataset}' semantic model."
+        )
+

 @log
 def report_rebind_all(
@@ -76,12 +77,10 @@ def report_rebind_all(
     report_workspace: Optional[str | List[str]] = None,
 ):
     """
-    Rebinds all reports
+    Rebinds all reports across all workspaces which are bound to a specific semantic model to a new semantic model.

     Parameters
     ----------
-    report : str
-        Name of the Power BI report.
     dataset : str
         Name of the semantic model currently binded to the reports.
     new_dataset : str
@@ -96,35 +95,42 @@ def report_rebind_all(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     report_workspace : str | List[str], default=None
         The name(s) of the Fabric workspace(s) in which the report(s) reside(s).
-        Defaults to None which
-
-
-    Returns
-    -------
-
+        Defaults to None which finds all reports in all workspaces which use the semantic model and rebinds them to
+        the new semantic model.
     """

-
+    from sempy_labs._list_functions import list_reports_using_semantic_model
+
+    dataset_workspace = fabric.resolve_workspace_name(dataset_workspace)

     if new_dataset_workpace is None:
         new_dataset_workpace = dataset_workspace

-    if report_workspace is None:
-        report_workspace = dataset_workspace
-
     if isinstance(report_workspace, str):
         report_workspace = [report_workspace]

-
-
-
-
-
-
-
-
-
-
-
-
-
+    dfR = list_reports_using_semantic_model(
+        dataset=dataset, workspace=dataset_workspace
+    )
+
+    if len(dfR) == 0:
+        print(
+            f"{icons.info} The '{dataset}' semantic model within the '{dataset_workspace}' workspace has no dependent reports."
+        )
+        return
+
+    if report_workspace is None:
+        dfR_filt = dfR.copy()
+    else:
+        dfR_filt = dfR[dfR["Report Workspace Name"].isin(report_workspace)]
+
+    for i, r in dfR_filt.iterrows():
+        rpt_name = r["Report Name"]
+        rpt_wksp = r["Report Workspace Name"]
+
+        report_rebind(
+            report=rpt_name,
+            dataset=new_dataset,
+            report_workspace=rpt_wksp,
+            dataset_workspace=new_dataset_workpace,
+        )