semantic-link-labs 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic; see the package registry's advisory page for more details.
- semantic_link_labs-0.4.1.dist-info/LICENSE +21 -0
- semantic_link_labs-0.4.1.dist-info/METADATA +22 -0
- semantic_link_labs-0.4.1.dist-info/RECORD +52 -0
- semantic_link_labs-0.4.1.dist-info/WHEEL +5 -0
- semantic_link_labs-0.4.1.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +154 -0
- sempy_labs/_ai.py +496 -0
- sempy_labs/_clear_cache.py +39 -0
- sempy_labs/_connections.py +234 -0
- sempy_labs/_dax.py +70 -0
- sempy_labs/_generate_semantic_model.py +280 -0
- sempy_labs/_helper_functions.py +506 -0
- sempy_labs/_icons.py +4 -0
- sempy_labs/_list_functions.py +1372 -0
- sempy_labs/_model_auto_build.py +143 -0
- sempy_labs/_model_bpa.py +1354 -0
- sempy_labs/_model_dependencies.py +341 -0
- sempy_labs/_one_lake_integration.py +155 -0
- sempy_labs/_query_scale_out.py +447 -0
- sempy_labs/_refresh_semantic_model.py +184 -0
- sempy_labs/_tom.py +3766 -0
- sempy_labs/_translations.py +378 -0
- sempy_labs/_vertipaq.py +893 -0
- sempy_labs/directlake/__init__.py +45 -0
- sempy_labs/directlake/_directlake_schema_compare.py +110 -0
- sempy_labs/directlake/_directlake_schema_sync.py +128 -0
- sempy_labs/directlake/_fallback.py +62 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +69 -0
- sempy_labs/directlake/_get_shared_expression.py +59 -0
- sempy_labs/directlake/_guardrails.py +84 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +54 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +89 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +81 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +64 -0
- sempy_labs/directlake/_warm_cache.py +210 -0
- sempy_labs/lakehouse/__init__.py +24 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +81 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +85 -0
- sempy_labs/lakehouse/_shortcuts.py +296 -0
- sempy_labs/migration/__init__.py +29 -0
- sempy_labs/migration/_create_pqt_file.py +239 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +429 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +150 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +524 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +165 -0
- sempy_labs/migration/_migration_validation.py +227 -0
- sempy_labs/migration/_refresh_calc_tables.py +129 -0
- sempy_labs/report/__init__.py +35 -0
- sempy_labs/report/_generate_report.py +253 -0
- sempy_labs/report/_report_functions.py +855 -0
- sempy_labs/report/_report_rebind.py +131 -0
|
@@ -0,0 +1,855 @@
|
|
|
1
|
+
import sempy
|
|
2
|
+
import sempy.fabric as fabric
|
|
3
|
+
import pandas as pd
|
|
4
|
+
import json, os, time, base64, copy, re
|
|
5
|
+
from anytree import Node, RenderTree
|
|
6
|
+
from powerbiclient import Report
|
|
7
|
+
from synapse.ml.services import Translate
|
|
8
|
+
from pyspark.sql.functions import col, flatten
|
|
9
|
+
from pyspark.sql import SparkSession
|
|
10
|
+
from sempy_labs.report._generate_report import update_report_from_reportjson
|
|
11
|
+
from sempy_labs.lakehouse._lakehouse import lakehouse_attached
|
|
12
|
+
from sempy_labs._helper_functions import (
|
|
13
|
+
generate_embedded_filter,
|
|
14
|
+
resolve_dataset_name,
|
|
15
|
+
resolve_report_id,
|
|
16
|
+
resolve_lakehouse_name,
|
|
17
|
+
language_validate,
|
|
18
|
+
resolve_workspace_name_and_id,
|
|
19
|
+
)
|
|
20
|
+
from typing import Any, List, Optional, Union
|
|
21
|
+
from sempy._utils._log import log
|
|
22
|
+
import sempy_labs._icons as icons
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def get_report_json(
    report: str,
    workspace: Optional[str] = None,
    save_to_file_name: Optional[str] = None,
) -> Any:
    """
    Gets the report.json file content of a Power BI report.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    save_to_file_name : str, default=None
        Specifying this parameter will save the report.json file to the lakehouse attached to the notebook with the file name of this parameter.

    Returns
    -------
    Any
        The report.json file for a given Power BI report.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()

    dfI = fabric.list_items(workspace=workspace, type="Report")
    dfI_filt = dfI[(dfI["Display Name"] == report)]

    if len(dfI_filt) == 0:
        print(
            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
        )
        return

    itemId = dfI_filt["Id"].iloc[0]
    response = client.post(
        f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition"
    )
    df_items = pd.json_normalize(response.json()["definition"]["parts"])
    df_items_filt = df_items[df_items["path"] == "report.json"]
    # The 'payload' column holds the part content as base64-encoded JSON text.
    payload = df_items_filt["payload"].iloc[0]

    reportFile = base64.b64decode(payload).decode("utf-8")
    reportJson = json.loads(reportFile)

    if save_to_file_name is not None:
        # Saving requires a lakehouse mounted at /lakehouse/default.
        if not lakehouse_attached():
            print(
                f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
            )
            return

        lakehouse_id = fabric.get_lakehouse_id()
        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
        folderPath = "/lakehouse/default/Files"
        fileExt = ".json"
        if not save_to_file_name.endswith(fileExt):
            save_to_file_name = save_to_file_name + fileExt
        filePath = os.path.join(folderPath, save_to_file_name)
        with open(filePath, "w") as json_file:
            json.dump(reportJson, json_file, indent=4)
        print(
            f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
        )

    return reportJson
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def report_dependency_tree(workspace: Optional[str] = None):
    """
    Prints a dependency between reports and semantic models.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    if workspace is None:
        workspaceId = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspaceId)

    # Join reports to the semantic models (datasets) they are bound to.
    dfR = fabric.list_reports(workspace=workspace)
    dfD = fabric.list_datasets(workspace=workspace)
    dfR = pd.merge(
        dfR,
        dfD[["Dataset ID", "Dataset Name"]],
        left_on="Dataset Id",
        right_on="Dataset ID",
        how="left",
    )
    dfR.rename(columns={"Name": "Report Name"}, inplace=True)
    dfR = dfR[["Report Name", "Dataset Name"]]

    # Icons rendered next to each node type in the tree output.
    report_icon = "\U0001F4F6"
    dataset_icon = "\U0001F9CA"
    workspace_icon = "\U0001F465"

    node_dict = {}
    rootNode = Node(workspace)
    node_dict[workspace] = rootNode
    rootNode.custom_property = workspace_icon + " "

    # Build workspace -> dataset -> report hierarchy; datasets are created
    # lazily the first time a report referencing them is seen.
    for i, r in dfR.iterrows():
        datasetName = r["Dataset Name"]
        reportName = r["Report Name"]
        parentNode = node_dict.get(datasetName)
        if parentNode is None:
            parentNode = Node(datasetName, parent=rootNode)
            node_dict[datasetName] = parentNode
            parentNode.custom_property = dataset_icon + " "

        child_node = Node(reportName, parent=parentNode)
        child_node.custom_property = report_icon + " "

    # Print the tree structure
    for pre, _, node in RenderTree(node_dict[workspace]):
        print(f"{pre}{node.custom_property}'{node.name}'")
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
@log
def export_report(
    report: str,
    export_format: str,
    file_name: Optional[str] = None,
    bookmark_name: Optional[str] = None,
    page_name: Optional[str] = None,
    visual_name: Optional[str] = None,
    report_filter: Optional[str] = None,
    workspace: Optional[str] = None,
):
    """
    Exports a Power BI report to a file in your lakehouse.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    export_format : str
        The format in which to export the report. For image formats, enter the file extension in this parameter, not 'IMAGE'.
        `Valid formats <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat>`_
    file_name : str, default=None
        The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults to the reportName parameter value.
    bookmark_name : str, default=None
        The name (GUID) of a bookmark within the report.
    page_name : str, default=None
        The name (GUID) of the report page.
    visual_name : str, default=None
        The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter.
    report_filter : str, default=None
        A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group

    # The export can only be written to an attached lakehouse.
    if not lakehouse_attached():
        print(
            f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
        )
        return

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Accept single values for pages/visuals; normalize to lists.
    if isinstance(page_name, str):
        page_name = [page_name]
    if isinstance(visual_name, str):
        visual_name = [visual_name]

    if bookmark_name is not None and (page_name is not None or visual_name is not None):
        print(
            f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
        )
        return
    if visual_name is not None and page_name is None:
        print(
            f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
        )
        return

    # Supported export formats mapped to the file extension used when saving.
    validFormats = {
        "ACCESSIBLEPDF": ".pdf",
        "CSV": ".csv",
        "DOCX": ".docx",
        "MHTML": ".mhtml",
        "PDF": ".pdf",
        "PNG": ".png",
        "PPTX": ".pptx",
        "XLSX": ".xlsx",
        "XML": ".xml",
        "BMP": ".bmp",
        "EMF": ".emf",
        "GIF": ".gif",
        "JPEG": ".jpeg",
        "TIFF": ".tiff",
    }

    export_format = export_format.upper()

    fileExt = validFormats.get(export_format)
    if fileExt is None:
        print(
            f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
        )
        return

    if file_name is None:
        file_name = report + fileExt
    else:
        file_name = file_name + fileExt

    folderPath = "/lakehouse/default/Files"
    filePath = os.path.join(folderPath, file_name)

    dfI = fabric.list_items(workspace=workspace)
    dfI_filt = dfI[
        (dfI["Type"].isin(["Report", "PaginatedReport"]))
        & (dfI["Display Name"] == report)
    ]

    if len(dfI_filt) == 0:
        print(
            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
        )
        return

    reportType = dfI_filt["Type"].iloc[0]

    # Limitations: some formats only apply to one report type.
    pbiOnly = ["PNG"]
    paginatedOnly = [
        "ACCESSIBLEPDF",
        "CSV",
        "DOCX",
        "BMP",
        "EMF",
        "GIF",
        "JPEG",
        "TIFF",
        "MHTML",
        "XLSX",
        "XML",
    ]

    if reportType == "Report" and export_format in paginatedOnly:
        print(
            f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
        )
        return
    if reportType == "PaginatedReport" and export_format in pbiOnly:
        print(
            f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
        )
        return

    if reportType == "PaginatedReport" and (
        bookmark_name is not None or page_name is not None or visual_name is not None
    ):
        print(
            f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
        )
        return

    reportId = dfI_filt["Id"].iloc[0]
    client = fabric.PowerBIRestClient()

    dfVisual = list_report_visuals(report=report, workspace=workspace)
    dfPage = list_report_pages(report=report, workspace=workspace)

    # Build the request body depending on which scoping parameters were set.
    if (
        export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
        and reportType == "PaginatedReport"
    ):
        # Image formats are requested as 'IMAGE' with the concrete format in
        # the paginated report settings.
        request_body = {
            "format": "IMAGE",
            "paginatedReportConfiguration": {
                "formatSettings": {"OutputFormat": export_format.lower()}
            },
        }
    elif bookmark_name is None and page_name is None and visual_name is None:
        request_body = {"format": export_format}
    elif bookmark_name is not None:
        if reportType == "Report":
            request_body = {
                "format": export_format,
                "powerBIReportConfiguration": {
                    "defaultBookmark": {"name": bookmark_name}
                },
            }
    elif page_name is not None and visual_name is None:
        if reportType == "Report":
            request_body = {"format": export_format, "powerBIReportConfiguration": {}}

            request_body["powerBIReportConfiguration"]["pages"] = []

            for page in page_name:
                dfPage_filt = dfPage[dfPage["Page ID"] == page]
                if len(dfPage_filt) == 0:
                    print(
                        f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace."
                    )
                    return
                page_dict = {"pageName": page}
                request_body["powerBIReportConfiguration"]["pages"].append(page_dict)

    elif page_name is not None and visual_name is not None:
        # Pages and visuals are matched positionally: visual_name[i] must be
        # on page_name[i].
        if len(page_name) != len(visual_name):
            print(
                f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
            )
            return
        if reportType == "Report":
            request_body = {"format": export_format, "powerBIReportConfiguration": {}}

            request_body["powerBIReportConfiguration"]["pages"] = []
            a = 0
            for page in page_name:
                visual = visual_name[a]
                dfVisual_filt = dfVisual[
                    (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
                ]
                if len(dfVisual_filt) == 0:
                    print(
                        f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace."
                    )
                    return
                page_dict = {"pageName": page, "visualName": visual}
                request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
                a += 1

    # Transform and add report filter if it is specified
    if report_filter is not None and reportType == "Report":
        reportFilter = generate_embedded_filter(filter=report_filter)
        report_level_filter = {"filter": reportFilter}

        if "powerBIReportConfiguration" not in request_body:
            request_body["powerBIReportConfiguration"] = {}
        request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
            report_level_filter
        ]
    # NOTE(review): removed a leftover debug 'print(request_body)' here.
    response = client.post(
        f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/ExportTo",
        json=request_body,
    )
    if response.status_code == 202:
        # Export is a long-running operation: poll until success/failure.
        response_body = json.loads(response.content)
        exportId = response_body["id"]
        response = client.get(
            f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
        )
        response_body = json.loads(response.content)
        while response_body["status"] not in ["Succeeded", "Failed"]:
            time.sleep(3)
            response = client.get(
                f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
            )
            response_body = json.loads(response.content)
        if response_body["status"] == "Failed":
            print(
                f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
            )
        else:
            response = client.get(
                f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
            )
            print(
                f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace}' workspace to the lakehouse..."
            )
            with open(filePath, "wb") as export_file:
                export_file.write(response.content)
            print(
                f"{icons.green_dot} The '{export_format}' export for the '{report}' report within the '{workspace}' workspace has been saved to the following location: '{filePath}'."
            )
|
|
411
|
+
|
|
412
|
+
|
|
413
|
+
def clone_report(
    report: str,
    cloned_report: str,
    workspace: Optional[str] = None,
    target_workspace: Optional[str] = None,
    target_dataset: Optional[str] = None,
):
    """
    Clones a Power BI report.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    cloned_report : str
        Name of the new Power BI report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    target_workspace : str, default=None
        The name of the Fabric workspace to place the cloned report.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    target_dataset : str, default=None
        The name of the semantic model to be used by the cloned report.
        Defaults to None which resolves to the semantic model used by the initial report.
    """

    # https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    dfI = fabric.list_items(workspace=workspace, type="Report")
    dfI_filt = dfI[(dfI["Display Name"] == report)]

    if len(dfI_filt) == 0:
        print(
            f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
        )
        return

    reportId = resolve_report_id(report, workspace)

    if target_workspace is None:
        target_workspace = workspace
        target_workspace_id = workspace_id
    else:
        dfW = fabric.list_workspaces()
        dfW_filt = dfW[dfW["Name"] == target_workspace]

        if len(dfW_filt) == 0:
            # Bug fix: this message previously reported the source 'workspace'
            # instead of the target workspace that failed validation.
            print(f"{icons.red_dot} The '{target_workspace}' is not a valid workspace.")
            return
        target_workspace_id = dfW_filt["Id"].iloc[0]

    if target_dataset is None:
        # Default to the semantic model currently bound to the source report.
        dfR = fabric.list_reports(workspace=target_workspace)
        dfR_filt = dfR[dfR["Name"] == report]
        target_dataset_id = dfR_filt["Dataset Id"].iloc[0]
        target_dataset = resolve_dataset_name(
            dataset_id=target_dataset_id, workspace=target_workspace
        )
    else:
        dfD = fabric.list_datasets(workspace=target_workspace)
        dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]

        if len(dfD_filt) == 0:
            print(
                f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace."
            )
            return
        target_dataset_id = dfD_filt["Dataset Id"].iloc[0]

    client = fabric.PowerBIRestClient()

    # NOTE(review): by this point target_workspace/target_dataset have been
    # resolved to non-None values, so the final branch is the one normally
    # taken; the other branches are kept for safety.
    if target_workspace is None and target_dataset is None:
        request_body = {"name": cloned_report}
    elif target_workspace is not None and target_dataset is None:
        request_body = {"name": cloned_report, "targetWorkspaceId": target_workspace_id}
    elif target_workspace is not None and target_dataset is not None:
        request_body = {
            "name": cloned_report,
            "targetModelId": target_dataset_id,
            "targetWorkspaceId": target_workspace_id,
        }
    elif target_workspace is None and target_dataset is not None:
        request_body = {"name": cloned_report, "targetModelId": target_dataset_id}

    response = client.post(
        f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone", json=request_body
    )

    if response.status_code == 200:
        print(
            f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the '{target_workspace}' workspace using the '{target_dataset}' semantic model."
        )
    else:
        print(
            f"{icons.red_dot} POST request failed with status code: {response.status_code}"
        )
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
def launch_report(report: str, workspace: Optional[str] = None):
    """
    Shows a Power BI report within a Fabric notebook.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    str
        An embedded Power BI report within the notebook.
    """

    # Bug fix: removed 'from .HelperFunctions import resolve_report_id' — that
    # module does not exist in this package; resolve_report_id is already
    # imported at the top of this file from sempy_labs._helper_functions.

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    reportId = resolve_report_id(report, workspace)

    # Embed the report (powerbiclient widget) in the notebook output.
    report = Report(group_id=workspace_id, report_id=reportId)

    return report
|
|
544
|
+
|
|
545
|
+
|
|
546
|
+
def list_report_pages(report: str, workspace: Optional[str] = None):
    """
    Shows the properties of all pages within a Power BI report.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the pages within a Power BI report and their properties.
    """

    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)

    columns = ["Page ID", "Page Name", "Hidden", "Width", "Height", "Visual Count"]

    reportJson = get_report_json(report=report, workspace=workspace)

    rows = []
    for section in reportJson["sections"]:
        pageHidden = False
        pageConfigJson = json.loads(section["config"])

        try:
            # A 'visibility' value of 1 in the page config marks a hidden page.
            if pageConfigJson["visibility"] == 1:
                pageHidden = True
        except (KeyError, TypeError):
            pass

        rows.append(
            {
                "Page ID": section["name"],
                "Page Name": section["displayName"],
                "Hidden": pageHidden,
                "Width": section["width"],
                "Height": section["height"],
                "Visual Count": len(section["visualContainers"]),
            }
        )

    # Build the frame once instead of concatenating row-by-row (O(n^2)).
    df = pd.DataFrame(rows, columns=columns)

    df["Hidden"] = df["Hidden"].astype(bool)
    intCol = ["Width", "Height", "Visual Count"]
    df[intCol] = df[intCol].astype(int)

    return df
|
|
608
|
+
|
|
609
|
+
|
|
610
|
+
def list_report_visuals(report: str, workspace: Optional[str] = None):
    """
    Shows the properties of all visuals within a Power BI report.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the visuals within a Power BI report and their properties.
    """

    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)

    reportJson = get_report_json(report=report, workspace=workspace)

    columns = ["Page Name", "Page ID", "Visual ID", "Title"]

    rows = []
    for section in reportJson["sections"]:
        pageID = section["name"]
        pageName = section["displayName"]

        for visual in section["visualContainers"]:
            visualConfigJson = json.loads(visual["config"])
            visualID = visualConfigJson["name"]

            try:
                title = visualConfigJson["singleVisual"]["vcObjects"]["title"][0][
                    "properties"
                ]["text"]["expr"]["Literal"]["Value"]
                # The title literal is stored quoted; strip the quote characters.
                title = title[1:-1]
            except (KeyError, IndexError, TypeError):
                title = ""

            rows.append(
                {
                    "Page Name": pageName,
                    "Page ID": pageID,
                    "Visual ID": visualID,
                    "Title": title,
                }
            )

    # Build the frame once instead of concatenating row-by-row (O(n^2)).
    df = pd.DataFrame(rows, columns=columns)

    return df
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
def list_report_bookmarks(report: str, workspace: Optional[str] = None):
    """
    Shows the properties of all bookmarks within a Power BI report.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the bookmarks within a Power BI report and their properties.
    """

    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)

    columns = [
        "Bookmark ID",
        "Bookmark Name",
        "Page ID",
        "Visual ID",
        "Visual Hidden",
    ]

    reportJson = get_report_json(report=report, workspace=workspace)
    reportConfigJson = json.loads(reportJson["config"])

    # Explicit key check instead of the previous bare 'except' wrapping the
    # whole body, which reported *any* failure as "no bookmarks".
    if "bookmarks" not in reportConfigJson:
        print(
            f"The '{report}' report within the '{workspace}' workspace has no bookmarks."
        )
        return

    rows = []
    for bookmark in reportConfigJson["bookmarks"]:
        bID = bookmark["name"]
        bName = bookmark["displayName"]
        rptPageId = bookmark["explorationState"]["activeSection"]

        for rptPg in bookmark["explorationState"]["sections"]:
            for vc in bookmark["explorationState"]["sections"][rptPg][
                "visualContainers"
            ]:
                vHidden = False
                try:
                    hidden = bookmark["explorationState"]["sections"][rptPg][
                        "visualContainers"
                    ][vc]["singleVisual"]["display"]["mode"]
                    if hidden == "hidden":
                        vHidden = True
                except (KeyError, TypeError):
                    # Visuals without an explicit display mode are visible.
                    pass

                rows.append(
                    {
                        "Bookmark ID": bID,
                        "Bookmark Name": bName,
                        "Page ID": rptPageId,
                        "Visual ID": vc,
                        "Visual Hidden": vHidden,
                    }
                )

    # Build the frame once instead of concatenating row-by-row (O(n^2)).
    df = pd.DataFrame(rows, columns=columns)

    # Attach the friendly page name for each bookmark's active page.
    listPages = list_report_pages(report=report, workspace=workspace)

    df = pd.merge(df, listPages[["Page ID", "Page Name"]], on="Page ID", how="left")
    df = df[
        [
            "Bookmark ID",
            "Bookmark Name",
            "Page ID",
            "Page Name",
            "Visual ID",
            "Visual Hidden",
        ]
    ]

    return df
|
|
751
|
+
|
|
752
|
+
|
|
753
|
+
def translate_report_titles(
    report: str, languages: Union[str, List[str]], workspace: Optional[str] = None
):
    """
    Dynamically generates new Power BI reports which have report titles translated into the specified language(s).

    For each requested language, the source report is cloned (named
    ``{report}_{language}``), the visual titles are machine-translated via the
    SynapseML ``Translate`` transformer, and the translated titles are written
    back into the cloned report's definition JSON.

    Parameters
    ----------
    report : str
        Name of the Power BI report.
    languages : str, List[str]
        The language code(s) in which to translate the report titles.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    if isinstance(languages, str):
        languages = [languages]

    # Validate every language code up front so we fail before doing any work.
    for lang in languages:
        language_validate(lang)

    reportJson = get_report_json(report=report, workspace=workspace)
    dfV = list_report_visuals(report=report, workspace=workspace)
    spark = SparkSession.builder.getOrCreate()
    df = spark.createDataFrame(dfV)
    columnToTranslate = "Title"

    # Translate all titles into every requested language in one pass; the
    # output column holds one translation per language, in request order.
    translate = (
        Translate()
        .setTextCol(columnToTranslate)
        .setToLanguage(languages)
        .setOutputCol("translation")
        .setConcurrency(5)
    )

    transDF = (
        translate.transform(df)
        .withColumn("translation", flatten(col("translation.translations")))
        .withColumn("translation", col("translation.text"))
        .select("Visual ID", columnToTranslate, "translation")
    )

    df_panda = transDF.toPandas()

    # i indexes into the per-visual translation list; it tracks the position
    # of `lang` within `languages` (same order as setToLanguage above).
    i = 0
    for lang in languages:
        # Clone report
        language = language_validate(lang)
        clonedReportName = f"{report}_{language}"

        dfRep = fabric.list_reports(workspace=workspace)
        dfRep_filt = dfRep[
            (dfRep["Name"] == clonedReportName)
            & (dfRep["Report Type"] == "PowerBIReport")
        ]

        if len(dfRep_filt) > 0:
            # Reuse an existing clone rather than failing or duplicating it.
            print(
                f"{icons.yellow_dot} The '{clonedReportName}' report already exists in the '{workspace}' workspace."
            )
        else:
            clone_report(
                report=report, cloned_report=clonedReportName, workspace=workspace
            )
            print(
                f"{icons.green_dot} The '{clonedReportName}' report has been created via clone in the '{workspace}' workspace."
            )

        # Deep-copy so each language's edits start from the pristine source JSON.
        rptJsonTr = copy.deepcopy(reportJson)

        # Update report json file
        for section in rptJsonTr["sections"]:
            for visual in section["visualContainers"]:
                visualConfig = visual["config"]
                visualConfigJson = json.loads(visualConfig)
                visualID = visualConfigJson["name"]

                # Match this visual to its translated title; skip visuals
                # with no title text.
                df_filt = df_panda[
                    (df_panda["Visual ID"] == visualID) & (df_panda["Title"] != "")
                ]

                if len(df_filt) == 1:
                    tr = df_filt["translation"].str[i].iloc[0]
                    if len(tr) > 0:
                        prop = visualConfigJson["singleVisual"]["vcObjects"]["title"][
                            0
                        ]["properties"]["text"]["expr"]["Literal"]
                        # Title literals are stored single-quoted in the report JSON.
                        prop["Value"] = f"'{tr}'"

                visual["config"] = json.dumps(visualConfigJson)

        i += 1

        # Post updated report json file to cloned report
        update_report_from_reportjson(
            report=clonedReportName, report_json=rptJsonTr, workspace=workspace
        )
        print(
            f"{icons.green_dot} The visual titles within the '{clonedReportName}' report within the '{workspace}' workspace have been translated into '{language}' accordingly."
        )