semantic-link-labs 0.9.3-py3-none-any.whl → 0.9.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +9 -6
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +41 -31
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +27 -1
- sempy_labs/_capacity_migration.py +3 -2
- sempy_labs/_dax.py +17 -3
- sempy_labs/_delta_analyzer.py +279 -127
- sempy_labs/_eventhouses.py +70 -1
- sempy_labs/_generate_semantic_model.py +30 -9
- sempy_labs/_helper_functions.py +30 -1
- sempy_labs/_job_scheduler.py +226 -2
- sempy_labs/_list_functions.py +40 -16
- sempy_labs/_model_bpa.py +15 -0
- sempy_labs/_model_bpa_rules.py +12 -2
- sempy_labs/_semantic_models.py +117 -0
- sempy_labs/_sql.py +73 -6
- sempy_labs/_sqldatabase.py +227 -0
- sempy_labs/admin/__init__.py +49 -8
- sempy_labs/admin/_activities.py +166 -0
- sempy_labs/admin/_apps.py +143 -0
- sempy_labs/admin/_basic_functions.py +32 -652
- sempy_labs/admin/_capacities.py +250 -0
- sempy_labs/admin/_datasets.py +184 -0
- sempy_labs/admin/_domains.py +1 -1
- sempy_labs/admin/_items.py +3 -1
- sempy_labs/admin/_reports.py +165 -0
- sempy_labs/admin/_scanner.py +0 -1
- sempy_labs/admin/_shared.py +74 -0
- sempy_labs/admin/_tenant.py +489 -0
- sempy_labs/directlake/_dl_helper.py +0 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
- sempy_labs/graph/_teams.py +1 -1
- sempy_labs/graph/_users.py +9 -1
- sempy_labs/lakehouse/_shortcuts.py +28 -15
- sempy_labs/report/__init__.py +3 -1
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +272 -0
- sempy_labs/report/_report_functions.py +9 -261
- sempy_labs/tom/_model.py +278 -29
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
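The most visible report-level change is that export_report has moved out of sempy_labs/report/_report_functions.py (whose diff is shown below) into the new sempy_labs/report/_export_report.py module (+272 -0), with sempy_labs/report/__init__.py updated accordingly. A minimal call sketch, assuming the function is still re-exported from the sempy_labs.report package and keeping the parameter names from the removed 0.9.3 signature; the report, workspace, and file names are hypothetical:

from sempy_labs.report import export_report

# Hypothetical names; export_format and file_name follow the 0.9.3 signature
# shown in the removed code below. The exported file is written to the
# lakehouse attached to the notebook.
export_report(
    report="Sales Summary",
    export_format="PDF",
    file_name="sales_summary_export",
    workspace="My Workspace",
)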
--- a/sempy_labs/report/_report_functions.py
+++ b/sempy_labs/report/_report_functions.py
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
 import pandas as pd
 import json
 import os
-import time
 import copy
 from anytree import Node, RenderTree
 from powerbiclient import Report
@@ -10,9 +9,7 @@ from pyspark.sql.functions import col, flatten
 from sempy_labs.report._generate_report import update_report_from_reportjson
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy_labs._helper_functions import (
-    generate_embedded_filter,
     resolve_report_id,
-    resolve_lakehouse_name,
     language_validate,
     resolve_workspace_name_and_id,
     _decode_b64,
@@ -20,6 +17,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_spark_session,
+    _mount,
 )
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -76,18 +74,16 @@ def get_report_json(
                 f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )
 
-
-
-
-
-
-
-
-        filePath = os.path.join(folderPath, save_to_file_name)
-        with open(filePath, "w") as json_file:
+        local_path = _mount()
+        save_folder = f"{local_path}/Files"
+        file_ext = ".json"
+        if not save_to_file_name.endswith(file_ext):
+            save_to_file_name = f"{save_to_file_name}{file_ext}"
+        file_path = os.path.join(save_folder, save_to_file_name)
+        with open(file_path, "w") as json_file:
             json.dump(report_json, json_file, indent=4)
         print(
-            f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the
+            f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the lakehouse attached to this notebook in this location: Files/'{save_to_file_name}'.\n\n"
         )
 
     return report_json
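In the hunk above, get_report_json now resolves its save location through the new _mount() helper rather than building the folder path by hand, and appends the .json extension to save_to_file_name when it is missing. A minimal usage sketch, assuming the function keeps its report, workspace, and save_to_file_name parameters and that a lakehouse is attached to the notebook; all names are hypothetical:

from sempy_labs.report import get_report_json

# Hypothetical names; with save_to_file_name set, 0.9.4 writes the file under
# the attached lakehouse's Files area (path resolved via _mount()) and adds
# ".json" automatically when the name has no extension.
report_json = get_report_json(
    report="Sales Summary",
    workspace="My Workspace",
    save_to_file_name="sales_summary_definition",
)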
@@ -145,254 +141,6 @@ def report_dependency_tree(workspace: Optional[str | UUID] = None):
         print(f"{pre}{node.custom_property}'{node.name}'")
 
 
-@log
-def export_report(
-    report: str,
-    export_format: str,
-    file_name: Optional[str] = None,
-    bookmark_name: Optional[str] = None,
-    page_name: Optional[str] = None,
-    visual_name: Optional[str] = None,
-    report_filter: Optional[str] = None,
-    workspace: Optional[str | UUID] = None,
-):
-    """
-    Exports a Power BI report to a file in your lakehouse.
-
-    This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
-
-    Parameters
-    ----------
-    report : str
-        Name of the Power BI report.
-    export_format : str
-        The format in which to export the report. For image formats, enter the file extension in this parameter, not 'IMAGE'.
-        `Valid formats <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat>`_
-    file_name : str, default=None
-        The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults ot the reportName parameter value.
-    bookmark_name : str, default=None
-        The name (GUID) of a bookmark within the report.
-    page_name : str, default=None
-        The name (GUID) of the report page.
-    visual_name : str, default=None
-        The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter.
-    report_filter : str, default=None
-        A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples.
-    workspace : str | uuid.UUID, default=None
-        The Fabric workspace name or ID.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-    """
-
-    if not lakehouse_attached():
-        raise ValueError(
-            f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-        )
-
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    if isinstance(page_name, str):
-        page_name = [page_name]
-    if isinstance(visual_name, str):
-        visual_name = [visual_name]
-
-    if bookmark_name is not None and (page_name is not None or visual_name is not None):
-        raise ValueError(
-            f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
-        )
-
-    if visual_name is not None and page_name is None:
-        raise ValueError(
-            f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
-        )
-
-    validFormats = {
-        "ACCESSIBLEPDF": ".pdf",
-        "CSV": ".csv",
-        "DOCX": ".docx",
-        "MHTML": ".mhtml",
-        "PDF": ".pdf",
-        "PNG": ".png",
-        "PPTX": ".pptx",
-        "XLSX": ".xlsx",
-        "XML": ".xml",
-        "BMP": ".bmp",
-        "EMF": ".emf",
-        "GIF": ".gif",
-        "JPEG": ".jpeg",
-        "TIFF": ".tiff",
-    }
-
-    export_format = export_format.upper()
-    fileExt = validFormats.get(export_format)
-    if fileExt is None:
-        raise ValueError(
-            f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
-        )
-
-    if file_name is None:
-        file_name = f"{report}{fileExt}"
-    else:
-        file_name = f"{file_name}{fileExt}"
-
-    folderPath = "/lakehouse/default/Files"
-    filePath = os.path.join(folderPath, file_name)
-
-    dfI = fabric.list_items(workspace=workspace_id)
-    dfI_filt = dfI[
-        (dfI["Type"].isin(["Report", "PaginatedReport"]))
-        & (dfI["Display Name"] == report)
-    ]
-
-    if len(dfI_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace_name}' workspace."
-        )
-
-    reportType = dfI_filt["Type"].iloc[0]
-
-    # Limitations
-    pbiOnly = ["PNG"]
-    paginatedOnly = [
-        "ACCESSIBLEPDF",
-        "CSV",
-        "DOCX",
-        "BMP",
-        "EMF",
-        "GIF",
-        "JPEG",
-        "TIFF",
-        "MHTML",
-        "XLSX",
-        "XML",
-    ]
-
-    if reportType == "Report" and export_format in paginatedOnly:
-        raise ValueError(
-            f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
-        )
-
-    if reportType == "PaginatedReport" and export_format in pbiOnly:
-        raise ValueError(
-            f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
-        )
-
-    if reportType == "PaginatedReport" and (
-        bookmark_name is not None or page_name is not None or visual_name is not None
-    ):
-        raise ValueError(
-            f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
-        )
-
-    reportId = dfI_filt["Id"].iloc[0]
-
-    if (
-        export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
-        and reportType == "PaginatedReport"
-    ):
-        request_body = {
-            "format": "IMAGE",
-            "paginatedReportConfiguration": {
-                "formatSettings": {"OutputFormat": export_format.lower()}
-            },
-        }
-    elif bookmark_name is None and page_name is None and visual_name is None:
-        request_body = {"format": export_format}
-    elif bookmark_name is not None:
-        if reportType == "Report":
-            request_body = {
-                "format": export_format,
-                "powerBIReportConfiguration": {
-                    "defaultBookmark": {"name": bookmark_name}
-                },
-            }
-    elif page_name is not None and visual_name is None:
-        if reportType == "Report":
-            request_body = {"format": export_format, "powerBIReportConfiguration": {}}
-
-            request_body["powerBIReportConfiguration"]["pages"] = []
-            dfPage = list_report_pages(report=report, workspace=workspace_id)
-
-            for page in page_name:
-                dfPage_filt = dfPage[dfPage["Page ID"] == page]
-                if len(dfPage_filt) == 0:
-                    raise ValueError(
-                        f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace_name}' workspace."
-                    )
-
-                page_dict = {"pageName": page}
-                request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
-
-    elif page_name is not None and visual_name is not None:
-        if len(page_name) != len(visual_name):
-            raise ValueError(
-                f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
-            )
-
-        if reportType == "Report":
-            request_body = {"format": export_format, "powerBIReportConfiguration": {}}
-
-            request_body["powerBIReportConfiguration"]["pages"] = []
-            dfVisual = list_report_visuals(report=report, workspace=workspace_id)
-            a = 0
-            for page in page_name:
-                visual = visual_name[a]
-
-                dfVisual_filt = dfVisual[
-                    (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
-                ]
-                if len(dfVisual_filt) == 0:
-                    raise ValueError(
-                        f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace_name}' workspace."
-                    )
-
-                page_dict = {"pageName": page, "visualName": visual}
-                request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
-                a += 1
-
-    # Transform and add report filter if it is specified
-    if report_filter is not None and reportType == "Report":
-        reportFilter = generate_embedded_filter(filter=report_filter)
-        report_level_filter = {"filter": reportFilter}
-
-        if "powerBIReportConfiguration" not in request_body:
-            request_body["powerBIReportConfiguration"] = {}
-        request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
-            report_level_filter
-        ]
-
-    base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
-    response = _base_api(
-        request=f"{base_url}/ExportTo",
-        method="post",
-        payload=request_body,
-        status_codes=202,
-    )
-    export_id = json.loads(response.content).get("id")
-
-    get_status_url = f"{base_url}/exports/{export_id}"
-    response = _base_api(request=get_status_url, status_codes=[200, 202])
-    response_body = json.loads(response.content)
-    while response_body["status"] not in ["Succeeded", "Failed"]:
-        time.sleep(3)
-        response = _base_api(request=get_status_url, status_codes=[200, 202])
-        response_body = json.loads(response.content)
-    if response_body["status"] == "Failed":
-        raise ValueError(
-            f"{icons.red_dot} The export for the '{report}' report within the '{workspace_name}' workspace in the '{export_format}' format has failed."
-        )
-    else:
-        response = _base_api(request=f"{get_status_url}/file")
-        print(
-            f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace to the lakehouse..."
-        )
-        with open(filePath, "wb") as export_file:
-            export_file.write(response.content)
-        print(
-            f"{icons.green_dot} The '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace has been saved to the following location: '{filePath}'."
-        )
-
-
 def clone_report(
     report: str,
     cloned_report: str,