semantic-link-labs 0.9.2__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (54):
  1. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +10 -6
  2. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +54 -44
  3. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +27 -1
  5. sempy_labs/_ai.py +8 -5
  6. sempy_labs/_capacity_migration.py +3 -2
  7. sempy_labs/_connections.py +45 -9
  8. sempy_labs/_dax.py +17 -3
  9. sempy_labs/_delta_analyzer.py +308 -138
  10. sempy_labs/_eventhouses.py +70 -1
  11. sempy_labs/_gateways.py +56 -8
  12. sempy_labs/_generate_semantic_model.py +30 -9
  13. sempy_labs/_helper_functions.py +84 -9
  14. sempy_labs/_job_scheduler.py +226 -2
  15. sempy_labs/_list_functions.py +42 -19
  16. sempy_labs/_ml_experiments.py +1 -1
  17. sempy_labs/_model_bpa.py +17 -2
  18. sempy_labs/_model_bpa_rules.py +20 -8
  19. sempy_labs/_semantic_models.py +117 -0
  20. sempy_labs/_sql.py +73 -6
  21. sempy_labs/_sqldatabase.py +227 -0
  22. sempy_labs/_translations.py +2 -2
  23. sempy_labs/_vertipaq.py +3 -3
  24. sempy_labs/_warehouses.py +1 -1
  25. sempy_labs/admin/__init__.py +49 -8
  26. sempy_labs/admin/_activities.py +166 -0
  27. sempy_labs/admin/_apps.py +143 -0
  28. sempy_labs/admin/_basic_functions.py +32 -652
  29. sempy_labs/admin/_capacities.py +250 -0
  30. sempy_labs/admin/_datasets.py +184 -0
  31. sempy_labs/admin/_domains.py +1 -3
  32. sempy_labs/admin/_items.py +3 -1
  33. sempy_labs/admin/_reports.py +165 -0
  34. sempy_labs/admin/_scanner.py +53 -49
  35. sempy_labs/admin/_shared.py +74 -0
  36. sempy_labs/admin/_tenant.py +489 -0
  37. sempy_labs/directlake/_dl_helper.py +0 -1
  38. sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
  39. sempy_labs/graph/_teams.py +1 -1
  40. sempy_labs/graph/_users.py +9 -1
  41. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  42. sempy_labs/lakehouse/_get_lakehouse_tables.py +2 -2
  43. sempy_labs/lakehouse/_lakehouse.py +3 -3
  44. sempy_labs/lakehouse/_shortcuts.py +29 -16
  45. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +2 -2
  46. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  47. sempy_labs/report/__init__.py +3 -1
  48. sempy_labs/report/_download_report.py +4 -1
  49. sempy_labs/report/_export_report.py +272 -0
  50. sempy_labs/report/_report_functions.py +11 -263
  51. sempy_labs/report/_report_rebind.py +1 -1
  52. sempy_labs/tom/_model.py +281 -29
  53. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  import json
4
4
  import os
5
- import time
6
5
  import copy
7
6
  from anytree import Node, RenderTree
8
7
  from powerbiclient import Report
@@ -10,15 +9,15 @@ from pyspark.sql.functions import col, flatten
10
9
  from sempy_labs.report._generate_report import update_report_from_reportjson
11
10
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
12
11
  from sempy_labs._helper_functions import (
13
- generate_embedded_filter,
14
12
  resolve_report_id,
15
- resolve_lakehouse_name,
16
13
  language_validate,
17
14
  resolve_workspace_name_and_id,
18
15
  _decode_b64,
19
16
  resolve_dataset_id,
20
17
  _update_dataframe_datatypes,
21
18
  _base_api,
19
+ _create_spark_session,
20
+ _mount,
22
21
  )
23
22
  from typing import List, Optional, Union
24
23
  from sempy._utils._log import log
@@ -75,18 +74,16 @@ def get_report_json(
75
74
  f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
76
75
  )
77
76
 
78
- lakehouse_id = fabric.get_lakehouse_id()
79
- lake_workspace = fabric.resolve_workspace_name()
80
- lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
81
- folderPath = "/lakehouse/default/Files"
82
- fileExt = ".json"
83
- if not save_to_file_name.endswith(fileExt):
84
- save_to_file_name = f"{save_to_file_name}{fileExt}"
85
- filePath = os.path.join(folderPath, save_to_file_name)
86
- with open(filePath, "w") as json_file:
77
+ local_path = _mount()
78
+ save_folder = f"{local_path}/Files"
79
+ file_ext = ".json"
80
+ if not save_to_file_name.endswith(file_ext):
81
+ save_to_file_name = f"{save_to_file_name}{file_ext}"
82
+ file_path = os.path.join(save_folder, save_to_file_name)
83
+ with open(file_path, "w") as json_file:
87
84
  json.dump(report_json, json_file, indent=4)
88
85
  print(
89
- f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
86
+ f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the lakehouse attached to this notebook in this location: Files/'{save_to_file_name}'.\n\n"
90
87
  )
91
88
 
92
89
  return report_json
@@ -144,254 +141,6 @@ def report_dependency_tree(workspace: Optional[str | UUID] = None):
144
141
  print(f"{pre}{node.custom_property}'{node.name}'")
145
142
 
146
143
 
147
- @log
148
- def export_report(
149
- report: str,
150
- export_format: str,
151
- file_name: Optional[str] = None,
152
- bookmark_name: Optional[str] = None,
153
- page_name: Optional[str] = None,
154
- visual_name: Optional[str] = None,
155
- report_filter: Optional[str] = None,
156
- workspace: Optional[str | UUID] = None,
157
- ):
158
- """
159
- Exports a Power BI report to a file in your lakehouse.
160
-
161
- This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
162
-
163
- Parameters
164
- ----------
165
- report : str
166
- Name of the Power BI report.
167
- export_format : str
168
- The format in which to export the report. For image formats, enter the file extension in this parameter, not 'IMAGE'.
169
- `Valid formats <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat>`_
170
- file_name : str, default=None
171
- The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults ot the reportName parameter value.
172
- bookmark_name : str, default=None
173
- The name (GUID) of a bookmark within the report.
174
- page_name : str, default=None
175
- The name (GUID) of the report page.
176
- visual_name : str, default=None
177
- The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter.
178
- report_filter : str, default=None
179
- A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples.
180
- workspace : str | uuid.UUID, default=None
181
- The Fabric workspace name or ID.
182
- Defaults to None which resolves to the workspace of the attached lakehouse
183
- or if no lakehouse attached, resolves to the workspace of the notebook.
184
- """
185
-
186
- if not lakehouse_attached():
187
- raise ValueError(
188
- f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
189
- )
190
-
191
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
192
-
193
- if isinstance(page_name, str):
194
- page_name = [page_name]
195
- if isinstance(visual_name, str):
196
- visual_name = [visual_name]
197
-
198
- if bookmark_name is not None and (page_name is not None or visual_name is not None):
199
- raise ValueError(
200
- f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
201
- )
202
-
203
- if visual_name is not None and page_name is None:
204
- raise ValueError(
205
- f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
206
- )
207
-
208
- validFormats = {
209
- "ACCESSIBLEPDF": ".pdf",
210
- "CSV": ".csv",
211
- "DOCX": ".docx",
212
- "MHTML": ".mhtml",
213
- "PDF": ".pdf",
214
- "PNG": ".png",
215
- "PPTX": ".pptx",
216
- "XLSX": ".xlsx",
217
- "XML": ".xml",
218
- "BMP": ".bmp",
219
- "EMF": ".emf",
220
- "GIF": ".gif",
221
- "JPEG": ".jpeg",
222
- "TIFF": ".tiff",
223
- }
224
-
225
- export_format = export_format.upper()
226
- fileExt = validFormats.get(export_format)
227
- if fileExt is None:
228
- raise ValueError(
229
- f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
230
- )
231
-
232
- if file_name is None:
233
- file_name = f"{report}{fileExt}"
234
- else:
235
- file_name = f"{file_name}{fileExt}"
236
-
237
- folderPath = "/lakehouse/default/Files"
238
- filePath = os.path.join(folderPath, file_name)
239
-
240
- dfI = fabric.list_items(workspace=workspace_id)
241
- dfI_filt = dfI[
242
- (dfI["Type"].isin(["Report", "PaginatedReport"]))
243
- & (dfI["Display Name"] == report)
244
- ]
245
-
246
- if len(dfI_filt) == 0:
247
- raise ValueError(
248
- f"{icons.red_dot} The '{report}' report does not exist in the '{workspace_name}' workspace."
249
- )
250
-
251
- reportType = dfI_filt["Type"].iloc[0]
252
-
253
- # Limitations
254
- pbiOnly = ["PNG"]
255
- paginatedOnly = [
256
- "ACCESSIBLEPDF",
257
- "CSV",
258
- "DOCX",
259
- "BMP",
260
- "EMF",
261
- "GIF",
262
- "JPEG",
263
- "TIFF",
264
- "MHTML",
265
- "XLSX",
266
- "XML",
267
- ]
268
-
269
- if reportType == "Report" and export_format in paginatedOnly:
270
- raise ValueError(
271
- f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
272
- )
273
-
274
- if reportType == "PaginatedReport" and export_format in pbiOnly:
275
- raise ValueError(
276
- f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
277
- )
278
-
279
- if reportType == "PaginatedReport" and (
280
- bookmark_name is not None or page_name is not None or visual_name is not None
281
- ):
282
- raise ValueError(
283
- f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
284
- )
285
-
286
- reportId = dfI_filt["Id"].iloc[0]
287
-
288
- if (
289
- export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
290
- and reportType == "PaginatedReport"
291
- ):
292
- request_body = {
293
- "format": "IMAGE",
294
- "paginatedReportConfiguration": {
295
- "formatSettings": {"OutputFormat": export_format.lower()}
296
- },
297
- }
298
- elif bookmark_name is None and page_name is None and visual_name is None:
299
- request_body = {"format": export_format}
300
- elif bookmark_name is not None:
301
- if reportType == "Report":
302
- request_body = {
303
- "format": export_format,
304
- "powerBIReportConfiguration": {
305
- "defaultBookmark": {"name": bookmark_name}
306
- },
307
- }
308
- elif page_name is not None and visual_name is None:
309
- if reportType == "Report":
310
- request_body = {"format": export_format, "powerBIReportConfiguration": {}}
311
-
312
- request_body["powerBIReportConfiguration"]["pages"] = []
313
- dfPage = list_report_pages(report=report, workspace=workspace_id)
314
-
315
- for page in page_name:
316
- dfPage_filt = dfPage[dfPage["Page ID"] == page]
317
- if len(dfPage_filt) == 0:
318
- raise ValueError(
319
- f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace_name}' workspace."
320
- )
321
-
322
- page_dict = {"pageName": page}
323
- request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
324
-
325
- elif page_name is not None and visual_name is not None:
326
- if len(page_name) != len(visual_name):
327
- raise ValueError(
328
- f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
329
- )
330
-
331
- if reportType == "Report":
332
- request_body = {"format": export_format, "powerBIReportConfiguration": {}}
333
-
334
- request_body["powerBIReportConfiguration"]["pages"] = []
335
- dfVisual = list_report_visuals(report=report, workspace=workspace_id)
336
- a = 0
337
- for page in page_name:
338
- visual = visual_name[a]
339
-
340
- dfVisual_filt = dfVisual[
341
- (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
342
- ]
343
- if len(dfVisual_filt) == 0:
344
- raise ValueError(
345
- f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace_name}' workspace."
346
- )
347
-
348
- page_dict = {"pageName": page, "visualName": visual}
349
- request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
350
- a += 1
351
-
352
- # Transform and add report filter if it is specified
353
- if report_filter is not None and reportType == "Report":
354
- reportFilter = generate_embedded_filter(filter=report_filter)
355
- report_level_filter = {"filter": reportFilter}
356
-
357
- if "powerBIReportConfiguration" not in request_body:
358
- request_body["powerBIReportConfiguration"] = {}
359
- request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
360
- report_level_filter
361
- ]
362
-
363
- base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
364
- response = _base_api(
365
- request=f"{base_url}/ExportTo",
366
- method="post",
367
- payload=request_body,
368
- status_codes=202,
369
- )
370
- export_id = json.loads(response.content).get("id")
371
-
372
- get_status_url = f"{base_url}/exports/{export_id}"
373
- response = _base_api(request=get_status_url, status_codes=[200, 202])
374
- response_body = json.loads(response.content)
375
- while response_body["status"] not in ["Succeeded", "Failed"]:
376
- time.sleep(3)
377
- response = _base_api(request=get_status_url, status_codes=[200, 202])
378
- response_body = json.loads(response.content)
379
- if response_body["status"] == "Failed":
380
- raise ValueError(
381
- f"{icons.red_dot} The export for the '{report}' report within the '{workspace_name}' workspace in the '{export_format}' format has failed."
382
- )
383
- else:
384
- response = _base_api(request=f"{get_status_url}/file")
385
- print(
386
- f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace to the lakehouse..."
387
- )
388
- with open(filePath, "wb") as export_file:
389
- export_file.write(response.content)
390
- print(
391
- f"{icons.green_dot} The '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace has been saved to the following location: '{filePath}'."
392
- )
393
-
394
-
395
144
  def clone_report(
396
145
  report: str,
397
146
  cloned_report: str,
@@ -726,7 +475,6 @@ def translate_report_titles(
726
475
  or if no lakehouse attached, resolves to the workspace of the notebook.
727
476
  """
728
477
  from synapse.ml.services import Translate
729
- from pyspark.sql import SparkSession
730
478
 
731
479
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
732
480
 
@@ -738,7 +486,7 @@ def translate_report_titles(
738
486
 
739
487
  reportJson = get_report_json(report=report, workspace=workspace_id)
740
488
  dfV = list_report_visuals(report=report, workspace=workspace_id)
741
- spark = SparkSession.builder.getOrCreate()
489
+ spark = _create_spark_session()
742
490
  df = spark.createDataFrame(dfV)
743
491
  columnToTranslate = "Title"
744
492
 
@@ -58,7 +58,7 @@ def report_rebind(
58
58
  _base_api(
59
59
  request=f"v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
60
60
  method="post",
61
- json=payload,
61
+ payload=payload,
62
62
  )
63
63
 
64
64
  print(