semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (41)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +9 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +41 -31
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +27 -1
  5. sempy_labs/_capacity_migration.py +3 -2
  6. sempy_labs/_dax.py +17 -3
  7. sempy_labs/_delta_analyzer.py +279 -127
  8. sempy_labs/_eventhouses.py +70 -1
  9. sempy_labs/_generate_semantic_model.py +30 -9
  10. sempy_labs/_helper_functions.py +30 -1
  11. sempy_labs/_job_scheduler.py +226 -2
  12. sempy_labs/_list_functions.py +40 -16
  13. sempy_labs/_model_bpa.py +15 -0
  14. sempy_labs/_model_bpa_rules.py +12 -2
  15. sempy_labs/_semantic_models.py +117 -0
  16. sempy_labs/_sql.py +73 -6
  17. sempy_labs/_sqldatabase.py +227 -0
  18. sempy_labs/admin/__init__.py +49 -8
  19. sempy_labs/admin/_activities.py +166 -0
  20. sempy_labs/admin/_apps.py +143 -0
  21. sempy_labs/admin/_basic_functions.py +32 -652
  22. sempy_labs/admin/_capacities.py +250 -0
  23. sempy_labs/admin/_datasets.py +184 -0
  24. sempy_labs/admin/_domains.py +1 -1
  25. sempy_labs/admin/_items.py +3 -1
  26. sempy_labs/admin/_reports.py +165 -0
  27. sempy_labs/admin/_scanner.py +0 -1
  28. sempy_labs/admin/_shared.py +74 -0
  29. sempy_labs/admin/_tenant.py +489 -0
  30. sempy_labs/directlake/_dl_helper.py +0 -1
  31. sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
  32. sempy_labs/graph/_teams.py +1 -1
  33. sempy_labs/graph/_users.py +9 -1
  34. sempy_labs/lakehouse/_shortcuts.py +28 -15
  35. sempy_labs/report/__init__.py +3 -1
  36. sempy_labs/report/_download_report.py +4 -1
  37. sempy_labs/report/_export_report.py +272 -0
  38. sempy_labs/report/_report_functions.py +9 -261
  39. sempy_labs/tom/_model.py +278 -29
  40. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
  41. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
sempy_labs/lakehouse/_shortcuts.py
@@ -18,6 +18,8 @@ def create_shortcut_onelake(
     destination_lakehouse: str,
     destination_workspace: Optional[str | UUID] = None,
     shortcut_name: Optional[str] = None,
+    source_path: str = "Tables",
+    destination_path: str = "Tables",
 ):
     """
     Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.
@@ -40,8 +42,23 @@ def create_shortcut_onelake(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     shortcut_name : str, default=None
         The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value.
+    source_path : str, default="Tables"
+        A string representing the full path to the table/file in the source lakehouse, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+    destination_path: str, default="Tables"
+        A string representing the full path where the shortcut is created, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
     """

+    if not (source_path.startswith("Files") or source_path.startswith("Tables")):
+        raise ValueError(
+            f"{icons.red_dot} The 'source_path' parameter must be either 'Files' or 'Tables'."
+        )
+    if not (
+        destination_path.startswith("Files") or destination_path.startswith("Tables")
+    ):
+        raise ValueError(
+            f"{icons.red_dot} The 'destination_path' parameter must be either 'Files' or 'Tables'."
+        )
+
     (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
         source_workspace
     )
@@ -50,16 +67,9 @@ def create_shortcut_onelake(
         item_id=source_lakehouse_id, type="Lakehouse", workspace=source_workspace_id
     )

-    if destination_workspace is None:
-        destination_workspace_name = source_workspace_name
-        destination_workspace_id = source_workspace_id
-    else:
-        destination_workspace_name = destination_workspace
-        destination_workspace_id = fabric.resolve_workspace_id(
-            destination_workspace_name
-        )
-
-    destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)
+    (destination_workspace_name, destination_workspace_id) = resolve_workspace_name_and_id(
+        destination_workspace
+    )
     destination_lakehouse_id = resolve_lakehouse_id(
         destination_lakehouse, destination_workspace
     )
@@ -72,16 +82,16 @@ def create_shortcut_onelake(
     if shortcut_name is None:
         shortcut_name = table_name

-    table_path = f"Tables/{table_name}"
+    source_full_path = f"{source_path}/{table_name}"

     payload = {
-        "path": "Tables",
+        "path": destination_path,
         "name": shortcut_name.replace(" ", ""),
         "target": {
             "oneLake": {
                 "workspaceId": source_workspace_id,
                 "itemId": source_lakehouse_id,
-                "path": table_path,
+                "path": source_full_path,
             }
         },
     }
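
The new source_path and destination_path parameters feed directly into the shortcut payload above, making it possible to shortcut tables or files that live under nested folders. A minimal usage sketch, assuming a Fabric notebook with the 0.9.4 wheel installed; the workspace, lakehouse, and folder names are placeholders:

from sempy_labs.lakehouse import create_shortcut_onelake

# Placeholder names; both *_path values must start with "Files" or "Tables",
# otherwise the new validation raises a ValueError.
create_shortcut_onelake(
    table_name="DimDate",
    source_lakehouse="SourceLakehouse",
    source_workspace="Source Workspace",
    destination_lakehouse="DestLakehouse",
    destination_workspace="Dest Workspace",
    source_path="Tables/Gold",            # new in 0.9.4
    destination_path="Tables/Shortcuts",  # new in 0.9.4
)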
@@ -94,7 +104,7 @@ def create_shortcut_onelake(
     )

     print(
-        f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name} workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
+        f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
     )


@@ -178,6 +188,7 @@ def create_shortcut(

 def delete_shortcut(
     shortcut_name: str,
+    shortcut_path: str = "Tables",
     lakehouse: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
 ):
@@ -190,6 +201,8 @@ def delete_shortcut(
     ----------
     shortcut_name : str
         The name of the shortcut.
+    shortcut_path : str = "Tables"
+        The path of the shortcut to be deleted. Must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
     lakehouse : str, default=None
         The Fabric lakehouse name in which the shortcut resides.
         Defaults to None which resolves to the lakehouse attached to the notebook.
@@ -209,7 +222,7 @@ def delete_shortcut(

     client = fabric.FabricRestClient()
     response = client.delete(
-        f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/Tables/{shortcut_name}"
+        f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}"
     )

     if response.status_code != 200:
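
delete_shortcut gains a matching shortcut_path parameter, so shortcuts created outside the top-level Tables folder can now be removed through the REST call shown above. A short sketch with placeholder names:

from sempy_labs.lakehouse import delete_shortcut

# Placeholder names; shortcut_path must start with "Files" or "Tables".
delete_shortcut(
    shortcut_name="DimDate",
    shortcut_path="Tables/Shortcuts",
    lakehouse="DestLakehouse",
    workspace="Dest Workspace",
)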
sempy_labs/report/__init__.py
@@ -14,7 +14,6 @@ from sempy_labs.report._download_report import download_report
 from sempy_labs.report._report_functions import (
     get_report_json,
     # report_dependency_tree,
-    export_report,
     clone_report,
     launch_report,
     # translate_report_titles
@@ -25,6 +24,9 @@ from sempy_labs.report._report_rebind import (
 )
 from sempy_labs.report._report_bpa_rules import report_bpa_rules
 from sempy_labs.report._report_bpa import run_report_bpa
+from sempy_labs.report._export_report import (
+    export_report,
+)

 __all__ = [
     "create_report_from_reportjson",
sempy_labs/report/_download_report.py
@@ -6,6 +6,7 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     _base_api,
     resolve_item_id,
+    _mount,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from uuid import UUID
@@ -63,7 +64,9 @@ def download_report(
     )

     # Save file to the attached lakehouse
-    with open(f"/lakehouse/default/Files/{file_name}.pbix", "wb") as file:
+    local_path = _mount()
+    save_file = f"{local_path}/Files/{file_name}.pbix"
+    with open(save_file, "wb") as file:
         file.write(response.content)

     print(
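
download_report now resolves the target folder through the internal _mount() helper instead of the hardcoded /lakehouse/default/Files path. Calling code should be unaffected; a minimal sketch, where the report name is a placeholder and the parameter names (report, file_name) are assumed from the surrounding diff:

from sempy_labs.report import download_report

# The .pbix is written under the mounted lakehouse's Files folder.
download_report(report="Sales Report", file_name="SalesReport")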
sempy_labs/report/_export_report.py (new file)
@@ -0,0 +1,272 @@
+import sempy.fabric as fabric
+import json
+import os
+import time
+from sempy_labs._helper_functions import (
+    generate_embedded_filter,
+    resolve_workspace_name_and_id,
+    _base_api,
+    _mount,
+)
+from typing import Optional
+from sempy._utils._log import log
+import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs.report._report_functions import (
+    list_report_visuals,
+    list_report_pages,
+)
+
+
+@log
+def export_report(
+    report: str,
+    export_format: str,
+    file_name: Optional[str] = None,
+    bookmark_name: Optional[str] = None,
+    page_name: Optional[str] = None,
+    visual_name: Optional[str] = None,
+    report_filter: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
+):
+    """
+    Exports a Power BI report to a file in your lakehouse.
+
+    This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
+
+    Parameters
+    ----------
+    report : str
+        Name of the Power BI report.
+    export_format : str
+        The format in which to export the report. For image formats, enter the file extension in this parameter, not 'IMAGE'.
+        `Valid formats <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat>`_
+    file_name : str, default=None
+        The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults ot the reportName parameter value.
+    bookmark_name : str, default=None
+        The name (GUID) of a bookmark within the report.
+    page_name : str, default=None
+        The name (GUID) of the report page.
+    visual_name : str, default=None
+        The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter.
+    report_filter : str, default=None
+        A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID. This will be the lakehouse to which the export of the report is saved.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    lakehouse_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if isinstance(page_name, str):
+        page_name = [page_name]
+    if isinstance(visual_name, str):
+        visual_name = [visual_name]
+
+    if bookmark_name is not None and (page_name is not None or visual_name is not None):
+        raise ValueError(
+            f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
+        )
+
+    if visual_name is not None and page_name is None:
+        raise ValueError(
+            f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
+        )
+
+    valid_formats = {
+        "ACCESSIBLEPDF": ".pdf",
+        "CSV": ".csv",
+        "DOCX": ".docx",
+        "MHTML": ".mhtml",
+        "PDF": ".pdf",
+        "PNG": ".png",
+        "PPTX": ".pptx",
+        "XLSX": ".xlsx",
+        "XML": ".xml",
+        "BMP": ".bmp",
+        "EMF": ".emf",
+        "GIF": ".gif",
+        "JPEG": ".jpeg",
+        "TIFF": ".tiff",
+    }
+
+    export_format = export_format.upper()
+    file_ext = valid_formats.get(export_format)
+    if file_ext is None:
+        raise ValueError(
+            f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {valid_formats}"
+        )
+
+    if file_name is None:
+        file_name = f"{report}{file_ext}"
+    else:
+        file_name = f"{file_name}{file_ext}"
+
+    dfI = fabric.list_items(workspace=workspace)
+    dfI_filt = dfI[
+        (dfI["Type"].isin(["Report", "PaginatedReport"]))
+        & (dfI["Display Name"] == report)
+    ]
+
+    if dfI_filt.empty:
+        raise ValueError(
+            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace_name}' workspace."
+        )
+
+    report_type = dfI_filt["Type"].iloc[0]
+
+    # Limitations
+    pbiOnly = ["PNG"]
+    paginatedOnly = [
+        "ACCESSIBLEPDF",
+        "CSV",
+        "DOCX",
+        "BMP",
+        "EMF",
+        "GIF",
+        "JPEG",
+        "TIFF",
+        "MHTML",
+        "XLSX",
+        "XML",
+    ]
+
+    if report_type == "Report" and export_format in paginatedOnly:
+        raise ValueError(
+            f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
+        )
+
+    if report_type == "PaginatedReport" and export_format in pbiOnly:
+        raise ValueError(
+            f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
+        )
+
+    if report_type == "PaginatedReport" and (
+        bookmark_name is not None or page_name is not None or visual_name is not None
+    ):
+        raise ValueError(
+            f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
+        )
+
+    reportId = dfI_filt["Id"].iloc[0]
+
+    if (
+        export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
+        and report_type == "PaginatedReport"
+    ):
+        request_body = {
+            "format": "IMAGE",
+            "paginatedReportConfiguration": {
+                "formatSettings": {"OutputFormat": export_format.lower()}
+            },
+        }
+    elif bookmark_name is None and page_name is None and visual_name is None:
+        request_body = {"format": export_format}
+    elif bookmark_name is not None:
+        if report_type == "Report":
+            request_body = {
+                "format": export_format,
+                "powerBIReportConfiguration": {
+                    "defaultBookmark": {"name": bookmark_name}
+                },
+            }
+    elif page_name is not None and visual_name is None:
+        if report_type == "Report":
+            request_body = {"format": export_format, "powerBIReportConfiguration": {}}
+
+            request_body["powerBIReportConfiguration"]["pages"] = []
+            dfPage = list_report_pages(report=report, workspace=workspace_id)
+
+            for page in page_name:
+                dfPage_filt = dfPage[dfPage["Page ID"] == page]
+                if len(dfPage_filt) == 0:
+                    raise ValueError(
+                        f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace_name}' workspace."
+                    )
+
+                page_dict = {"pageName": page}
+                request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
+
+    elif page_name is not None and visual_name is not None:
+        if len(page_name) != len(visual_name):
+            raise ValueError(
+                f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
+            )
+
+        if report_type == "Report":
+            request_body = {"format": export_format, "powerBIReportConfiguration": {}}
+
+            request_body["powerBIReportConfiguration"]["pages"] = []
+            dfVisual = list_report_visuals(report=report, workspace=workspace_id)
+            a = 0
+            for page in page_name:
+                visual = visual_name[a]
+
+                dfVisual_filt = dfVisual[
+                    (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
+                ]
+                if len(dfVisual_filt) == 0:
+                    raise ValueError(
+                        f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace_name}' workspace."
+                    )
+
+                page_dict = {"pageName": page, "visualName": visual}
+                request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
+                a += 1
+
+    # Transform and add report filter if it is specified
+    if report_filter is not None and report_type == "Report":
+        reportFilter = generate_embedded_filter(filter=report_filter)
+        report_level_filter = {"filter": reportFilter}
+
+        if "powerBIReportConfiguration" not in request_body:
+            request_body["powerBIReportConfiguration"] = {}
+        request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
+            report_level_filter
+        ]
+
+    base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
+    response = _base_api(
+        request=f"{base_url}/ExportTo",
+        method="post",
+        payload=request_body,
+        status_codes=202,
+    )
+    export_id = json.loads(response.content).get("id")
+
+    get_status_url = f"{base_url}/exports/{export_id}"
+    response = _base_api(request=get_status_url, status_codes=[200, 202])
+    response_body = json.loads(response.content)
+    while response_body["status"] not in ["Succeeded", "Failed"]:
+        time.sleep(3)
+        response = _base_api(request=get_status_url, status_codes=[200, 202])
+        response_body = json.loads(response.content)
+    if response_body["status"] == "Failed":
+        raise ValueError(
+            f"{icons.red_dot} The export for the '{report}' report within the '{workspace_name}' workspace in the '{export_format}' format has failed."
+        )
+    else:
+        response = _base_api(request=f"{get_status_url}/file")
+        print(
+            f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace to the lakehouse..."
+        )
+
+        local_path = _mount(lakehouse=lakehouse, workspace=lakehouse_workspace)
+        folder_path = f"{local_path}/Files"
+        file_path = os.path.join(folder_path, file_name)
+
+        with open(file_path, "wb") as export_file:
+            export_file.write(response.content)
+        print(
+            f"{icons.green_dot} The '{export_format}' export for the '{report}' report within the '{workspace_name}' workspace has been saved '{file_name}' in the '{lakehouse}' within the '{lakehouse_workspace}' workspace."
+        )