semantic-link-labs 0.4.2__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA +1 -1
- semantic_link_labs-0.5.0.dist-info/RECORD +53 -0
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +27 -3
- sempy_labs/_ai.py +12 -32
- sempy_labs/_clear_cache.py +1 -3
- sempy_labs/_connections.py +39 -38
- sempy_labs/_generate_semantic_model.py +9 -14
- sempy_labs/_helper_functions.py +3 -12
- sempy_labs/_icons.py +1 -0
- sempy_labs/_list_functions.py +915 -391
- sempy_labs/_model_auto_build.py +2 -4
- sempy_labs/_model_bpa.py +26 -30
- sempy_labs/_model_dependencies.py +7 -13
- sempy_labs/_one_lake_integration.py +2 -5
- sempy_labs/_query_scale_out.py +12 -30
- sempy_labs/_refresh_semantic_model.py +5 -15
- sempy_labs/_translations.py +1 -1
- sempy_labs/_vertipaq.py +3 -10
- sempy_labs/directlake/_directlake_schema_compare.py +3 -9
- sempy_labs/directlake/_directlake_schema_sync.py +2 -6
- sempy_labs/directlake/_fallback.py +2 -6
- sempy_labs/directlake/_get_shared_expression.py +3 -9
- sempy_labs/directlake/_guardrails.py +3 -5
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -4
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -7
- sempy_labs/directlake/_update_directlake_partition_entity.py +2 -8
- sempy_labs/directlake/_warm_cache.py +5 -8
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -5
- sempy_labs/lakehouse/_lakehouse.py +1 -3
- sempy_labs/lakehouse/_shortcuts.py +2 -5
- sempy_labs/migration/_create_pqt_file.py +4 -13
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -6
- sempy_labs/migration/_migration_validation.py +4 -0
- sempy_labs/migration/_refresh_calc_tables.py +2 -0
- sempy_labs/report/_generate_report.py +2 -6
- sempy_labs/report/_report_functions.py +30 -73
- sempy_labs/report/_report_rebind.py +39 -39
- sempy_labs/tom/_model.py +141 -183
- semantic_link_labs-0.4.2.dist-info/RECORD +0 -53
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/top_level.txt +0 -0
@@ -53,20 +53,14 @@ def update_direct_lake_partition_entity(
         entity_name = [entity_name]

     if len(table_name) != len(entity_name):
-        print(
-            f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length.")

     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:

         if not tom.is_direct_lake():
-            print(
-                f"{icons.yellow_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
-            )
-            return
+            raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode.")

         for tName in table_name:
             i = table_name.index(tName)
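This hunk shows the pattern that repeats throughout 0.5.0: validation failures that used to print a message and silently return None now raise ValueError. A hedged caller-side sketch (dataset and table names are hypothetical, and it assumes the function is re-exported from sempy_labs.directlake as the file layout suggests):

    # Hypothetical migration sketch: code written against 0.4.2 that relied on
    # printed output must now catch ValueError instead.
    from sempy_labs.directlake import update_direct_lake_partition_entity

    try:
        update_direct_lake_partition_entity(
            dataset="Sales Model",             # hypothetical dataset
            table_name=["DimDate", "DimGeo"],  # two tables...
            entity_name=["dim_date"],          # ...one entity: lengths differ
        )
    except ValueError as err:
        print(f"Input validation failed: {err}")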
@@ -45,10 +45,7 @@ def warm_direct_lake_cache_perspective(

     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        print(
-            f"{icons.red_dot} The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode.")

     dfPersp = fabric.list_perspectives(dataset=dataset, workspace=workspace)
     dfPersp["DAX Object Name"] = format_dax_object_name(
@@ -57,10 +54,8 @@ def warm_direct_lake_cache_perspective(
     dfPersp_filt = dfPersp[dfPersp["Perspective Name"] == perspective]

     if len(dfPersp_filt) == 0:
-        print(
-            f"{icons.red_dot} The '{perspective} perspective does not exist or contains no objects within the '{dataset}' semantic model in the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{perspective} perspective does not exist or contains no objects within the '{dataset}' semantic model in the '{workspace}' workspace.")
+
     dfPersp_c = dfPersp_filt[dfPersp_filt["Object Type"] == "Column"]

     column_values = dfPersp_c["DAX Object Name"].tolist()
@@ -166,6 +161,8 @@ def warm_direct_lake_cache_isresident(
     Returns a pandas dataframe showing the columns that have been put into memory.
     """

+    workspace = fabric.resolve_workspace_name(workspace)
+
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
         raise ValueError(
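Several hunks in this release collapse a manual None-check into a single fabric.resolve_workspace_name call. A minimal sketch of the equivalence, assuming sempy's resolver falls back to the current workspace when passed None (which is what these call sites rely on):

    import sempy.fabric as fabric

    def resolve_old(workspace):
        # 0.4.2 pattern: explicit branch plus two helper calls
        if workspace is None:
            workspace_id = fabric.get_workspace_id()
            workspace = fabric.resolve_workspace_name(workspace_id)
        return workspace

    def resolve_new(workspace):
        # 0.5.0 pattern: the resolver handles the None fallback itself
        return fabric.resolve_workspace_name(workspace)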
@@ -25,7 +25,7 @@ def get_lakehouse_tables(
     extended: Optional[bool] = False,
     count_rows: Optional[bool] = False,
     export: Optional[bool] = False,
-):
+) -> pd.DataFrame:
     """
     Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.

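The new -> pd.DataFrame annotation documents the existing return value for type checkers and IDEs; runtime behavior is unchanged. A usage sketch (the lakehouse name and the keyword argument are hypothetical, import path assumed from the file layout):

    from sempy_labs.lakehouse import get_lakehouse_tables

    # Static tooling now knows this is a DataFrame without inspecting the body.
    df = get_lakehouse_tables(lakehouse="MyLakehouse")
    print(df.columns.tolist())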
@@ -174,10 +174,8 @@ def get_lakehouse_tables(
     if export:
         lakeAttach = lakehouse_attached()
         if lakeAttach is False:
-            print(
-                f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-            )
-            return
+            raise ValueError(f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+
         spark = SparkSession.builder.getOrCreate()

         lakehouse_id = fabric.get_lakehouse_id()
@@ -50,9 +50,7 @@ def optimize_lakehouse_tables(
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
     from delta import DeltaTable

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
@@ -114,10 +114,7 @@ def create_shortcut(
     sourceValues = list(source_titles.keys())

     if source not in sourceValues:
-        print(
-            f"{icons.red_dot} The 'source' parameter must be one of these values: {sourceValues}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The 'source' parameter must be one of these values: {sourceValues}.")

     sourceTitle = source_titles[source]

@@ -195,4 +192,4 @@ def delete_shortcut(
             f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace}' workspace has been deleted."
         )
     else:
-        print(f"{icons.red_dot} The '{shortcut_name}' has not been deleted.")
+        raise ValueError(f"{icons.red_dot} The '{shortcut_name}' has not been deleted.")
@@ -11,7 +11,7 @@ import sempy_labs._icons as icons

 @log
 def create_pqt_file(
-    dataset: str, workspace: Optional[str] = None, file_name: Optional[str] = None
+    dataset: str, workspace: Optional[str] = None, file_name: Optional[str] = 'PowerQueryTemplate'
 ):
     """
     Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is saved within the Files section of your lakehouse.
@@ -24,25 +24,16 @@ def create_pqt_file(
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    file_name : str, default=None
+    file_name : str, default='PowerQueryTemplate'
         The name of the Power Query Template file to be generated.
-        Defaults to None which resolves to 'PowerQueryTemplate'.
     """

-    if file_name is None:
-        file_name = "PowerQueryTemplate"
-
     lakeAttach = lakehouse_attached()

     if lakeAttach is False:
-        print(
-            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     folderPath = "/lakehouse/default/Files"
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
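Promoting the 'PowerQueryTemplate' fallback from an in-body None check to a signature default makes the value visible in help() and the rendered docs without changing behavior. A usage sketch (dataset name hypothetical, import path assumed from the file layout):

    from sempy_labs.migration import create_pqt_file

    create_pqt_file(dataset="Sales Model")                             # -> PowerQueryTemplate.pqt
    create_pqt_file(dataset="Sales Model", file_name="SalesTemplate")  # -> SalesTemplate.pqt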
@@ -38,11 +38,7 @@ def migrate_model_objects_to_semantic_model(
     import Microsoft.AnalysisServices.Tabular as TOM
     import System

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    workspace = fabric.resolve_workspace_name(workspace)

     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
@@ -238,7 +234,7 @@ def migrate_model_objects_to_semantic_model(
                 f"\n{icons.in_progress} Updating calculation group column name..."
             )
             dfC_filt = dfC[
-                (dfC["Table Name"] == cgName) & (dfC["Hidden"]
+                (dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)
             ]
             colName = dfC_filt["Column Name"].iloc[0]
             tom.model.Tables[cgName].Columns["Name"].Name = colName
@@ -36,6 +36,10 @@ def migration_validation(
     A pandas dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully.
     """

+    workspace = fabric.resolve_workspace_name(workspace)
+    if new_dataset_workspace is None:
+        new_dataset_workspace = workspace
+
     dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
     dfB = list_semantic_model_objects(
         dataset=new_dataset, workspace=new_dataset_workspace
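The added lines make both workspace arguments optional in practice: workspace resolves to the current workspace and new_dataset_workspace falls back to it. A hedged usage sketch (model names hypothetical, import path assumed from the file layout):

    from sempy_labs.migration import migration_validation

    # Both workspaces now default sensibly when the models live side by side.
    df = migration_validation(dataset="Sales Model", new_dataset="Sales Model DL")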
@@ -41,10 +41,7 @@ def create_report_from_reportjson(
     dfI_model = dfI_m[(dfI_m["Display Name"] == dataset)]

     if len(dfI_model) == 0:
-        print(
-            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace.")

     datasetId = dfI_model["Id"].iloc[0]

@@ -172,8 +169,7 @@ def update_report_from_reportjson(
     dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]

     if len(dfR_filt) == 0:
-        print(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
-        return
+        raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")

     reportId = dfR_filt["Id"].iloc[0]
     client = fabric.FabricRestClient()
@@ -55,10 +55,7 @@ def get_report_json(
     dfI_filt = dfI[(dfI["Display Name"] == report)]

     if len(dfI_filt) == 0:
-        print(
-            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")

     itemId = dfI_filt["Id"].iloc[0]
     response = client.post(
@@ -74,10 +71,7 @@ def get_report_json(
     if save_to_file_name is not None:
         lakeAttach = lakehouse_attached()
         if lakeAttach is False:
-            print(
-                f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-            )
-            return
+            raise ValueError(f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")

         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
@@ -191,10 +185,7 @@ def export_report(
     lakeAttach = lakehouse_attached()

     if lakeAttach is False:
-        print(
-            f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

@@ -204,15 +195,10 @@ def export_report(
         visual_name = [visual_name]

     if bookmark_name is not None and (page_name is not None or visual_name is not None):
-        print(
-            f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set.")
+
     if visual_name is not None and page_name is None:
-        print(
-            f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set.")

     validFormats = {
         "ACCESSIBLEPDF": ".pdf",
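Both parameter guards now fail loudly. A sketch of the combinations they enforce (report, format, and bookmark names hypothetical, import path assumed from the file layout):

    from sempy_labs.report import export_report

    # Valid: a bookmark alone, or pages/visuals alone.
    export_report(report="Sales Report", export_format="PDF", bookmark_name="bm1")

    # Invalid since 0.5.0: a bookmark combined with a page raises ValueError
    # (0.4.2 printed a message and returned None).
    try:
        export_report(report="Sales Report", export_format="PDF",
                      bookmark_name="bm1", page_name="ReportSection1")
    except ValueError as err:
        print(err)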
@@ -235,10 +221,7 @@ def export_report(

     fileExt = validFormats.get(export_format)
     if fileExt is None:
-        print(
-            f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}")

     if file_name is None:
         file_name = report + fileExt
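The extension lookup doubles as the validity check: validFormats.get returns None for unknown formats, which now triggers the raise. A minimal standalone sketch (the dictionary here is abbreviated; the package's real mapping is longer):

    validFormats = {"ACCESSIBLEPDF": ".pdf", "PDF": ".pdf", "PNG": ".png"}  # abbreviated

    export_format = "BMP"
    fileExt = validFormats.get(export_format)
    if fileExt is None:
        raise ValueError(f"'{export_format}' is not a valid format. Options: {validFormats}")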
@@ -255,10 +238,7 @@ def export_report(
     ]

     if len(dfI_filt) == 0:
-        print(
-            f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")

     reportType = dfI_filt["Type"].iloc[0]

@@ -279,23 +259,15 @@ def export_report(
     ]

     if reportType == "Report" and export_format in paginatedOnly:
-        print(
-            f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports.")
+
     if reportType == "PaginatedReport" and export_format in pbiOnly:
-        print(
-            f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports.")

     if reportType == "PaginatedReport" and (
         bookmark_name is not None or page_name is not None or visual_name is not None
     ):
-        print(
-            f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports.")

     reportId = dfI_filt["Id"].iloc[0]
     client = fabric.PowerBIRestClient()
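These three guards encode which export formats belong to which report type. A hedged standalone sketch of the rule (the membership lists below are illustrative, not the package's actual sets):

    paginatedOnly = ["XLSX", "DOCX"]  # illustrative members only
    pbiOnly = ["PNG"]                 # illustrative members only

    def check_format(reportType: str, export_format: str) -> None:
        # Mirrors the guards above: wrong format/report-type pairings raise.
        if reportType == "Report" and export_format in paginatedOnly:
            raise ValueError(f"'{export_format}' is only supported for paginated reports.")
        if reportType == "PaginatedReport" and export_format in pbiOnly:
            raise ValueError(f"'{export_format}' is only supported for Power BI reports.")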
@@ -332,19 +304,15 @@ def export_report(
             for page in page_name:
                 dfPage_filt = dfPage[dfPage["Page ID"] == page]
                 if len(dfPage_filt) == 0:
-                    print(
-                        f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace."
-                    )
-                    return
+                    raise ValueError(f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace.")
+
                 page_dict = {"pageName": page}
                 request_body["powerBIReportConfiguration"]["pages"].append(page_dict)

         elif page_name is not None and visual_name is not None:
             if len(page_name) != len(visual_name):
-                print(
-                    f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
-                )
-                return
+                raise ValueError(f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'.")
+
             if reportType == "Report":
                 request_body = {"format": export_format, "powerBIReportConfiguration": {}}

@@ -356,10 +324,8 @@ def export_report(
                     (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
                 ]
                 if len(dfVisual_filt) == 0:
-                    print(
-                        f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace."
-                    )
-                    return
+                    raise ValueError(f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace.")
+
                 page_dict = {"pageName": page, "visualName": visual}
                 request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
                 a += 1
@@ -393,9 +359,7 @@ def export_report(
         )
         response_body = json.loads(response.content)
         if response_body["status"] == "Failed":
-            print(
-                f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
-            )
+            raise ValueError(f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed.")
         else:
             response = client.get(
                 f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
@@ -447,10 +411,7 @@ def clone_report(
     dfI_filt = dfI[(dfI["Display Name"] == report)]

     if len(dfI_filt) == 0:
-        print(
-            f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace.")

     reportId = resolve_report_id(report, workspace)

@@ -462,8 +423,8 @@ def clone_report(
     dfW_filt = dfW[dfW["Name"] == target_workspace]

     if len(dfW_filt) == 0:
-        print(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
-        return
+        raise ValueError(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
+
     target_workspace_id = dfW_filt["Id"].iloc[0]

     if target_dataset is None:
@@ -478,10 +439,8 @@ def clone_report(
     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]

     if len(dfD_filt) == 0:
-        print(
-            f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace.")
+
     target_dataset_id = dfD_filt["Dataset Id"].iloc[0]

     client = fabric.PowerBIRestClient()
@@ -508,9 +467,7 @@ def clone_report(
             f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the '{target_workspace}' workspace using the '{target_dataset}' semantic model."
         )
     else:
-        print(
-            f"{icons.red_dot} POST request failed with status code: {response.status_code}"
-        )
+        raise ValueError(f"{icons.red_dot} POST request failed with status code: {response.status_code}")


 def launch_report(report: str, workspace: Optional[str] = None):
@@ -573,14 +530,14 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
     reportJson = get_report_json(report=report, workspace=workspace)

     for section in reportJson["sections"]:
-        pageID = section["name"]
-        pageName = section["displayName"]
+        pageID = section.get("name")
+        pageName = section.get("displayName")
         # pageFilters = section['filters']
-        pageWidth = section["width"]
-        pageHeight = section["height"]
+        pageWidth = section.get("width")
+        pageHeight = section.get("height")
         visualCount = len(section["visualContainers"])
         pageHidden = False
-        pageConfig = section["config"]
+        pageConfig = section.get("config")
         pageConfigJson = json.loads(pageConfig)

         try:
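Switching from bracket indexing to dict.get makes page parsing tolerant of report JSON that omits a key: absent keys yield None instead of KeyError. A plain-Python illustration:

    section = {"name": "ReportSection1", "visualContainers": []}  # minimal example

    pageID = section.get("name")           # 'ReportSection1'
    pageName = section.get("displayName")  # None, rather than KeyError

Note the trade-off visible in the hunk: a section with no 'config' key now reaches json.loads(None) a few lines later, so the failure mode moves rather than disappears.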
@@ -1,13 +1,13 @@
 import sempy
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import resolve_dataset_id, resolve_report_id
-from typing import Optional
+from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons

 @log
 def report_rebind(
-    report: str,
+    report: str | List[str],
     dataset: str,
     report_workspace: Optional[str] = None,
     dataset_workspace: Optional[str] = None,
@@ -17,8 +17,8 @@ def report_rebind(

     Parameters
     ----------
-    report : str
-        Name of the Power BI report.
+    report : str | List[str]
+        Name(s) of the Power BI report(s).
     dataset : str
         Name of the semantic model.
     report_workspace : str, default=None
@@ -45,26 +45,27 @@ def report_rebind(

     client = fabric.PowerBIRestClient()

-    reportId = resolve_report_id(report=report, workspace=report_workspace)
-    datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
+    if isinstance(report, str):
+        report = [report]
+
+    for rpt in report:
+        reportId = resolve_report_id(report=rpt, workspace=report_workspace)
+        datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)

-    # Prepare API
-    request_body = {"datasetId": datasetId}
+        # Prepare API
+        request_body = {"datasetId": datasetId}

-    response = client.post(
-        f"/v1.0/myorg/groups/{report_workspace_id}/reports/{reportId}/Rebind",
-        json=request_body,
-    )
-
-    if response.status_code == 200:
-        print(
-            f"{icons.green_dot} The '{report}' report has been successfully rebinded to the '{dataset}' semantic model."
-        )
-    else:
-        print(
-            f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace failed to rebind to the '{dataset}' semantic model within the '{dataset_workspace}' workspace."
+        response = client.post(
+            f"/v1.0/myorg/groups/{report_workspace_id}/reports/{reportId}/Rebind",
+            json=request_body,
         )

+        if response.status_code == 200:
+            print(
+                f"{icons.green_dot} The '{rpt}' report has been successfully rebinded to the '{dataset}' semantic model."
+            )
+        else:
+            raise ValueError(f"{icons.red_dot} The '{rpt}' report within the '{report_workspace}' workspace failed to rebind to the '{dataset}' semantic model within the '{dataset_workspace}' workspace.")

 @log
 def report_rebind_all(
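report_rebind now normalizes its first argument to a list and loops, so one call can rebind several reports to the same model. A usage sketch (all names hypothetical, import path assumed from the file layout):

    from sempy_labs.report import report_rebind

    report_rebind(
        report=["Sales Report", "Ops Report"],  # a single string still works
        dataset="Sales Model DL",
        report_workspace="Analytics",
        dataset_workspace="Analytics",
    )

One caveat worth noting: the str | List[str] annotation uses PEP 604 union syntax, which is evaluated when the function is defined and therefore requires Python 3.10+ unless postponed annotation evaluation is enabled.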
@@ -72,7 +73,7 @@ def report_rebind_all(
     new_dataset: str,
     dataset_workspace: Optional[str] = None,
     new_dataset_workpace: Optional[str] = None,
-    report_workspace: Optional[str] = None,
+    report_workspace: Optional[str | List[str]] = None,
 ):
     """
     Rebinds all reports in a workspace which are bound to a specific semantic model to a new semantic model.
@@ -93,8 +94,8 @@ def report_rebind_all(
         The name of the Fabric workspace in which the new semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    report_workspace : str, default=None
-        The name of the Fabric workspace in which the report resides.
+    report_workspace : str | List[str], default=None
+        The name(s) of the Fabric workspace(s) in which the report(s) reside(s).
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -103,11 +104,7 @@ def report_rebind_all(

     """

-    if dataset_workspace is None:
-        dataset_workspace_id = fabric.get_workspace_id()
-        dataset_workspace = fabric.resolve_workspace_name(dataset_workspace_id)
-    else:
-        dataset_workspace_id = fabric.resolve_workspace_id(dataset_workspace)
+    dataset_workspace = fabric.resolve_workspace_name()

     if new_dataset_workpace is None:
         new_dataset_workpace = dataset_workspace
@@ -115,16 +112,19 @@ def report_rebind_all(
     if report_workspace is None:
         report_workspace = dataset_workspace

-
+    if isinstance(report_workspace, str):
+        report_workspace = [report_workspace]

-    dfRep = fabric.list_reports(workspace=report_workspace)
-    dfRep_filt = dfRep[dfRep["Dataset Id"] == datasetId]
+    datasetId = resolve_dataset_id(dataset, dataset_workspace)

-    for i, r in dfRep_filt.iterrows():
-        rptName = r["Name"]
-        report_rebind(
-            report=rptName,
-            dataset=new_dataset,
-            report_workspace=report_workspace,
-            dataset_workspace=new_dataset_workpace,
-        )
+    for rw in report_workspace:
+        dfRep = fabric.list_reports(workspace=rw)
+        dfRep_filt = dfRep[dfRep["Dataset Id"] == datasetId]
+        for i, r in dfRep_filt.iterrows():
+            rptName = r["Name"]
+            report_rebind(
+                report=rptName,
+                dataset=new_dataset,
+                report_workspace=rw,
+                dataset_workspace=new_dataset_workpace,
+            )
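report_rebind_all gains the same list treatment for report_workspace, scanning each workspace for reports bound to the old model and delegating to report_rebind per report. A hedged usage sketch (the leading dataset parameters follow the docstring above but their exact names are partly assumed; all values hypothetical):

    from sempy_labs.report import report_rebind_all

    report_rebind_all(
        dataset="Sales Model",                      # model the reports are bound to now
        new_dataset="Sales Model DL",               # model to rebind them to
        report_workspace=["Analytics", "Finance"],  # workspaces scanned one by one
    )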