semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +38 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
- sempy_labs/_capacities.py +138 -25
- sempy_labs/_capacity_migration.py +161 -60
- sempy_labs/_clear_cache.py +3 -3
- sempy_labs/_data_pipelines.py +54 -0
- sempy_labs/_dataflows.py +4 -0
- sempy_labs/_deployment_pipelines.py +13 -7
- sempy_labs/_environments.py +6 -0
- sempy_labs/_eventhouses.py +6 -0
- sempy_labs/_eventstreams.py +6 -0
- sempy_labs/_external_data_shares.py +190 -0
- sempy_labs/_generate_semantic_model.py +26 -4
- sempy_labs/_git.py +15 -15
- sempy_labs/_helper_functions.py +186 -11
- sempy_labs/_icons.py +55 -22
- sempy_labs/_kql_databases.py +6 -0
- sempy_labs/_kql_querysets.py +6 -0
- sempy_labs/_list_functions.py +6 -3
- sempy_labs/_managed_private_endpoints.py +166 -0
- sempy_labs/_mirrored_warehouses.py +2 -0
- sempy_labs/_ml_experiments.py +6 -0
- sempy_labs/_ml_models.py +6 -0
- sempy_labs/_model_bpa.py +11 -6
- sempy_labs/_model_bpa_bulk.py +14 -30
- sempy_labs/_model_bpa_rules.py +8 -3
- sempy_labs/_notebooks.py +111 -15
- sempy_labs/_query_scale_out.py +8 -6
- sempy_labs/_refresh_semantic_model.py +299 -49
- sempy_labs/_spark.py +12 -5
- sempy_labs/_sql.py +2 -2
- sempy_labs/_translations.py +16 -14
- sempy_labs/_vertipaq.py +127 -116
- sempy_labs/_warehouses.py +90 -1
- sempy_labs/_workloads.py +128 -0
- sempy_labs/_workspace_identity.py +4 -4
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +203 -58
- sempy_labs/admin/_domains.py +18 -18
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_sync.py +2 -6
- sempy_labs/directlake/_dl_helper.py +4 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_shared_expression.py +7 -1
- sempy_labs/directlake/_guardrails.py +3 -2
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
- sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
- sempy_labs/lakehouse/_shortcuts.py +4 -0
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
- sempy_labs/migration/_migration_validation.py +2 -0
- sempy_labs/migration/_refresh_calc_tables.py +1 -0
- sempy_labs/report/__init__.py +4 -1
- sempy_labs/report/_generate_report.py +16 -14
- sempy_labs/report/_paginated.py +74 -0
- sempy_labs/report/_report_bpa.py +8 -10
- sempy_labs/report/_report_functions.py +19 -19
- sempy_labs/report/_report_rebind.py +6 -1
- sempy_labs/report/_reportwrapper.py +3 -3
- sempy_labs/tom/_model.py +173 -67
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
sempy_labs/report/__init__.py
CHANGED
@@ -1,7 +1,9 @@
 from sempy_labs.report._reportwrapper import (
     ReportWrapper,
 )
-
+from sempy_labs.report._paginated import (
+    get_report_datasources,
+)
 from sempy_labs.report._generate_report import (
     create_report_from_reportjson,
     get_report_definition,
@@ -39,4 +41,5 @@ __all__ = [
     "ReportWrapper",
     "report_bpa_rules",
     "run_report_bpa",
+    "get_report_datasources",
 ]
sempy_labs/report/_generate_report.py
CHANGED
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
 import pandas as pd
 import json
 import os
-import time
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
@@ -12,7 +11,6 @@ from sempy_labs._helper_functions import (
 )
 import sempy_labs._icons as icons
 from sempy._utils._log import log
-from sempy.fabric.exceptions import FabricHTTPException


 def create_report_from_reportjson(
@@ -25,6 +23,8 @@ def create_report_from_reportjson(
     """
     Creates a report based on a report.json file (and an optional themes.json file).

+    This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report>`_.
+
     Parameters
     ----------
     report : str
@@ -113,7 +113,7 @@ def create_report_from_reportjson(

     response = client.post(f"/v1/workspaces/{workspace_id}/reports", json=request_body)

-    lro(client, response, status_codes=[201, 202])
+    lro(client, response, status_codes=[201, 202], return_status_code=True)

     print(
         f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace}' workspace."
@@ -126,6 +126,8 @@ def update_report_from_reportjson(
     """
     Updates a report based on a report.json file.

+    This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition>`_.
+
     Parameters
     ----------
     report : str
@@ -181,6 +183,8 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.Da
     """
     Gets the collection of definition files of a report.

+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
+
     Parameters
     ----------
     report : str
@@ -313,50 +317,48 @@ def _create_report(
     dataset: str,
     dataset_workspace: Optional[str] = None,
     report_workspace: Optional[str] = None,
-
+    overwrite: bool = False,
 ):

     from sempy_labs.report import report_rebind

     report_workspace = fabric.resolve_workspace_name(report_workspace)
     report_workspace_id = fabric.resolve_workspace_id(report_workspace)
-
+    dataset_workspace = fabric.resolve_workspace_name(dataset_workspace)

     dfR = fabric.list_reports(workspace=report_workspace)
     dfR_filt = dfR[dfR["Name"] == report]

     updated_report = False
-
+    client = fabric.FabricRestClient()
     # Create report if it does not exist
     if len(dfR_filt) == 0:
         response = client.post(
             f"/v1/workspaces/{report_workspace_id}/reports",
             json=request_body,
-            lro_wait=True,
         )
-
-
+
+        lro(client, response, status_codes=[201, 202], return_status_code=True)
+
         print(
             f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace}'"
         )
         updated_report = True
     # Update the report if it exists
-    elif len(dfR_filt) > 0 and
+    elif len(dfR_filt) > 0 and overwrite:
         report_id = dfR_filt["Id"].iloc[0]
         response = client.post(
             f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
             json=request_body,
-            lro_wait=True,
         )
-
-        raise FabricHTTPException(response)
+        lro(client, response, return_status_code=True)
         print(
             f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace}'"
         )
         updated_report = True
     else:
         raise ValueError(
-            f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and
+            f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and the 'overwrite' parameter was set to False."
         )

     # Rebind the report to the semantic model to make sure it is pointed at the correct semantic model
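For orientation, a minimal usage sketch of create_report_from_reportjson, whose long-running-operation handling changed above. Only the report and workspace names appear in this diff; the dataset and report_json parameter names, the file path, and the item names are assumptions for illustration:

import json
from sempy_labs.report import create_report_from_reportjson

# Load a report.json definition from the attached lakehouse (hypothetical path),
# then create a report bound to an existing semantic model.
with open("/lakehouse/default/Files/report.json", "r") as f:
    report_json = json.load(f)

create_report_from_reportjson(
    report="Sales Report",   # assumed report name
    dataset="Sales Model",   # assumed semantic model name
    report_json=report_json,
    workspace="Sales",       # assumed workspace name
)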
sempy_labs/report/_paginated.py
ADDED
@@ -0,0 +1,74 @@
+import sempy.fabric as fabric
+from typing import Optional
+from sempy.fabric.exceptions import FabricHTTPException
+import pandas as pd
+
+
+def get_report_datasources(
+    report: str,
+    workspace: Optional[str] = None,
+) -> pd.DataFrame:
+    """
+    Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.
+
+    Parameters
+    ----------
+    report : str | List[str]
+        Name(s) of the Power BI report(s).
+    workspace : str, default=None
+        The name of the Fabric workspace in which the report resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of data sources for the specified paginated report (RDL) from the specified workspace.
+    """
+
+    df = pd.DataFrame(
+        columns=[
+            "Report Name",
+            "Report Id",
+            "Datasource Id",
+            "Datasource Type",
+            "Gateway Id",
+            "Server",
+            "Database",
+        ]
+    )
+
+    if workspace is None:
+        workspace_id = fabric.get_workspace_id()
+        workspace = fabric.resolve_workspace_name(workspace_id)
+    else:
+        workspace_id = fabric.resolve_workspace_id(workspace)
+
+    report_id = fabric.resolve_item_id(
+        item_name=report, type="PaginatedReport", workspace=workspace
+    )
+
+    client = fabric.PowerBIRestClient()
+
+    response = client.get(
+        f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    for i in response.json().get("value", []):
+        conn = i.get("connectionDetails", {})
+        new_data = {
+            "Report Name": report,
+            "Report Id": report_id,
+            "Datasource Id": i.get("datasourceId"),
+            "Datasource Type": i.get("datasourceType"),
+            "Gateway Id": i.get("gatewayId"),
+            "Server": conn.get("server") if conn else None,
+            "Database": conn.get("database") if conn else None,
+        }
+
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
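Since sempy_labs/report/__init__.py now exports it, the new function can be called directly. A minimal sketch, assuming a paginated report named 'Sales RDL' in a workspace named 'Sales' (both names are hypothetical):

from sempy_labs.report import get_report_datasources

# Returns a pandas DataFrame with one row per data source of the paginated report.
df = get_report_datasources(report="Sales RDL", workspace="Sales")
print(df[["Datasource Type", "Server", "Database"]])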
sempy_labs/report/_report_bpa.py
CHANGED
@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
     resolve_report_id,
     resolve_lakehouse_name,
     resolve_workspace_capacity,
+    _get_max_run_id,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 import sempy_labs._icons as icons
@@ -192,8 +193,6 @@ def run_report_bpa(
         return finalDF

     if export:
-        from pyspark.sql import SparkSession
-
         if not lakehouse_attached():
             raise ValueError(
                 f"{icons.red_dot} In order to export the BPA results, a lakehouse must be attached to the notebook."
@@ -202,7 +201,7 @@ def run_report_bpa(
         now = datetime.datetime.now()
         delta_table_name = "reportbparesults"
         lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.
+        lake_workspace = fabric.resolve_workspace_name()
         lakehouse = resolve_lakehouse_name(
             lakehouse_id=lakehouse_id, workspace=lake_workspace
         )
@@ -210,15 +209,13 @@ def run_report_bpa(
         lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

-        spark = SparkSession.builder.getOrCreate()
-        query = f"SELECT MAX(RunId) FROM {lakehouse}.{delta_table_name}"
-
         if len(lakeT_filt) == 0:
             runId = 1
         else:
-
-
-
+            max_run_id = _get_max_run_id(
+                lakehouse=lakehouse, table_name=delta_table_name
+            )
+            runId = max_run_id + 1

         export_df = finalDF.copy()
         capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace)
@@ -232,7 +229,7 @@ def run_report_bpa(
         export_df["Timestamp"] = now
         export_df["RunId"] = export_df["RunId"].astype(int)

-        export_df = [
+        export_df = export_df[
            [
                "Capacity Name",
                "Capacity Id",
@@ -249,6 +246,7 @@ def run_report_bpa(
                "URL",
            ]
        ]
+
        save_as_delta_table(
            dataframe=export_df,
            delta_table_name=delta_table_name,
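For orientation, a minimal sketch of the export path reworked above, assuming a lakehouse is attached to the notebook and a report named 'Sales Report' exists; parameter names other than report, workspace, and export are not shown in this diff:

from sempy_labs.report import run_report_bpa

# Runs the report Best Practice Analyzer; with export=True the results are appended
# to the 'reportbparesults' delta table in the attached lakehouse, with RunId now
# derived via the _get_max_run_id helper instead of a Spark SQL query.
run_report_bpa(report="Sales Report", workspace="Sales", export=True)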
sempy_labs/report/_report_functions.py
CHANGED
@@ -33,6 +33,8 @@ def get_report_json(
     """
     Gets the report.json file content of a Power BI report.

+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
+
     Parameters
     ----------
     report : str
@@ -157,6 +159,8 @@ def export_report(
     """
     Exports a Power BI report to a file in your lakehouse.

+    This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
+
     Parameters
     ----------
     report : str
@@ -181,6 +185,8 @@ def export_report(
     """

     # https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group
+    # https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group
+    # https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group

     if not lakehouse_attached():
         raise ValueError(
@@ -222,7 +228,6 @@ def export_report(
     }

     export_format = export_format.upper()
-
     fileExt = validFormats.get(export_format)
     if fileExt is None:
         raise ValueError(
@@ -286,9 +291,6 @@ def export_report(
     reportId = dfI_filt["Id"].iloc[0]
     client = fabric.PowerBIRestClient()

-    dfVisual = list_report_visuals(report=report, workspace=workspace)
-    dfPage = list_report_pages(report=report, workspace=workspace)
-
     if (
         export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
         and reportType == "PaginatedReport"
@@ -314,6 +316,7 @@ def export_report(
         request_body = {"format": export_format, "powerBIReportConfiguration": {}}

         request_body["powerBIReportConfiguration"]["pages"] = []
+        dfPage = list_report_pages(report=report, workspace=workspace)

         for page in page_name:
             dfPage_filt = dfPage[dfPage["Page ID"] == page]
@@ -335,9 +338,11 @@ def export_report(
         request_body = {"format": export_format, "powerBIReportConfiguration": {}}

         request_body["powerBIReportConfiguration"]["pages"] = []
+        dfVisual = list_report_visuals(report=report, workspace=workspace)
         a = 0
         for page in page_name:
             visual = visual_name[a]
+
             dfVisual_filt = dfVisual[
                 (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
             ]
@@ -360,32 +365,25 @@ def export_report(
         request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
             report_level_filter
         ]
-
-
-
-
-    )
+
+    base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
+    response = client.post(f"{base_url}/ExportTo", json=request_body)
+
     if response.status_code == 202:
         response_body = json.loads(response.content)
-
-        response = client.get(
-            f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
-        )
+        export_id = response_body["id"]
+        response = client.get(f"{base_url}/exports/{export_id}")
         response_body = json.loads(response.content)
         while response_body["status"] not in ["Succeeded", "Failed"]:
             time.sleep(3)
-            response = client.get(
-                f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
-            )
+            response = client.get(f"{base_url}/exports/{export_id}")
             response_body = json.loads(response.content)
         if response_body["status"] == "Failed":
             raise ValueError(
                 f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
             )
         else:
-            response = client.get(
-                f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
-            )
+            response = client.get(f"{base_url}/exports/{export_id}/file")
             print(
                 f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace}' workspace to the lakehouse..."
             )
@@ -407,6 +405,8 @@ def clone_report(
     """
     Clones a Power BI report.

+    This is a wrapper function for the following API: `Reports - Clone Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group>`_.
+
     Parameters
     ----------
     report : str
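For orientation, a minimal sketch of the export/polling flow consolidated above (ExportTo, then polling exports/{export_id}, then downloading the file). It assumes an attached lakehouse and a report named 'Sales Report'; the exact parameter list of export_report is not shown in this diff:

from sempy_labs.report import export_report

# Kicks off an export job, waits for it to reach 'Succeeded' or 'Failed',
# then saves the resulting file to the attached lakehouse.
export_report(report="Sales Report", export_format="PDF", workspace="Sales")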
sempy_labs/report/_report_rebind.py
CHANGED
@@ -1,5 +1,8 @@
 import sempy.fabric as fabric
-from sempy_labs._helper_functions import
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_report_id,
+)
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -16,6 +19,8 @@ def report_rebind(
     """
     Rebinds a report to a semantic model.

+    This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group>`_.
+
     Parameters
     ----------
     report : str | List[str]
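For orientation, a minimal sketch of report_rebind, whose imports and docstring changed above. The dataset parameter name is an assumption taken from the function's purpose and does not appear in this diff:

from sempy_labs.report import report_rebind

# Rebinds the report to a different semantic model (both names are hypothetical).
report_rebind(report="Sales Report", dataset="Sales Model")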
sempy_labs/report/_reportwrapper.py
CHANGED
@@ -7,7 +7,7 @@ from sempy_labs._helper_functions import (
     _extract_json,
     _add_part,
     lro,
-
+    # _make_clickable,
 )
 from typing import Optional, List
 import pandas as pd
@@ -370,7 +370,7 @@ class ReportWrapper:
         df = self._add_extended(dataframe=df)

         return df
-        # return df.style.format({"Page URL":
+        # return df.style.format({"Page URL": _make_clickable})

     def list_visual_filters(self, extended: bool = False) -> pd.DataFrame:
         """
@@ -642,7 +642,7 @@ class ReportWrapper:
         )

         return df
-        # return df.style.format({"Page URL":
+        # return df.style.format({"Page URL": _make_clickable})

     def list_visuals(self) -> pd.DataFrame:
         """