semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/METADATA +7 -3
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/RECORD +35 -34
- sempy_labs/__init__.py +14 -0
- sempy_labs/_capacities.py +89 -11
- sempy_labs/_capacity_migration.py +167 -60
- sempy_labs/_clear_cache.py +3 -3
- sempy_labs/_data_pipelines.py +48 -0
- sempy_labs/_external_data_shares.py +188 -0
- sempy_labs/_generate_semantic_model.py +0 -1
- sempy_labs/_git.py +1 -1
- sempy_labs/_helper_functions.py +14 -11
- sempy_labs/_list_functions.py +6 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -5
- sempy_labs/_notebooks.py +4 -3
- sempy_labs/_sql.py +2 -2
- sempy_labs/_translations.py +14 -14
- sempy_labs/_vertipaq.py +121 -101
- sempy_labs/_warehouses.py +11 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +124 -21
- sempy_labs/directlake/_directlake_schema_sync.py +0 -5
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/report/_generate_report.py +10 -14
- sempy_labs/report/_report_bpa.py +8 -10
- sempy_labs/report/_report_functions.py +13 -19
- sempy_labs/report/_report_rebind.py +4 -1
- sempy_labs/report/_reportwrapper.py +3 -3
- sempy_labs/tom/_model.py +109 -34
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/top_level.txt +0 -0
@@ -7,7 +7,6 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     pagination,
 )
-import datetime
 import numpy as np
 import pandas as pd
 import time
@@ -106,27 +105,28 @@ def assign_workspaces_to_capacity(
     if isinstance(workspace, str):
         workspace = [workspace]
 
-    dfC =
-    dfC_filt = dfC[dfC["
-    source_capacity_id = dfC_filt["Id"].iloc[0]
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0]
 
-    dfC_filt = dfC[dfC["
-    target_capacity_id = dfC_filt["Id"].iloc[0]
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
+    target_capacity_id = dfC_filt["Capacity Id"].iloc[0]
 
     if workspace is None:
-        workspaces = fabric.list_workspaces(
-
-        )["Id"].values
+        # workspaces = fabric.list_workspaces(
+        #     filter=f"capacityId eq '{source_capacity_id.upper()}'"
+        # )["Id"].values
+        dfW = list_workspaces()
+        dfW = dfW[dfW["Capacity Id"].str.upper() == source_capacity_id.upper()]
+        workspaces = dfW["Id"].tolist()
     else:
-        dfW =
-        workspaces = dfW[dfW["Name"].isin(workspace)]["Id"].
+        dfW = list_workspaces()
+        workspaces = dfW[dfW["Name"].isin(workspace)]["Id"].tolist()
 
     workspaces = np.array(workspaces)
     batch_size = 999
     for i in range(0, len(workspaces), batch_size):
         batch = workspaces[i : i + batch_size].tolist()
-        batch_length = len(batch)
-        start_time = datetime.datetime.now()
         request_body = {
             "capacityMigrationAssignments": [
                 {
@@ -144,10 +144,6 @@ def assign_workspaces_to_capacity(
 
         if response.status_code != 200:
             raise FabricHTTPException(response)
-        end_time = datetime.datetime.now()
-        print(
-            f"Total time for assigning {str(batch_length)} workspaces is {str((end_time - start_time).total_seconds())}"
-        )
     print(
         f"{icons.green_dot} The workspaces have been assigned to the '{target_capacity}' capacity."
     )
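For orientation, a minimal usage sketch of assign_workspaces_to_capacity after the two hunks above. The full signature is not shown in this diff, so the module path and parameter names below are inferred from the hunks and should be treated as assumptions:

# Hypothetical usage sketch; the import path and parameter names are inferred
# from the hunks above, not from the full (unshown) function signature.
from sempy_labs import admin

# With workspace=None, every workspace on the source capacity is reassigned,
# batched 999 workspaces per request as in the loop above.
admin.assign_workspaces_to_capacity(
    source_capacity="Source Capacity",
    target_capacity="Target Capacity",
    workspace=None,
)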
@@ -157,9 +153,6 @@ def list_capacities() -> pd.DataFrame:
     """
     Shows the a list of capacities and their properties. This function is the admin version.
 
-    Parameters
-    ----------
-
     Returns
     -------
     pandas.DataFrame
@@ -488,7 +481,8 @@ def scan_workspaces(
     workspace: Optional[str | List[str]] = None,
 ) -> dict:
 
-    workspace
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name()
 
     if isinstance(workspace, str):
         workspace = [workspace]
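The scan_workspaces change above makes the workspace argument effectively optional by resolving the calling workspace when None is passed. A minimal sketch, assuming the function is exposed through the same admin module as the surrounding hunks:

# Hypothetical usage sketch; the import path is an assumption based on the
# surrounding hunks, and the return value is a dict per the signature above.
from sempy_labs import admin

scan = admin.scan_workspaces()  # now defaults to the calling workspace
scan = admin.scan_workspaces(workspace=["Sales Workspace", "Finance Workspace"])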
@@ -886,3 +880,112 @@ def list_items(
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
     return df
+
+
+def list_activity_events(
+    start_time: str,
+    end_time: str,
+    activity_filter: Optional[str] = None,
+    user_id_filter: Optional[str] = None,
+):
+    """
+    Shows a list of audit activity events for a tenant.
+
+    Parameters
+    ----------
+    start_time : str
+        Start date and time of the window for audit event results. Example: "2024-09-25T07:55:00".
+    end_time : str
+        End date and time of the window for audit event results. Example: "2024-09-25T08:55:00".
+    activity_filter : str, default=None
+        Filter value for activities. Example: 'viewreport'.
+    user_id_filter : str, default=None
+        Email address of the user.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of audit activity events for a tenant.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/power-bi/admin/get-activity-events
+
+    df = pd.DataFrame(
+        columns=[
+            "Id",
+            "Record Type",
+            "Creation Time",
+            "Operation",
+            "Organization Id",
+            "User Type",
+            "User Key",
+            "Workload",
+            "Result Status",
+            "User Id",
+            "Client IP",
+            "User Agent",
+            "Activity",
+            "Workspace Name",
+            "Workspace Id",
+            "Object Id",
+            "Request Id",
+            "Object Type",
+            "Object Display Name",
+            "Experience",
+            "Refresh Enforcement Policy",
+        ]
+    )
+
+    tic = "%27"
+    space = "%20"
+    client = fabric.PowerBIRestClient()
+    base_url = "/v1.0/myorg/admin/activityevents"
+    conditions = []
+
+    if activity_filter is not None:
+        conditions.append(f"Activity{space}eq{space}{tic}{activity_filter}{tic}")
+    if user_id_filter is not None:
+        conditions.append(f"UserId{space}eq{space}{tic}{user_id_filter}{tic}")
+
+    filter_value = (
+        f"&filter={f'{space}and{space}'.join(conditions)}" if conditions else ""
+    )
+
+    full_url = f"{base_url}?startDateTime={tic}{start_time}{tic}&endDateTime={tic}{end_time}{tic}{filter_value}"
+    response = client.get(full_url)
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for i in r.get("activityEventEntities", []):
+            new_data = {
+                "Id": i.get("id"),
+                "Record Type": i.get("RecordType"),
+                "Creation Time": i.get("CreationTime"),
+                "Operation": i.get("Operation"),
+                "Organization Id": i.get("OrganizationId"),
+                "User Type": i.get("UserType"),
+                "User Key": i.get("UserKey"),
+                "Workload": i.get("Workload"),
+                "Result Status": i.get("ResultStatus"),
+                "User Id": i.get("UserId"),
+                "Client IP": i.get("ClientIP"),
+                "User Agent": i.get("UserAgent"),
+                "Activity": i.get("Activity"),
+                "Workspace Name": i.get("WorkSpaceName"),
+                "Workspace Id": i.get("WorkspaceId"),
+                "Object Id": i.get("ObjectId"),
+                "Request Id": i.get("RequestId"),
+                "Object Type": i.get("ObjectType"),
+                "Object Display Name": i.get("ObjectDisplayName"),
+                "Experience": i.get("Experience"),
+                "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
+            }
+            df = pd.concat(
+                [df, pd.DataFrame(new_data, index=[0])],
+                ignore_index=True,
+            )
+
+    return df
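The new list_activity_events function above wraps the Power BI admin Get Activity Events REST API and pages through its results via the pagination helper. A minimal usage sketch, assuming the function is re-exported through sempy_labs/admin/__init__.py (consistent with the two-line addition shown in the file list, though the export itself is not visible here):

# Hypothetical usage sketch of the new function; only the import path is assumed,
# the parameter values mirror the examples given in the docstring above.
from sempy_labs import admin

df = admin.list_activity_events(
    start_time="2024-09-25T07:55:00",
    end_time="2024-09-25T08:55:00",
    activity_filter="viewreport",       # optional Activity filter
    user_id_filter="user@contoso.com",  # optional UserId filter
)
print(df[["Creation Time", "Activity", "User Id", "Workspace Name"]].head())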
@@ -56,11 +56,6 @@ def direct_lake_schema_sync(
     )
     lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
 
-    if artifact_type == "Warehouse":
-        raise ValueError(
-            f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
-        )
-
     lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)
 
     with connect_semantic_model(
@@ -11,7 +11,7 @@ from sempy.fabric.exceptions import FabricHTTPException
 
 def generate_shared_expression(
     item_name: Optional[str] = None,
-    item_type:
+    item_type: str = "Lakehouse",
     workspace: Optional[str] = None,
 ) -> str:
     """
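The change above gives item_type an explicit default of "Lakehouse". A minimal sketch, assuming generate_shared_expression is exported from sempy_labs.directlake as in earlier releases; the item and workspace names are placeholders:

# Hypothetical usage sketch; item/workspace names are placeholders.
from sempy_labs.directlake import generate_shared_expression

# item_type can now be omitted for lakehouses; pass it explicitly for other
# supported item types.
expr = generate_shared_expression(item_name="MyLakehouse", workspace="My Workspace")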
@@ -11,7 +11,7 @@ def show_unsupported_direct_lake_objects(
 ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
     """
     Returns a list of a semantic model's objects which are not supported by Direct Lake based on
-
+    `official documentation <https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations>`_.
 
     Parameters
     ----------
@@ -13,11 +13,11 @@ import sempy_labs._icons as icons
 def create_pqt_file(
     dataset: str,
     workspace: Optional[str] = None,
-    file_name:
+    file_name: str = "PowerQueryTemplate",
 ):
     """
     Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
-
+    saved within the Files section of your lakehouse.
 
     Dataflows Gen2 has a `limit of 50 tables <https://learn.microsoft.com/power-query/power-query-online-limits>`_. If there are more than 50 tables, this will save multiple Power Query Template
     files (with each file having a max of 50 tables).
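With the change above, file_name defaults to "PowerQueryTemplate". A minimal usage sketch, assuming create_pqt_file remains exported from sempy_labs.migration; the dataset and workspace names are placeholders:

# Hypothetical usage sketch; dataset/workspace names are placeholders.
from sempy_labs.migration import create_pqt_file

# Writes PowerQueryTemplate.pqt (the new default file name) to the Files
# section of the lakehouse attached to the notebook.
create_pqt_file(dataset="AdventureWorks", workspace="Sales Workspace")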
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
 import pandas as pd
 import json
 import os
-import time
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
@@ -12,7 +11,6 @@ from sempy_labs._helper_functions import (
 )
 import sempy_labs._icons as icons
 from sempy._utils._log import log
-from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_report_from_reportjson(
@@ -113,7 +111,7 @@ def create_report_from_reportjson(
 
     response = client.post(f"/v1/workspaces/{workspace_id}/reports", json=request_body)
 
-    lro(client, response, status_codes=[201, 202])
+    lro(client, response, status_codes=[201, 202], return_status_code=True)
 
     print(
         f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace}' workspace."
@@ -313,50 +311,48 @@ def _create_report(
     dataset: str,
     dataset_workspace: Optional[str] = None,
     report_workspace: Optional[str] = None,
-
+    overwrite: bool = False,
 ):
 
     from sempy_labs.report import report_rebind
 
     report_workspace = fabric.resolve_workspace_name(report_workspace)
     report_workspace_id = fabric.resolve_workspace_id(report_workspace)
-
+    dataset_workspace = fabric.resolve_workspace_name(dataset_workspace)
 
     dfR = fabric.list_reports(workspace=report_workspace)
     dfR_filt = dfR[dfR["Name"] == report]
 
     updated_report = False
-
+    client = fabric.FabricRestClient()
     # Create report if it does not exist
     if len(dfR_filt) == 0:
         response = client.post(
             f"/v1/workspaces/{report_workspace_id}/reports",
             json=request_body,
-            lro_wait=True,
         )
-
-
+
+        lro(client, response, status_codes=[201, 202], return_status_code=True)
+
         print(
             f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace}'"
         )
         updated_report = True
     # Update the report if it exists
-    elif len(dfR_filt) > 0 and
+    elif len(dfR_filt) > 0 and overwrite:
         report_id = dfR_filt["Id"].iloc[0]
         response = client.post(
             f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
             json=request_body,
-            lro_wait=True,
         )
-
-        raise FabricHTTPException(response)
+        lro(client, response, return_status_code=True)
         print(
             f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace}'"
         )
         updated_report = True
     else:
         raise ValueError(
-            f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and
+            f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and the 'overwrite' parameter was set to False."
         )
 
     # Rebind the report to the semantic model to make sure it is pointed at the correct semantic model
sempy_labs/report/_report_bpa.py CHANGED

@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
     resolve_report_id,
     resolve_lakehouse_name,
     resolve_workspace_capacity,
+    _get_max_run_id,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 import sempy_labs._icons as icons
@@ -192,8 +193,6 @@ def run_report_bpa(
         return finalDF
 
     if export:
-        from pyspark.sql import SparkSession
-
         if not lakehouse_attached():
             raise ValueError(
                 f"{icons.red_dot} In order to export the BPA results, a lakehouse must be attached to the notebook."
@@ -202,7 +201,7 @@ def run_report_bpa(
         now = datetime.datetime.now()
         delta_table_name = "reportbparesults"
         lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.
+        lake_workspace = fabric.resolve_workspace_name()
         lakehouse = resolve_lakehouse_name(
             lakehouse_id=lakehouse_id, workspace=lake_workspace
         )
@@ -210,15 +209,13 @@ def run_report_bpa(
         lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]
 
-        spark = SparkSession.builder.getOrCreate()
-        query = f"SELECT MAX(RunId) FROM {lakehouse}.{delta_table_name}"
-
         if len(lakeT_filt) == 0:
             runId = 1
         else:
-
-
-
+            max_run_id = _get_max_run_id(
+                lakehouse=lakehouse, table_name=delta_table_name
+            )
+            runId = max_run_id + 1
 
         export_df = finalDF.copy()
         capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace)
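The hunk above replaces the inline Spark query with the shared _get_max_run_id helper imported from sempy_labs/_helper_functions.py. The helper's implementation is not part of this diff; a rough sketch of what the removed inline logic did (and what the helper presumably encapsulates) is:

# Rough sketch reconstructing only the removed inline logic; the real
# _get_max_run_id in sempy_labs/_helper_functions.py is not shown in this diff
# and may well be implemented without Spark.
from pyspark.sql import SparkSession

def _get_max_run_id(lakehouse: str, table_name: str) -> int:
    spark = SparkSession.builder.getOrCreate()
    query = f"SELECT MAX(RunId) FROM {lakehouse}.{table_name}"
    return spark.sql(query).collect()[0][0]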
@@ -232,7 +229,7 @@ def run_report_bpa(
         export_df["Timestamp"] = now
         export_df["RunId"] = export_df["RunId"].astype(int)
 
-        export_df = [
+        export_df = export_df[
             [
                 "Capacity Name",
                 "Capacity Id",
@@ -249,6 +246,7 @@ def run_report_bpa(
                 "URL",
             ]
         ]
+
         save_as_delta_table(
             dataframe=export_df,
             delta_table_name=delta_table_name,
@@ -181,6 +181,8 @@ def export_report(
     """
 
     # https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group
+    # https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group
+    # https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group
 
     if not lakehouse_attached():
         raise ValueError(
@@ -222,7 +224,6 @@ def export_report(
     }
 
     export_format = export_format.upper()
-
     fileExt = validFormats.get(export_format)
     if fileExt is None:
         raise ValueError(
@@ -286,9 +287,6 @@ def export_report(
     reportId = dfI_filt["Id"].iloc[0]
     client = fabric.PowerBIRestClient()
 
-    dfVisual = list_report_visuals(report=report, workspace=workspace)
-    dfPage = list_report_pages(report=report, workspace=workspace)
-
     if (
         export_format in ["BMP", "EMF", "GIF", "JPEG", "TIFF"]
         and reportType == "PaginatedReport"
@@ -314,6 +312,7 @@ def export_report(
         request_body = {"format": export_format, "powerBIReportConfiguration": {}}
 
         request_body["powerBIReportConfiguration"]["pages"] = []
+        dfPage = list_report_pages(report=report, workspace=workspace)
 
         for page in page_name:
             dfPage_filt = dfPage[dfPage["Page ID"] == page]
@@ -335,9 +334,11 @@ def export_report(
         request_body = {"format": export_format, "powerBIReportConfiguration": {}}
 
         request_body["powerBIReportConfiguration"]["pages"] = []
+        dfVisual = list_report_visuals(report=report, workspace=workspace)
         a = 0
         for page in page_name:
             visual = visual_name[a]
+
             dfVisual_filt = dfVisual[
                 (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
             ]
@@ -360,32 +361,25 @@ def export_report(
         request_body["powerBIReportConfiguration"]["reportLevelFilters"] = [
             report_level_filter
         ]
-
-
-
-
-    )
+
+    base_url = f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}"
+    response = client.post(f"{base_url}/ExportTo", json=request_body)
+
     if response.status_code == 202:
         response_body = json.loads(response.content)
-
-        response = client.get(
-            f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
-        )
+        export_id = response_body["id"]
+        response = client.get(f"{base_url}/exports/{export_id}")
         response_body = json.loads(response.content)
         while response_body["status"] not in ["Succeeded", "Failed"]:
             time.sleep(3)
-            response = client.get(
-                f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}"
-            )
+            response = client.get(f"{base_url}/exports/{export_id}")
             response_body = json.loads(response.content)
         if response_body["status"] == "Failed":
             raise ValueError(
                 f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
             )
         else:
-            response = client.get(
-                f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
-            )
+            response = client.get(f"{base_url}/exports/{export_id}/file")
             print(
                 f"{icons.in_progress} Saving the '{export_format}' export for the '{report}' report within the '{workspace}' workspace to the lakehouse..."
             )
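The hunk above consolidates the repeated export URLs into a single base_url, posts to the ExportTo endpoint, polls /exports/{export_id} every three seconds until the export succeeds or fails, and then downloads /exports/{export_id}/file. A minimal usage sketch of export_report itself (report and workspace names are placeholders; the full parameter list is not shown in this diff, and a lakehouse must be attached per the earlier hunk):

# Hypothetical usage sketch; report/workspace names are placeholders and only
# the parameters visible in these hunks are used.
from sempy_labs.report import export_report

# Exports the report to PDF and saves the file to the attached lakehouse,
# polling export status as shown above.
export_report(report="Sales Summary", export_format="PDF", workspace="Sales Workspace")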
@@ -1,5 +1,8 @@
 import sempy.fabric as fabric
-from sempy_labs._helper_functions import
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_report_id,
+)
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -7,7 +7,7 @@ from sempy_labs._helper_functions import (
     _extract_json,
     _add_part,
     lro,
-
+    # _make_clickable,
 )
 from typing import Optional, List
 import pandas as pd
@@ -370,7 +370,7 @@ class ReportWrapper:
         df = self._add_extended(dataframe=df)
 
         return df
-        # return df.style.format({"Page URL":
+        # return df.style.format({"Page URL": _make_clickable})
 
     def list_visual_filters(self, extended: bool = False) -> pd.DataFrame:
         """
@@ -642,7 +642,7 @@ class ReportWrapper:
         )
 
         return df
-        # return df.style.format({"Page URL":
+        # return df.style.format({"Page URL": _make_clickable})
 
     def list_visuals(self) -> pd.DataFrame:
         """