semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
- sempy_labs/__init__.py +14 -2
- sempy_labs/_authentication.py +31 -2
- sempy_labs/_clear_cache.py +39 -37
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +57 -11
- sempy_labs/_generate_semantic_model.py +100 -71
- sempy_labs/_git.py +134 -67
- sempy_labs/_helper_functions.py +199 -145
- sempy_labs/_job_scheduler.py +92 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +281 -120
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +27 -25
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +60 -28
- sempy_labs/_notebooks.py +73 -39
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +67 -64
- sempy_labs/_refresh_semantic_model.py +47 -42
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +12 -9
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +34 -31
- sempy_labs/_warehouses.py +22 -21
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/__init__.py +4 -0
- sempy_labs/admin/_basic_functions.py +44 -12
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_items.py +4 -4
- sempy_labs/admin/_scanner.py +7 -5
- sempy_labs/directlake/_directlake_schema_compare.py +18 -14
- sempy_labs/directlake/_directlake_schema_sync.py +18 -12
- sempy_labs/directlake/_dl_helper.py +36 -32
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
- sempy_labs/lakehouse/_lakehouse.py +17 -13
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +40 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +13 -9
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +117 -38
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/migration/_create_pqt_file.py CHANGED

@@ -1,5 +1,4 @@
 import sempy
-import sempy.fabric as fabric
 import json
 import os
 import shutil
@@ -7,12 +6,17 @@ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy._utils._log import log
 from typing import Optional
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)


 @log
 def create_pqt_file(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     file_name: str = "PowerQueryTemplate",
 ):
     """
@@ -24,10 +28,10 @@ def create_pqt_file(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     file_name : str, default='PowerQueryTemplate'
@@ -43,19 +47,20 @@ def create_pqt_file(
             f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
         )

-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     folderPath = "/lakehouse/default/Files"
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")

     with connect_semantic_model(
-        dataset=
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if not any(
             p.SourceType == TOM.PartitionSourceType.M for p in tom.all_partitions()
         ) and not any(t.RefreshPolicy for t in tom.model.Tables):
             print(
-                f"{icons.info} The '{
+                f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no Power Query logic."
             )
             return

@@ -220,7 +225,7 @@ def create_pqt_file(
     shutil.rmtree(subFolderPath, ignore_errors=True)

     print(
-        f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{
+        f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset_name}' semantic model in the '{workspace_name}' workspace within the Files section of your lakehouse."
     )

     a = 0
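The pattern above repeats across the whole release: public `dataset`/`workspace` parameters widen from `str` to `str | UUID`, and each function resolves them once up front via `resolve_workspace_name_and_id` / `resolve_dataset_name_and_id`, using the name in user-facing messages and the ID in API calls. A minimal usage sketch of the widened `create_pqt_file` signature (the model and workspace values are placeholders, and the import path assumes the usual re-export from `sempy_labs.migration`):

```python
from uuid import UUID
from sempy_labs.migration import create_pqt_file

# By display name (the pre-existing calling convention still works):
create_pqt_file(dataset="Sales Model", workspace="Analytics")

# By ID (newly accepted in this release range); both GUIDs are placeholders:
create_pqt_file(
    dataset=UUID("aaaaaaaa-1111-2222-3333-444444444444"),
    workspace=UUID("bbbbbbbb-5555-6666-7777-888888888888"),
    file_name="SalesTemplate",
)
```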
sempy_labs/migration/_refresh_calc_tables.py CHANGED

@@ -7,25 +7,31 @@ from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)


 @log
-def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
+def refresh_calc_tables(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Recreates the delta tables in the lakehouse based on the DAX expressions stored as model annotations in the Direct Lake semantic model.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

     spark = SparkSession.builder.getOrCreate()
-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     icons.sll_tags.append("DirectLakeMigration")

     @retry(
@@ -34,7 +40,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
     )
     def dyn_connect():
         with connect_semantic_model(
-            dataset=
+            dataset=dataset_id, readonly=True, workspace=workspace_id
         ) as tom:

            tom.model
@@ -42,7 +48,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
     dyn_connect()

     with connect_semantic_model(
-        dataset=
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
         for a in tom.model.Annotations:
             if any(a.Name == t.Name for t in tom.model.Tables):
@@ -56,9 +62,9 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):

                 try:
                     df = fabric.evaluate_dax(
-                        dataset=
+                        dataset=dataset_id,
                         dax_string=daxquery,
-                        workspace=
+                        workspace=workspace_id,
                     )

                     # Update column names for non-field parameters
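This diff never shows the body of the `resolve_*_name_and_id` helpers (they live in `sempy_labs/_helper_functions.py`, whose hunks are not included here), but the dispatch they replace is visible in the code removed from `_paginated.py` further down. A hypothetical sketch of that resolution pattern, not the library's actual source:

```python
from uuid import UUID
from typing import Optional
import sempy.fabric as fabric

def resolve_workspace_name_and_id_sketch(
    workspace: Optional[str | UUID],
) -> tuple[str, str]:
    """Hypothetical: map a name, a UUID, or None to a (name, id) pair."""
    if workspace is None:
        # Fall back to the workspace of the attached lakehouse/notebook.
        workspace_id = fabric.get_workspace_id()
        return fabric.resolve_workspace_name(workspace_id), workspace_id
    try:
        UUID(str(workspace))  # parses -> the caller already passed an ID
        return fabric.resolve_workspace_name(str(workspace)), str(workspace)
    except ValueError:        # does not parse -> the caller passed a name
        return str(workspace), fabric.resolve_workspace_id(workspace)
```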
sempy_labs/report/_download_report.py CHANGED

@@ -7,13 +7,14 @@ from sempy_labs._helper_functions import (
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


 def download_report(
     report: str,
     file_name: Optional[str] = None,
     download_type: str = "LiveConnect",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Downloads the specified report from the specified workspace to a Power BI .pbix file.
@@ -29,8 +30,8 @@
         Defaults to None which resolves to the name of the report.
     download_type : str, default="LiveConnect"
         The type of download. Valid values are "LiveConnect" and "IncludeModel".
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -40,10 +41,11 @@
             f"{icons.red_dot} A lakehouse must be attached to the notebook."
         )

+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     lakehouse_id = fabric.get_lakehouse_id()
-
+    lakehouse_workspace = fabric.resolve_workspace_name()
     lakehouse_name = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=
+        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
     )

     download_types = ["LiveConnect", "IncludeModel"]
@@ -53,9 +55,8 @@
         )

     file_name = file_name or report
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     report_id = fabric.resolve_item_id(
-        item_name=report, type="Report", workspace=
+        item_name=report, type="Report", workspace=workspace_id
     )

     client = fabric.PowerBIRestClient()
@@ -71,5 +72,5 @@
             file.write(response.content)

     print(
-        f"{icons.green_dot} The '{report}' report within the '{
+        f"{icons.green_dot} The '{report}' report within the '{workspace_name}' workspace has been exported as the '{file_name}' file in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
     )
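Note the behavioral fix in `download_report`: the report's workspace (`workspace_id`) and the attached lakehouse's workspace (`lakehouse_workspace`) are now resolved independently, so the confirmation message is correct even when the report is downloaded across workspaces. A usage sketch (report and workspace names are hypothetical; the import assumes the usual re-export from `sempy_labs.report`):

```python
from sempy_labs.report import download_report

# The report may live in a different workspace than the attached
# lakehouse; the .pbix still lands in the lakehouse's Files area.
download_report(
    report="Quarterly Sales",
    download_type="IncludeModel",  # or the default "LiveConnect"
    workspace="Reporting",         # name or UUID as of this release
)
```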
sempy_labs/report/_generate_report.py CHANGED

@@ -7,18 +7,20 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _conv_b64,
     resolve_report_id,
+    resolve_dataset_name_and_id,
     lro,
 )
 import sempy_labs._icons as icons
 from sempy._utils._log import log
+from uuid import UUID


 def create_report_from_reportjson(
     report: str,
-    dataset: str,
+    dataset: str | UUID,
     report_json: dict,
     theme_json: Optional[dict] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a report based on a report.json file (and an optional themes.json file).
@@ -29,36 +31,27 @@ def create_report_from_reportjson(
     ----------
     report : str
         Name of the report.
-    dataset : str
-        Name of the semantic model to connect to the report.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model to connect to the report.
     report_json : dict
         The report.json file to be used to create the report.
     theme_json : dict, default=None
         The theme.json file to be used for the theme of the report.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-
-    dfI = fabric.list_items(workspace=workspace)
-
-    dfI_model = dfI[(dfI["Display Name"] == dataset) & (dfI["Type"] == "SemanticModel")]
-
-    if len(dfI_model) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-
+    dfI = fabric.list_items(workspace=workspace, type="Report")
+    dfI_rpt = dfI[dfI["Display Name"] == report]

-
-
-    if len(dfI_rpt) > 0:
+    if not dfI_rpt.empty:
         print(
-            f"{icons.yellow_dot} '{report}' already exists
+            f"{icons.yellow_dot} '{report}' report already exists in the '{workspace_name}' workspace."
         )
         return

@@ -71,7 +64,7 @@ def create_report_from_reportjson(
                     "connectionString": None,
                     "pbiServiceModelId": None,
                     "pbiModelVirtualServerName": "sobe_wowvirtualserver",
-                    "pbiModelDatabaseName":
+                    "pbiModelDatabaseName": dataset_id,
                     "name": "EntityDataSource",
                     "connectionType": "pbiServiceXmlaStyleLive",
                 },
@@ -116,12 +109,12 @@
     lro(client, response, status_codes=[201, 202], return_status_code=True)

     print(
-        f"{icons.green_dot} Succesfully created the '{report}' report within the '{
+        f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace_name}' workspace."
     )


 def update_report_from_reportjson(
-    report: str, report_json: dict, workspace: Optional[str] = None
+    report: str, report_json: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Updates a report based on a report.json file.
@@ -134,17 +127,17 @@ def update_report_from_reportjson(
         Name of the report.
     report_json : dict
         The report.json file to be used to update the report.
-    workspace : str, default=None
-        The Fabric workspace name in which the report resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    report_id = resolve_report_id(report=report, workspace=
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    report_id = resolve_report_id(report=report, workspace=workspace_id)

     # Get the existing PBIR file
-    df_items = get_report_definition(report=report, workspace=
+    df_items = get_report_definition(report=report, workspace=workspace_id)
     df_items_filt = df_items[df_items["path"] == "definition.pbir"]
     rptDefFile = df_items_filt["payload"].iloc[0]
     payloadReportJson = _conv_b64(report_json)
@@ -175,12 +168,12 @@
     lro(client, response, return_status_code=True)

     print(
-        f"{icons.green_dot} The '{report}' report within the '{
+        f"{icons.green_dot} The '{report}' report within the '{workspace_name}' workspace has been successfully updated."
     )


 def get_report_definition(
-    report: str, workspace: Optional[str] = None, return_dataframe: bool = True
+    report: str, workspace: Optional[str | UUID] = None, return_dataframe: bool = True
 ) -> pd.DataFrame | dict:
     """
     Gets the collection of definition files of a report.
@@ -191,8 +184,8 @@ def get_report_definition(
     ----------
     report : str
         Name of the report.
-    workspace : str, default=None
-        The Fabric workspace name in which the report resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -204,9 +197,9 @@
         The collection of report definition files within a pandas dataframe.
     """

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    report_id = resolve_report_id(report=report, workspace=
+    report_id = resolve_report_id(report=report, workspace=workspace_id)
     client = fabric.FabricRestClient()
     response = client.post(
         f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
@@ -224,7 +217,7 @@
 def create_model_bpa_report(
     report: Optional[str] = icons.model_bpa_name,
     dataset: Optional[str] = icons.model_bpa_name,
-    dataset_workspace: Optional[str] = None,
+    dataset_workspace: Optional[str | UUID] = None,
 ):
     """
     Dynamically generates a Best Practice Analyzer report for analyzing semantic models.
@@ -237,24 +230,27 @@ def create_model_bpa_report(
     dataset : str, default='ModelBPA'
         Name of the semantic model which feeds this report.
         Defaults to 'ModelBPA'
-    dataset_workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    dataset_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

     """
-
     # from sempy_labs._helper_functions import resolve_dataset_id

-
+    (dataset_workspace_name, dataset_workspace_id) = resolve_workspace_name_and_id(
+        dataset_workspace
+    )
+
+    dfI = fabric.list_items(workspace=dataset_workspace_id, type="SemanticModel")
     dfI_filt = dfI[dfI["Display Name"] == dataset]

     if len(dfI_filt) == 0:
         raise ValueError(
-            f"The '{dataset}' semantic model does not exist within the '{
+            f"The '{dataset}' semantic model does not exist within the '{dataset_workspace_name}' workspace."
         )

-    dfR = fabric.list_reports(workspace=
+    dfR = fabric.list_reports(workspace=dataset_workspace_id)
     dfR_filt = dfR[dfR["Name"] == report]
     # dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)

@@ -306,14 +302,14 @@

     if len(dfR_filt) > 0:
         update_report_from_reportjson(
-            report=report, report_json=report_json, workspace=
+            report=report, report_json=report_json, workspace=dataset_workspace_id
         )
     else:
         create_report_from_reportjson(
             report=report,
             dataset=dataset,
             report_json=report_json,
-            workspace=
+            workspace=dataset_workspace_id,
         )

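Beyond the type widening, `create_report_from_reportjson` drops its hand-rolled semantic-model existence check (resolution now fails fast inside `resolve_dataset_name_and_id`) and instead guards against a name collision with an existing report, switching to the idiomatic `DataFrame.empty` test. A hypothetical call (the skeleton `report_json` is illustrative only; a real payload comes from an exported report definition):

```python
from sempy_labs.report import create_report_from_reportjson

# Minimal illustrative payload; real report.json content is far richer.
report_json = {"config": "{}", "sections": []}

create_report_from_reportjson(
    report="My New Report",
    dataset="Sales Model",    # name or UUID as of this release
    report_json=report_json,
    workspace=None,           # None -> attached lakehouse / notebook workspace
)
```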
sempy_labs/report/_paginated.py CHANGED

@@ -2,11 +2,15 @@ import sempy.fabric as fabric
 from typing import Optional
 from sempy.fabric.exceptions import FabricHTTPException
 import pandas as pd
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+)


 def get_report_datasources(
     report: str,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.
@@ -15,8 +19,8 @@ def get_report_datasources(
     ----------
     report : str | List[str]
         Name(s) of the Power BI report(s).
-    workspace : str, default=None
-        The name of the Fabric workspace in which the report resides.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -38,14 +42,10 @@
         ]
     )

-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     report_id = fabric.resolve_item_id(
-        item_name=report, type="PaginatedReport", workspace=
+        item_name=report, type="PaginatedReport", workspace=workspace_id
     )

     client = fabric.PowerBIRestClient()
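`get_report_datasources` is the cleanest before/after of the consolidation: the removed `fabric.get_workspace_id` / `resolve_workspace_name` / `resolve_workspace_id` branching collapses into a single call to the shared helper. Callers see no change beyond the wider parameter type (the names below are hypothetical):

```python
from sempy_labs.report import get_report_datasources

# Both forms resolve internally to the same (workspace_name, workspace_id):
df = get_report_datasources(report="Inventory RDL", workspace="Operations")
df = get_report_datasources(
    report="Inventory RDL",
    workspace="bbbbbbbb-5555-6666-7777-888888888888",  # placeholder GUID
)
```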
sempy_labs/report/_report_bpa.py CHANGED

@@ -11,18 +11,20 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_capacity,
     _get_max_run_id,
+    resolve_workspace_name_and_id,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 import sempy_labs._icons as icons
 from IPython.display import display, HTML
 import sempy_labs.report._report_helper as helper
+from uuid import UUID


 @log
 def run_report_bpa(
     report: str,
     rules: Optional[pd.DataFrame] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     # language: Optional[str] = None,
     export: bool = False,
     return_dataframe: bool = False,
@@ -37,8 +39,8 @@
         Name of the report.
     rules : pandas.DataFrame, default=None
         A pandas dataframe containing rules to be evaluated.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     export : bool, default=False
@@ -52,7 +54,9 @@
         A pandas dataframe in HTML format showing report objects which violated the best practice analyzer rules.
     """

-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    rpt = ReportWrapper(report=report, workspace=workspace_id)

     dfCV = rpt.list_custom_visuals()
     dfP = rpt.list_pages()
@@ -145,7 +149,7 @@
         df_output["Description"] = row["Description"]
         df_output["URL"] = row["URL"]
         df_output["Report URL"] = helper.get_web_url(
-            report=report, workspace=
+            report=report, workspace=workspace_id
         )

         page_mapping_dict = dfP.set_index("Page Display Name")["Page URL"].to_dict()
@@ -219,13 +223,13 @@
         runId = max_run_id + 1

         export_df = finalDF.copy()
-        capacity_id, capacity_name = resolve_workspace_capacity(workspace=
+        capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id)
         export_df["Capacity Name"] = capacity_name
         export_df["Capacity Id"] = capacity_id
-        export_df["Workspace Name"] =
-        export_df["Workspace Id"] =
+        export_df["Workspace Name"] = workspace_name
+        export_df["Workspace Id"] = workspace_id
         export_df["Report Name"] = report
-        export_df["Report Id"] = resolve_report_id(report,
+        export_df["Report Id"] = resolve_report_id(report, workspace_id)
         export_df["RunId"] = runId
         export_df["Timestamp"] = now
         export_df["RunId"] = export_df["RunId"].astype(int)
|