semantic-link-labs: 0.8.9-py3-none-any.whl → 0.8.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
- sempy_labs/__init__.py +14 -2
- sempy_labs/_authentication.py +31 -2
- sempy_labs/_clear_cache.py +39 -37
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +57 -11
- sempy_labs/_generate_semantic_model.py +100 -71
- sempy_labs/_git.py +134 -67
- sempy_labs/_helper_functions.py +199 -145
- sempy_labs/_job_scheduler.py +92 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +281 -120
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +27 -25
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +60 -28
- sempy_labs/_notebooks.py +73 -39
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +67 -64
- sempy_labs/_refresh_semantic_model.py +47 -42
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +12 -9
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +34 -31
- sempy_labs/_warehouses.py +22 -21
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/__init__.py +4 -0
- sempy_labs/admin/_basic_functions.py +44 -12
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_items.py +4 -4
- sempy_labs/admin/_scanner.py +7 -5
- sempy_labs/directlake/_directlake_schema_compare.py +18 -14
- sempy_labs/directlake/_directlake_schema_sync.py +18 -12
- sempy_labs/directlake/_dl_helper.py +36 -32
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
- sempy_labs/lakehouse/_lakehouse.py +17 -13
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +40 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +13 -9
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +117 -38
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_one_lake_integration.py CHANGED

@@ -2,14 +2,18 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
-from sempy_labs._helper_functions import
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def export_model_to_onelake(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     destination_lakehouse: Optional[str] = None,
     destination_workspace: Optional[str] = None,
 ):
@@ -18,10 +22,10 @@ def export_model_to_onelake(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     destination_lakehouse : str, default=None
@@ -30,22 +34,15 @@ def export_model_to_onelake(
         The name of the Fabric workspace in which the lakehouse resides.
     """

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     if destination_workspace is None:
-        destination_workspace =
+        destination_workspace = workspace_name
         destination_workspace_id = workspace_id
     else:
         destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)

-    dfD = fabric.list_datasets(workspace=workspace)
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-
-    if len(dfD_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
-
     tmsl = f"""
     {{
         'export': {{
@@ -53,7 +50,7 @@ def export_model_to_onelake(
             'type': 'full',
             'objects': [
                 {{
-                    'database': '{
+                    'database': '{dataset_name}'
                 }}
             ]
         }}
@@ -62,13 +59,13 @@ def export_model_to_onelake(

     # Export model's tables as delta tables
     try:
-        fabric.execute_tmsl(script=tmsl, workspace=
+        fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
         print(
-            f"{icons.green_dot} The '{
+            f"{icons.green_dot} The '{dataset_name}' semantic model's tables have been exported as delta tables to the '{workspace_name}' workspace.\n"
         )
     except Exception as e:
         raise ValueError(
-            f"{icons.red_dot} The '{
+            f"{icons.red_dot} The '{dataset_name}' semantic model's tables have not been exported as delta tables to the '{workspace_name}' workspace.\nMake sure you enable OneLake integration for the '{dataset_name}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
         ) from e

     # Create shortcuts if destination lakehouse is specified
@@ -92,14 +89,14 @@ def export_model_to_onelake(
         destination_lakehouse_id = dfI_filt["Id"].iloc[0]

     # Source...
-    dfI_Source = fabric.list_items(workspace=
+    dfI_Source = fabric.list_items(workspace=workspace_id, type="SemanticModel")
     dfI_filtSource = dfI_Source[(dfI_Source["Display Name"] == dataset)]
     sourceLakehouseId = dfI_filtSource["Id"].iloc[0]

     # Valid tables
     dfP = fabric.list_partitions(
-        dataset=
-        workspace=
+        dataset=dataset_id,
+        workspace=workspace_id,
         additional_xmla_properties=["Parent.SystemManaged"],
     )
     dfP_filt = dfP[
@@ -107,7 +104,7 @@ def export_model_to_onelake(
         & (dfP["Source Type"] != "CalculationGroup")
         & (dfP["Parent System Managed"] == False)
     ]
-    dfC = fabric.list_columns(dataset=
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
     oneMode = tmc[tmc["Mode"] == 1]
     tableAll = dfP_filt[
@@ -141,7 +138,7 @@ def export_model_to_onelake(
         )
         if response.status_code == 201:
             print(
-                f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{
+                f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace.\n"
             )
         else:
             print(response.status_code)
sempy_labs/_query_scale_out.py CHANGED

@@ -1,15 +1,16 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def qso_sync(dataset: str, workspace: Optional[str] = None):
+def qso_sync(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace.

@@ -17,16 +18,16 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.PowerBIRestClient()
     response = client.post(
@@ -36,12 +37,12 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} QSO sync initiated for the '{
+        f"{icons.green_dot} QSO sync initiated for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )


 def qso_sync_status(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[pd.DataFrame, pd.DataFrame]:
     """
     Returns the query scale-out sync status for the specified dataset from the specified workspace.
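qso_sync follows the same pattern: the old resolve_dataset_id call gives way to the name-and-id helper pair, and both parameters widen to str | UUID. A sketch with placeholder values, assuming the top-level re-export:

from sempy_labs import qso_sync
from uuid import UUID

# Equivalent calls after this change:
qso_sync(dataset="Sales Model", workspace="Analytics")
qso_sync(
    dataset=UUID("00000000-0000-0000-0000-000000000001"),
    workspace=UUID("00000000-0000-0000-0000-000000000002"),
)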
@@ -50,10 +51,10 @@ def qso_sync_status(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -81,8 +82,8 @@ def qso_sync_status(
         columns=["Replica ID", "Replica Type", "Replica Version", "Replica Timestamp"]
     )

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.PowerBIRestClient()
     response = client.get(
@@ -139,7 +140,9 @@ def qso_sync_status(
     return df, dfRep


-def disable_qso(
+def disable_qso(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Sets the max read-only replicas to 0, disabling query scale out.

@@ -147,10 +150,10 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -160,8 +163,8 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing the current query scale out settings.
     """

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     request_body = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}

@@ -172,20 +175,20 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     if response.status_code != 200:
         raise FabricHTTPException(response)

-    df = list_qso_settings(dataset=
+    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)

     print(
-        f"{icons.green_dot} Query scale out has been disabled for the '{
+        f"{icons.green_dot} Query scale out has been disabled for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )

     return df


 def set_qso(
-    dataset: str,
+    dataset: str | UUID,
     auto_sync: bool = True,
     max_read_only_replicas: int = -1,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Sets the query scale out settings for a semantic model.
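qso_sync_status and disable_qso pick up the same str | UUID widening; disable_qso keeps its pd.DataFrame return. A sketch with placeholder names, assuming the top-level re-exports:

from sempy_labs import qso_sync_status, disable_qso

# Two dataframes: overall sync status and per-replica details.
status_df, replicas_df = qso_sync_status(dataset="Sales Model", workspace="Analytics")

# Sets maxReadOnlyReplicas to 0 and returns the resulting QSO settings.
settings_df = disable_qso(dataset="Sales Model", workspace="Analytics")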
@@ -194,14 +197,14 @@ def set_qso(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     auto_sync : bool, default=True
         Whether the semantic model automatically syncs read-only replicas.
     max_read_only_replicas : int, default=-1
         To enable semantic model scale-out, set max_read_only_replicas to -1, or any non-0 value. A value of -1 allows Power BI to create as many read-only replicas as your Power BI capacity supports. You can also explicitly set the replica count to a value lower than that of the capacity maximum. Setting max_read_only_replicas to -1 is recommended.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -213,16 +216,16 @@ def set_qso(

     from sempy_labs._helper_functions import is_default_semantic_model

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    if is_default_semantic_model(dataset=
+    if is_default_semantic_model(dataset=dataset_id, workspace=workspace_id):
         raise ValueError(
             f"{icons.red_dot} The 'set_qso' function does not run against default semantic models."
         )

     if max_read_only_replicas == 0:
-        disable_qso(dataset=
+        disable_qso(dataset=dataset_id, workspace=workspace_id)
         return

     request_body = {
@@ -232,12 +235,12 @@ def set_qso(
         }
     }

-    dfL = list_qso_settings(dataset=
+    dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     storage_mode = dfL["Storage Mode"].iloc[0]

     if storage_mode == "Small":
         set_semantic_model_storage_format(
-            dataset=
+            dataset=dataset_id, storage_format="Large", workspace=workspace_id
         )

     client = fabric.PowerBIRestClient()
@@ -248,34 +251,34 @@ def set_qso(
     if response.status_code != 200:
         raise FabricHTTPException(response)

-    df = list_qso_settings(dataset=
+    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     print(
-        f"{icons.green_dot} Query scale out has been set on the '{
+        f"{icons.green_dot} Query scale out has been set on the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )

     return df


 def set_semantic_model_storage_format(
-    dataset: str, storage_format: str, workspace: Optional[str] = None
+    dataset: str | UUID, storage_format: str, workspace: Optional[str | UUID] = None
 ):
     """
     Sets the semantic model storage format.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     storage_format : str
         The storage format for the semantic model. Valid options: 'Large', 'Small'.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     storage_format = storage_format.capitalize()

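set_qso now resolves the model once and passes dataset_id/workspace_id to its internal calls (is_default_semantic_model, disable_qso, list_qso_settings, set_semantic_model_storage_format). Usage is unchanged apart from the wider parameter types; a sketch with placeholder names:

from sempy_labs import set_qso

settings_df = set_qso(
    dataset="Sales Model",      # str | UUID as of this release
    auto_sync=True,
    max_read_only_replicas=-1,  # -1: as many replicas as the capacity allows (recommended per the docstring)
    workspace="Analytics",
)

Per the body above, max_read_only_replicas=0 is routed to disable_qso, and a model in 'Small' storage is first switched to 'Large'.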
@@ -295,12 +298,12 @@ def set_semantic_model_storage_format(
         f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
     )

-    dfL = list_qso_settings(dataset=
+    dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     current_storage_format = dfL["Storage Mode"].iloc[0]

     if current_storage_format == storage_format:
         print(
-            f"{icons.info} The '{
+            f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is already set to '{storage_format.lower()}' storage format."
         )
         return

@@ -311,22 +314,22 @@ def set_semantic_model_storage_format(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The semantic model storage format for the '{
+        f"{icons.green_dot} The semantic model storage format for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been set to '{storage_format}'."
     )


 def list_qso_settings(
-    dataset: Optional[str] = None, workspace: Optional[str] = None
+    dataset: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the query scale out settings for a semantic model (or all semantic models within a workspace).

     Parameters
     ----------
-    dataset : str, default=None
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID, default=None
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

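set_semantic_model_storage_format gains the same str | UUID handling, and since the body calls storage_format.capitalize() before validating, lowercase input also passes. A placeholder-name sketch, assuming the top-level re-export:

from sempy_labs import set_semantic_model_storage_format

set_semantic_model_storage_format(
    dataset="Sales Model",   # hypothetical; str | UUID after this change
    storage_format="large",  # capitalized internally to 'Large'
    workspace="Analytics",   # hypothetical
)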
@@ -336,10 +339,10 @@ def list_qso_settings(
         A pandas dataframe showing the query scale out settings.
     """

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     if dataset is not None:
-        dataset_id =
+        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     df = pd.DataFrame(
         columns=[
@@ -382,7 +385,7 @@ def list_qso_settings(


 def set_workspace_default_storage_format(
-    storage_format: str, workspace: Optional[str] = None
+    storage_format: str, workspace: Optional[str | UUID] = None
 ):
     """
     Sets the default storage format for semantic models within a workspace.
@@ -391,8 +394,8 @@ def set_workspace_default_storage_format(
     ----------
     storage_format : str
         The storage format for the semantic model. Valid options: 'Large', 'Small'.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -408,22 +411,22 @@ def set_workspace_default_storage_format(
         f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
     )

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     # Check current storage format
-    dfW = fabric.list_workspaces(filter=f"name eq '{
+    dfW = fabric.list_workspaces(filter=f"name eq '{workspace_name}'")
     if len(dfW) == 0:
         raise ValueError()
     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]

     if current_storage_format == storage_format:
         print(
-            f"{icons.info} The '{
+            f"{icons.info} The '{workspace_name}' is already set to a default storage format of '{current_storage_format}'."
         )
         return

     request_body = {
-        "name":
+        "name": workspace_name,
         "defaultDatasetStorageFormat": storage_format,
     }

@@ -434,5 +437,5 @@ def set_workspace_default_storage_format(
         raise FabricHTTPException(response)

     print(
-        f"{icons.green_dot} The default storage format for the '{
+        f"{icons.green_dot} The default storage format for the '{workspace_name}' workspace has been updated to '{storage_format}."
     )
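To round out the module: list_qso_settings resolves the dataset only when one is given, and set_workspace_default_storage_format only widens the workspace parameter. A closing sketch with placeholder names, assuming the top-level re-exports:

from sempy_labs import list_qso_settings, set_workspace_default_storage_format

# dataset=None returns QSO settings for every semantic model in the workspace.
df = list_qso_settings(workspace="Analytics")

# Workspace-level default storage format for semantic models; 'Large' or 'Small'.
set_workspace_default_storage_format(storage_format="Large", workspace="Analytics")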