semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +34 -3
- sempy_labs/_authentication.py +80 -4
- sempy_labs/_capacities.py +770 -200
- sempy_labs/_capacity_migration.py +7 -37
- sempy_labs/_clear_cache.py +37 -35
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +16 -14
- sempy_labs/_generate_semantic_model.py +107 -62
- sempy_labs/_git.py +105 -43
- sempy_labs/_helper_functions.py +251 -194
- sempy_labs/_job_scheduler.py +227 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +150 -126
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +210 -207
- sempy_labs/_model_bpa_bulk.py +2 -2
- sempy_labs/_model_bpa_rules.py +3 -3
- sempy_labs/_model_dependencies.py +55 -29
- sempy_labs/_notebooks.py +29 -25
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +75 -64
- sempy_labs/_refresh_semantic_model.py +25 -26
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +19 -12
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +38 -33
- sempy_labs/_warehouses.py +26 -25
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/_basic_functions.py +166 -115
- sempy_labs/admin/_domains.py +7 -2
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_git.py +4 -1
- sempy_labs/admin/_items.py +11 -6
- sempy_labs/admin/_scanner.py +10 -5
- sempy_labs/directlake/_directlake_schema_compare.py +25 -16
- sempy_labs/directlake/_directlake_schema_sync.py +24 -12
- sempy_labs/directlake/_dl_helper.py +74 -55
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
- sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
- sempy_labs/lakehouse/_lakehouse.py +7 -20
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +85 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +15 -11
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +139 -21
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
sempy_labs/_query_scale_out.py
CHANGED
@@ -1,15 +1,18 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
+from sempy._utils._log import log
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def qso_sync(dataset: str, workspace: Optional[str] = None):
+@log
+def qso_sync(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace.

@@ -17,16 +20,16 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.PowerBIRestClient()
     response = client.post(
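Note: the signature change above means both parameters of qso_sync now accept either a name or a UUID. A minimal usage sketch (the workspace/model names and the ID below are placeholders; the import path is the module shown in this diff):

    from uuid import UUID
    from sempy_labs._query_scale_out import qso_sync

    # By name, as in 0.8.10 (still supported):
    qso_sync(dataset="Sales Model", workspace="Analytics")

    # New in 0.9.0: pass an ID; resolve_dataset_name_and_id accepts either form.
    qso_sync(dataset=UUID("11111111-2222-3333-4444-555555555555"))  # placeholder ID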
@@ -36,12 +39,13 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} QSO sync initiated for the '{
+        f"{icons.green_dot} QSO sync initiated for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )


+@log
 def qso_sync_status(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[pd.DataFrame, pd.DataFrame]:
     """
     Returns the query scale-out sync status for the specified dataset from the specified workspace.

@@ -50,10 +54,10 @@ def qso_sync_status(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -81,8 +85,8 @@ def qso_sync_status(
         columns=["Replica ID", "Replica Type", "Replica Version", "Replica Timestamp"]
     )

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.PowerBIRestClient()
     response = client.get(
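Note: qso_sync_status keeps its two-dataframe return; only the input types widened. A sketch of consuming the result, using the replica columns visible in the hunk above (names are placeholders):

    from sempy_labs._query_scale_out import qso_sync_status

    # First dataframe: sync status; second: one row per read-only replica.
    df_status, df_replicas = qso_sync_status(dataset="Sales Model", workspace="Analytics")
    print(df_replicas[["Replica ID", "Replica Type", "Replica Timestamp"]])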
@@ -139,7 +143,10 @@ def qso_sync_status(
     return df, dfRep


-def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+@log
+def disable_qso(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Sets the max read-only replicas to 0, disabling query scale out.

@@ -147,10 +154,10 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -160,8 +167,8 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing the current query scale out settings.
     """

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     request_body = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}
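Note: disable_qso now resolves names and IDs up front before posting maxReadOnlyReplicas=0. A sketch (placeholder names):

    from sempy_labs._query_scale_out import disable_qso

    # Returns a dataframe with the settings after replicas are set to 0.
    df_settings = disable_qso(dataset="Sales Model", workspace="Analytics")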
@@ -172,20 +179,21 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     if response.status_code != 200:
         raise FabricHTTPException(response)

-    df = list_qso_settings(dataset=
+    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)

     print(
-        f"{icons.green_dot} Query scale out has been disabled for the '{
+        f"{icons.green_dot} Query scale out has been disabled for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )

     return df


+@log
 def set_qso(
-    dataset: str,
+    dataset: str | UUID,
     auto_sync: bool = True,
     max_read_only_replicas: int = -1,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Sets the query scale out settings for a semantic model.

@@ -194,14 +202,14 @@ def set_qso(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     auto_sync : bool, default=True
         Whether the semantic model automatically syncs read-only replicas.
     max_read_only_replicas : int, default=-1
         To enable semantic model scale-out, set max_read_only_replicas to -1, or any non-0 value. A value of -1 allows Power BI to create as many read-only replicas as your Power BI capacity supports. You can also explicitly set the replica count to a value lower than that of the capacity maximum. Setting max_read_only_replicas to -1 is recommended.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -213,16 +221,16 @@ def set_qso(

     from sempy_labs._helper_functions import is_default_semantic_model

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    if is_default_semantic_model(dataset=
+    if is_default_semantic_model(dataset=dataset_id, workspace=workspace_id):
         raise ValueError(
             f"{icons.red_dot} The 'set_qso' function does not run against default semantic models."
         )

     if max_read_only_replicas == 0:
-        disable_qso(dataset=
+        disable_qso(dataset=dataset_id, workspace=workspace_id)
         return

     request_body = {

@@ -232,12 +240,12 @@ def set_qso(
         }
     }

-    dfL = list_qso_settings(dataset=
+    dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     storage_mode = dfL["Storage Mode"].iloc[0]

     if storage_mode == "Small":
         set_semantic_model_storage_format(
-            dataset=
+            dataset=dataset_id, storage_format="Large", workspace=workspace_id
         )

     client = fabric.PowerBIRestClient()
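Note: per the hunks above, set_qso delegates to disable_qso when max_read_only_replicas is 0, and switches a 'Small' storage-format model to 'Large' before enabling scale-out. A sketch (placeholder names):

    from sempy_labs._query_scale_out import set_qso

    # -1 lets the capacity create as many read-only replicas as it supports.
    df = set_qso(
        dataset="Sales Model",
        auto_sync=True,
        max_read_only_replicas=-1,
        workspace="Analytics",
    )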
@@ -248,34 +256,35 @@ def set_qso(
     if response.status_code != 200:
         raise FabricHTTPException(response)

-    df = list_qso_settings(dataset=
+    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     print(
-        f"{icons.green_dot} Query scale out has been set on the '{
+        f"{icons.green_dot} Query scale out has been set on the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )

     return df


+@log
 def set_semantic_model_storage_format(
-    dataset: str, storage_format: str, workspace: Optional[str] = None
+    dataset: str | UUID, storage_format: str, workspace: Optional[str | UUID] = None
 ):
     """
     Sets the semantic model storage format.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     storage_format : str
         The storage format for the semantic model. Valid options: 'Large', 'Small'.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     storage_format = storage_format.capitalize()

@@ -295,12 +304,12 @@ def set_semantic_model_storage_format(
         f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
     )

-    dfL = list_qso_settings(dataset=
+    dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     current_storage_format = dfL["Storage Mode"].iloc[0]

     if current_storage_format == storage_format:
         print(
-            f"{icons.info} The '{
+            f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is already set to '{storage_format.lower()}' storage format."
         )
         return
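Note: set_semantic_model_storage_format capitalizes the value internally, so 'large' and 'Large' are equivalent, and it exits early when the model already uses the requested format. A sketch (placeholder names):

    from sempy_labs._query_scale_out import set_semantic_model_storage_format

    set_semantic_model_storage_format(
        dataset="Sales Model", storage_format="Large", workspace="Analytics"
    )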
@@ -311,22 +320,23 @@ def set_semantic_model_storage_format(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The semantic model storage format for the '{
+        f"{icons.green_dot} The semantic model storage format for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been set to '{storage_format}'."
     )


+@log
 def list_qso_settings(
-    dataset: Optional[str] = None, workspace: Optional[str] = None
+    dataset: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the query scale out settings for a semantic model (or all semantic models within a workspace).

     Parameters
     ----------
-    dataset : str, default=None
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID, default=None
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -336,10 +346,10 @@ def list_qso_settings(
         A pandas dataframe showing the query scale out settings.
     """

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     if dataset is not None:
-        dataset_id =
+        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     df = pd.DataFrame(
         columns=[
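Note: dataset stays optional on list_qso_settings, so one call can cover a whole workspace. A sketch (placeholder names):

    from sempy_labs._query_scale_out import list_qso_settings

    # All semantic models in the workspace:
    df_all = list_qso_settings(workspace="Analytics")
    # A single model, by name or ID:
    df_one = list_qso_settings(dataset="Sales Model", workspace="Analytics")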
@@ -381,8 +391,9 @@ def list_qso_settings(
     return df


+@log
 def set_workspace_default_storage_format(
-    storage_format: str, workspace: Optional[str] = None
+    storage_format: str, workspace: Optional[str | UUID] = None
 ):
     """
     Sets the default storage format for semantic models within a workspace.

@@ -391,8 +402,8 @@ def set_workspace_default_storage_format(
     ----------
     storage_format : str
         The storage format for the semantic model. Valid options: 'Large', 'Small'.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

@@ -408,22 +419,22 @@ def set_workspace_default_storage_format(
         f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
     )

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     # Check current storage format
-    dfW = fabric.list_workspaces(filter=f"name eq '{
+    dfW = fabric.list_workspaces(filter=f"name eq '{workspace_name}'")
     if len(dfW) == 0:
         raise ValueError()
     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]

     if current_storage_format == storage_format:
         print(
-            f"{icons.info} The '{
+            f"{icons.info} The '{workspace_name}' is already set to a default storage format of '{current_storage_format}'."
         )
         return

     request_body = {
-        "name":
+        "name": workspace_name,
         "defaultDatasetStorageFormat": storage_format,
     }

@@ -434,5 +445,5 @@ def set_workspace_default_storage_format(
         raise FabricHTTPException(response)

     print(
-        f"{icons.green_dot} The default storage format for the '{
+        f"{icons.green_dot} The default storage format for the '{workspace_name}' workspace has been updated to '{storage_format}."
     )
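Note: set_workspace_default_storage_format is workspace-level; only the workspace parameter gained UUID support here. A sketch (placeholder name; valid formats per the docstring are 'Large' and 'Small'):

    from sempy_labs._query_scale_out import set_workspace_default_storage_format

    set_workspace_default_storage_format(storage_format="Large", workspace="Analytics")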
sempy_labs/_refresh_semantic_model.py
CHANGED

@@ -1,7 +1,6 @@
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     resolve_workspace_name_and_id,
     _get_partition_map,
     _process_and_display_chart,

@@ -27,7 +26,7 @@ def refresh_semantic_model(
     retry_count: int = 0,
     apply_refresh_policy: bool = True,
     max_parallelism: int = 10,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     visualize: bool = False,
     commit_mode: str = "transactional",
 ) -> pd.DataFrame | None:

@@ -36,7 +35,7 @@ def refresh_semantic_model(

     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
     tables : str, List[str], default=None
         A string or a list of tables to refresh.

@@ -52,8 +51,8 @@ def refresh_semantic_model(
         Determines the maximum number of threads that can run the processing commands in parallel.
         This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods.
         Defaults to 10.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     visualize : bool, default=False
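Note: in refresh_semantic_model only the workspace annotation changed; dataset already accepted a UUID in 0.8.10. A sketch using parameters visible in these hunks (model, table, and ID values are placeholders; other parameters keep their documented defaults):

    from uuid import UUID
    from sempy_labs._refresh_semantic_model import refresh_semantic_model

    refresh_semantic_model(
        dataset="Sales Model",
        tables=["Sales"],  # hypothetical table name
        workspace=UUID("11111111-2222-3333-4444-555555555555"),  # placeholder ID
    )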
@@ -278,39 +277,40 @@ def refresh_semantic_model(

 @log
 def cancel_dataset_refresh(
-    dataset: str
+    dataset: str | UUID,
+    request_id: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Cancels the refresh of a semantic model which was executed via the `Enhanced Refresh API <https://learn.microsoft.com/power-bi/connect-data/asynchronous-refresh>`_

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     request_id : str, default=None
         The request id of a semantic model refresh.
         Defaults to finding the latest active refresh of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    rr = fabric.list_refresh_requests(dataset=
+    rr = fabric.list_refresh_requests(dataset=dataset_id, workspace=workspace_id)
     rr_filt = rr[rr["Status"] == "Unknown"]

     if request_id is None:
         if len(rr_filt) == 0:
             raise ValueError(
-                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{
+                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )

         request_id = rr_filt["Request Id"].iloc[0]

-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
-
     client = fabric.PowerBIRestClient()

     response = client.delete(

@@ -320,12 +320,14 @@ def cancel_dataset_refresh(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{request_id}' refresh request for the '{
+        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
     )


 def get_semantic_model_refresh_history(
-    dataset: str
+    dataset: str | UUID,
+    request_id: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Obtains the semantic model refresh history (refreshes executed via the Enhanced Refresh API).
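Note: cancel_dataset_refresh drops the late resolve_dataset_id call; both IDs are resolved up front. A sketch (placeholder names):

    from sempy_labs._refresh_semantic_model import cancel_dataset_refresh

    # Without request_id, the latest active Enhanced Refresh API request is cancelled.
    cancel_dataset_refresh(dataset="Sales Model", workspace="Analytics")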
@@ -334,13 +336,13 @@ def get_semantic_model_refresh_history(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     request_id : str, default=None
         The request id of a semantic model refresh.
         Defaults to None which resolves to showing all refresh requests for the given semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -350,8 +352,8 @@ def get_semantic_model_refresh_history(
         A pandas dataframe showing the semantic model refresh history.
     """

-    workspace_name =
-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     df = pd.DataFrame(
         columns=[
             "Request Id",

@@ -363,9 +365,6 @@ def get_semantic_model_refresh_history(
         ]
     )

-    dataset_id = fabric.resolve_item_id(
-        item_name=dataset, workspace=workspace_id, type="SemanticModel"
-    )
     client = fabric.PowerBIRestClient()
     response = client.get(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes"