semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
- sempy_labs/__init__.py +14 -2
- sempy_labs/_authentication.py +31 -2
- sempy_labs/_clear_cache.py +39 -37
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +57 -11
- sempy_labs/_generate_semantic_model.py +100 -71
- sempy_labs/_git.py +134 -67
- sempy_labs/_helper_functions.py +199 -145
- sempy_labs/_job_scheduler.py +92 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +281 -120
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +27 -25
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +60 -28
- sempy_labs/_notebooks.py +73 -39
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +67 -64
- sempy_labs/_refresh_semantic_model.py +47 -42
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +12 -9
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +34 -31
- sempy_labs/_warehouses.py +22 -21
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/__init__.py +4 -0
- sempy_labs/admin/_basic_functions.py +44 -12
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_items.py +4 -4
- sempy_labs/admin/_scanner.py +7 -5
- sempy_labs/directlake/_directlake_schema_compare.py +18 -14
- sempy_labs/directlake/_directlake_schema_sync.py +18 -12
- sempy_labs/directlake/_dl_helper.py +36 -32
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
- sempy_labs/lakehouse/_lakehouse.py +17 -13
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +40 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +13 -9
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +117 -38
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_generate_semantic_model.py

@@ -6,7 +6,7 @@ from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
-    resolve_dataset_id,
+    resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
     lro,
@@ -14,12 +14,13 @@ from sempy_labs._helper_functions import (
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from uuid import UUID
 
 
 def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     overwrite: bool = True,
 ):
     """
@@ -31,21 +32,21 @@ def create_blank_semantic_model(
         Name of the semantic model.
     compatibility_level : int, default=1605
         The compatibility level of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]
 
     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
+            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace_name}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
         )
 
     min_compat = 1500
@@ -109,15 +110,15 @@ def create_blank_semantic_model(
         }}
     """
 
-    fabric.execute_tmsl(script=tmsl, workspace=workspace)
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
 
     return print(
-        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace_name}' workspace."
    )
 
 
 def create_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a new semantic model based on a Model.bim file.
@@ -130,20 +131,20 @@ def create_semantic_model_from_bim(
         Name of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    dfI = fabric.list_datasets(workspace=workspace, mode="rest")
+    dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]
 
     if len(dfI_filt) > 0:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace_name}' workspace."
         )
 
     client = fabric.FabricRestClient()
@@ -178,12 +179,12 @@ def create_semantic_model_from_bim(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
    )
 
 
 def update_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str | UUID, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Updates a semantic model definition based on a Model.bim file.
@@ -192,25 +193,18 @@ def update_semantic_model_from_bim(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-    if len(dfD_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace does not exist."
-        )
-    dataset_id = dfD_filt["Dataset Id"].iloc[0]
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
@@ -219,7 +213,7 @@ def update_semantic_model_from_bim(
     payloadBim = _conv_b64(bim_file)
 
     request_body = {
-        "displayName": dataset,
+        "displayName": dataset_name,
         "definition": {
             "parts": [
                 {
@@ -244,15 +238,15 @@ def update_semantic_model_from_bim(
     lro(client, response, status_codes=[200, 202], return_status_code=True)
 
     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been updated within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
    )
 
 
 def deploy_semantic_model(
     source_dataset: str,
-    source_workspace: Optional[str] = None,
+    source_workspace: Optional[str | UUID] = None,
     target_dataset: Optional[str] = None,
-    target_workspace: Optional[str] = None,
+    target_workspace: Optional[str | UUID] = None,
     refresh_target_dataset: bool = True,
     overwrite: bool = False,
 ):
@@ -263,14 +257,14 @@ def deploy_semantic_model(
     ----------
     source_dataset : str
         Name of the semantic model to deploy.
-    source_workspace : str, default=None
-        The Fabric workspace name.
+    source_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     target_dataset: str
         Name of the new semantic model to be created.
-    target_workspace : str, default=None
-        The Fabric workspace name in which the new semantic model will be deployed.
+    target_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the new semantic model will be deployed.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     refresh_target_dataset : bool, default=True
@@ -279,50 +273,59 @@ def deploy_semantic_model(
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """
 
-    source_workspace = fabric.resolve_workspace_name(source_workspace)
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )
 
     if target_workspace is None:
-        target_workspace = source_workspace
+        target_workspace_name = source_workspace_name
+        target_workspace_id = fabric.resolve_workspace_id(target_workspace_name)
+    else:
+        (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+            target_workspace
+        )
 
     if target_dataset is None:
         target_dataset = source_dataset
 
-    if target_dataset == source_dataset and target_workspace == source_workspace:
+    if (
+        target_dataset == source_dataset
+        and target_workspace_name == source_workspace_name
+    ):
         raise ValueError(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
             f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )
 
-    dfD = fabric.list_datasets(workspace=target_workspace, mode="rest")
+    dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
+            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
        )
 
-    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
+    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace_id)
 
     # Create the semantic model if the model does not exist
-    if len(dfD_filt) == 0:
+    if dfD_filt.empty:
         create_semantic_model_from_bim(
             dataset=target_dataset,
             bim_file=bim,
-            workspace=target_workspace,
-            overwrite=overwrite,
+            workspace=target_workspace_id,
         )
     # Update the semantic model if the model exists
     else:
         update_semantic_model_from_bim(
-            dataset=target_dataset, bim_file=bim, workspace=target_workspace
+            dataset=target_dataset, bim_file=bim, workspace=target_workspace_id
         )
 
     if refresh_target_dataset:
-        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
+        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)
 
 
 def get_semantic_model_bim(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     save_to_file_name: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
 ) -> dict:
@@ -331,10 +334,10 @@ def get_semantic_model_bim(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
@@ -350,8 +353,14 @@ def get_semantic_model_bim(
         The Model.bim file for the semantic model.
     """
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     bimJson = get_semantic_model_definition(
-        dataset=dataset, workspace=workspace, format="TMSL", return_dataframe=False
+        dataset=dataset_id,
+        workspace=workspace_id,
+        format="TMSL",
+        return_dataframe=False,
     )
 
     if save_to_file_name is not None:
@@ -371,16 +380,16 @@ def get_semantic_model_bim(
         with open(filePath, "w") as json_file:
             json.dump(bimJson, json_file, indent=4)
         print(
-            f"{icons.green_dot} The {fileExt} file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+            f"{icons.green_dot} The {fileExt} file for the '{dataset_name}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )
 
     return bimJson
 
 
 def get_semantic_model_definition(
-    dataset: str,
+    dataset: str | UUID,
     format: str = "TMSL",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     return_dataframe: bool = True,
 ) -> pd.DataFrame | dict | List:
     """
@@ -390,12 +399,12 @@ def get_semantic_model_definition(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     format : str, default="TMSL"
         The output format. Valid options are "TMSL" or "TMDL". "TMSL" returns the .bim file whereas "TMDL" returns the collection of TMDL files. Can also enter 'bim' for the TMSL version.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -418,10 +427,10 @@ def get_semantic_model_definition(
             f"{icons.red_dot} Invalid format. Valid options: {valid_formats}."
         )
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     client = fabric.FabricRestClient()
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
     response = client.post(
         f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
     )
@@ -445,21 +454,41 @@ def get_semantic_model_definition(
         return decoded_parts
 
 
-def get_semantic_model_size(dataset: str, workspace: Optional[str] = None):
+def get_semantic_model_size(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Gets size of the semantic model in bytes.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    int
+        The size of the semantic model in bytes.
+    """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     dict = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
        """,
    )
 
     used_size = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
         """,