semantic-link-labs: semantic_link_labs-0.8.10-py3-none-any.whl → semantic_link_labs-0.9.0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
This version of semantic-link-labs was flagged as a potentially problematic release.
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +34 -3
- sempy_labs/_authentication.py +80 -4
- sempy_labs/_capacities.py +770 -200
- sempy_labs/_capacity_migration.py +7 -37
- sempy_labs/_clear_cache.py +37 -35
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +16 -14
- sempy_labs/_generate_semantic_model.py +107 -62
- sempy_labs/_git.py +105 -43
- sempy_labs/_helper_functions.py +251 -194
- sempy_labs/_job_scheduler.py +227 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +150 -126
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +210 -207
- sempy_labs/_model_bpa_bulk.py +2 -2
- sempy_labs/_model_bpa_rules.py +3 -3
- sempy_labs/_model_dependencies.py +55 -29
- sempy_labs/_notebooks.py +29 -25
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +75 -64
- sempy_labs/_refresh_semantic_model.py +25 -26
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +19 -12
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +38 -33
- sempy_labs/_warehouses.py +26 -25
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/_basic_functions.py +166 -115
- sempy_labs/admin/_domains.py +7 -2
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_git.py +4 -1
- sempy_labs/admin/_items.py +11 -6
- sempy_labs/admin/_scanner.py +10 -5
- sempy_labs/directlake/_directlake_schema_compare.py +25 -16
- sempy_labs/directlake/_directlake_schema_sync.py +24 -12
- sempy_labs/directlake/_dl_helper.py +74 -55
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
- sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
- sempy_labs/lakehouse/_lakehouse.py +7 -20
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +85 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +15 -11
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +139 -21
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
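
The common thread across these files is visible in the expanded diff below: `dataset` and `workspace` parameters widen from `str` to `str | UUID`, with `resolve_workspace_name_and_id` and `resolve_dataset_name_and_id` normalizing whichever form is passed, and public functions gain the `@log` decorator. A minimal sketch of the calling convention this enables; the model and workspace names and GUIDs are placeholders, not values from this diff:

```python
from uuid import UUID

import sempy_labs as labs

# 0.8.10 style: identify the model and workspace by display name.
bim = labs.get_semantic_model_bim(dataset="Sales Model", workspace="Sales WS")

# 0.9.0 style: the widened `str | UUID` signatures should also accept IDs
# directly (placeholder GUIDs shown), skipping the name lookup.
bim = labs.get_semantic_model_bim(
    dataset=UUID("00000000-0000-0000-0000-000000000000"),
    workspace=UUID("00000000-0000-0000-0000-000000000001"),
)
```

Per the diff, the resolved IDs are what the functions pass to the underlying REST and `sempy.fabric` calls, so either form ends up on the same code path.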
Expanded diff for sempy_labs/_generate_semantic_model.py:

```diff
@@ -3,10 +3,11 @@ import pandas as pd
 import json
 import os
 from typing import Optional, List
+from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
-
+    resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
     lro,
@@ -14,12 +15,14 @@ from sempy_labs._helper_functions import (
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from uuid import UUID
 
 
+@log
 def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     overwrite: bool = True,
 ):
     """
@@ -31,21 +34,21 @@ def create_blank_semantic_model(
         Name of the semantic model.
     compatibility_level : int, default=1605
         The compatibility level of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """
 
-
-    dfD = fabric.list_datasets(workspace=
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]
 
     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{dataset}' semantic model already exists within the '{
+            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace_name}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
         )
 
     min_compat = 1500
@@ -109,15 +112,16 @@ def create_blank_semantic_model(
     }}
     """
 
-    fabric.execute_tmsl(script=tmsl, workspace=
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
 
     return print(
-        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{
+        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace_name}' workspace."
     )
 
 
+@log
 def create_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a new semantic model based on a Model.bim file.
@@ -130,20 +134,20 @@ def create_semantic_model_from_bim(
         Name of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    dfI = fabric.list_datasets(workspace=
+    dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]
 
     if len(dfI_filt) > 0:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{
+            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace_name}' workspace."
         )
 
     client = fabric.FabricRestClient()
@@ -178,12 +182,13 @@ def create_semantic_model_from_bim(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
     )
 
 
+@log
 def update_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str | UUID, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Updates a semantic model definition based on a Model.bim file.
@@ -192,18 +197,18 @@ def update_semantic_model_from_bim(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
-    dataset_id =
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
@@ -212,7 +217,7 @@
     payloadBim = _conv_b64(bim_file)
 
     request_body = {
-        "displayName":
+        "displayName": dataset_name,
         "definition": {
             "parts": [
                 {
@@ -237,15 +242,16 @@
     lro(client, response, status_codes=[200, 202], return_status_code=True)
 
     print(
-        f"{icons.green_dot} The '{
+        f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
     )
 
 
+@log
 def deploy_semantic_model(
     source_dataset: str,
-    source_workspace: Optional[str] = None,
+    source_workspace: Optional[str | UUID] = None,
     target_dataset: Optional[str] = None,
-    target_workspace: Optional[str] = None,
+    target_workspace: Optional[str | UUID] = None,
     refresh_target_dataset: bool = True,
     overwrite: bool = False,
 ):
@@ -256,14 +262,14 @@ def deploy_semantic_model(
     ----------
     source_dataset : str
         Name of the semantic model to deploy.
-    source_workspace : str, default=None
-        The Fabric workspace name.
+    source_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     target_dataset: str
         Name of the new semantic model to be created.
-    target_workspace : str, default=None
-        The Fabric workspace name in which the new semantic model will be deployed.
+    target_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the new semantic model will be deployed.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     refresh_target_dataset : bool, default=True
@@ -272,49 +278,60 @@ def deploy_semantic_model(
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """
 
-
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )
 
     if target_workspace is None:
-
+        target_workspace_name = source_workspace_name
+        target_workspace_id = fabric.resolve_workspace_id(target_workspace_name)
+    else:
+        (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+            target_workspace
+        )
 
     if target_dataset is None:
         target_dataset = source_dataset
 
-    if
+    if (
+        target_dataset == source_dataset
+        and target_workspace_name == source_workspace_name
+    ):
         raise ValueError(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
             f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )
 
-    dfD = fabric.list_datasets(workspace=
+    dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
    dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{
+            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
         )
 
-    bim = get_semantic_model_bim(dataset=source_dataset, workspace=
+    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace_id)
 
     # Create the semantic model if the model does not exist
     if dfD_filt.empty:
         create_semantic_model_from_bim(
             dataset=target_dataset,
             bim_file=bim,
-            workspace=
+            workspace=target_workspace_id,
         )
     # Update the semantic model if the model exists
     else:
         update_semantic_model_from_bim(
-            dataset=target_dataset, bim_file=bim, workspace=
+            dataset=target_dataset, bim_file=bim, workspace=target_workspace_id
         )
 
     if refresh_target_dataset:
-        refresh_semantic_model(dataset=target_dataset, workspace=
+        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)
 
 
+@log
 def get_semantic_model_bim(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     save_to_file_name: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
 ) -> dict:
@@ -323,10 +340,10 @@ def get_semantic_model_bim(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
@@ -342,8 +359,14 @@
         The Model.bim file for the semantic model.
     """
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     bimJson = get_semantic_model_definition(
-        dataset=
+        dataset=dataset_id,
+        workspace=workspace_id,
+        format="TMSL",
+        return_dataframe=False,
     )
 
     if save_to_file_name is not None:
@@ -363,16 +386,17 @@ def get_semantic_model_bim(
         with open(filePath, "w") as json_file:
             json.dump(bimJson, json_file, indent=4)
         print(
-            f"{icons.green_dot} The {fileExt} file for the '{
+            f"{icons.green_dot} The {fileExt} file for the '{dataset_name}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )
 
     return bimJson
 
 
+@log
 def get_semantic_model_definition(
-    dataset: str,
+    dataset: str | UUID,
     format: str = "TMSL",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     return_dataframe: bool = True,
 ) -> pd.DataFrame | dict | List:
     """
@@ -382,12 +406,12 @@ def get_semantic_model_definition(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     format : str, default="TMSL"
         The output format. Valid options are "TMSL" or "TMDL". "TMSL" returns the .bim file whereas "TMDL" returns the collection of TMDL files. Can also enter 'bim' for the TMSL version.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -410,10 +434,10 @@
             f"{icons.red_dot} Invalid format. Valid options: {valid_formats}."
         )
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     client = fabric.FabricRestClient()
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
     response = client.post(
         f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
     )
@@ -437,21 +461,42 @@ def get_semantic_model_definition(
     return decoded_parts
 
 
-
+@log
+def get_semantic_model_size(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Gets size of the semantic model in bytes.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    int
+        The size of the semantic model in bytes.
+    """
 
-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     dict = fabric.evaluate_dax(
-        dataset=
-        workspace=
+        dataset=dataset_id,
+        workspace=workspace_id,
        dax_string="""
        EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
        """,
    )
 
     used_size = fabric.evaluate_dax(
-        dataset=
-        workspace=
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
         """,
```
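
Beyond the signature changes, the final hunk adds a new public function, `get_semantic_model_size`, which gathers column dictionary sizes and segment sizes via the DAX `INFO` functions shown above. A minimal usage sketch, assuming the function is exported at the package level like its siblings in this module; the model name is a placeholder, and per its docstring the return value is the size in bytes:

```python
import sempy_labs as labs

# Placeholder model name; a UUID works as well per the str | UUID signature.
size_bytes = labs.get_semantic_model_size(dataset="Sales Model")
print(f"Model size: {size_bytes / (1024 ** 2):.1f} MiB")
```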