semantic-link-labs 0.8.9-py3-none-any.whl → 0.8.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
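The diff below is from sempy_labs/_generate_semantic_model.py (file 15 in the list above). Its recurring change is that dataset and workspace parameters widen from str to str | UUID and are normalized up front via resolve_workspace_name_and_id and resolve_dataset_name_and_id, so each function accepts either a display name or an ID. A minimal sketch of the resulting calling pattern, assuming these functions are re-exported at the sempy_labs top level (the workspace/model names and UUID values below are hypothetical):

import sempy_labs as labs
from uuid import UUID

# By display name, as in 0.8.9 and earlier (hypothetical names):
bim = labs.get_semantic_model_bim(dataset="Sales Model", workspace="Analytics")

# By ID, accepted as of the signatures shown in this diff (hypothetical UUIDs):
bim = labs.get_semantic_model_bim(
    dataset=UUID("11111111-2222-3333-4444-555555555555"),
    workspace=UUID("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"),
)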
--- a/sempy_labs/_generate_semantic_model.py
+++ b/sempy_labs/_generate_semantic_model.py
@@ -6,7 +6,7 @@ from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
-    resolve_dataset_id,
+    resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
     lro,
@@ -14,12 +14,13 @@ from sempy_labs._helper_functions import (
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from uuid import UUID


 def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     overwrite: bool = True,
 ):
     """
@@ -31,21 +32,21 @@ def create_blank_semantic_model(
         Name of the semantic model.
     compatibility_level : int, default=1605
         The compatibility level of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]

     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
+            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace_name}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
         )

     min_compat = 1500
@@ -109,15 +110,15 @@ def create_blank_semantic_model(
     }}
     """

-    fabric.execute_tmsl(script=tmsl, workspace=workspace)
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)

     return print(
-        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace_name}' workspace."
     )


 def create_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a new semantic model based on a Model.bim file.
@@ -130,20 +131,20 @@ def create_semantic_model_from_bim(
         Name of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfI = fabric.list_datasets(workspace=workspace, mode="rest")
+    dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]

     if len(dfI_filt) > 0:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace_name}' workspace."
         )

     client = fabric.FabricRestClient()
@@ -178,12 +179,12 @@ def create_semantic_model_from_bim(
     lro(client, response, status_codes=[201, 202])

     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
     )


 def update_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str | UUID, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Updates a semantic model definition based on a Model.bim file.
@@ -192,25 +193,18 @@ def update_semantic_model_from_bim(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-    if len(dfD_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace does not exist."
-        )
-    dataset_id = dfD_filt["Dataset Id"].iloc[0]
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
@@ -219,7 +213,7 @@ def update_semantic_model_from_bim(
     payloadBim = _conv_b64(bim_file)

     request_body = {
-        "displayName": dataset,
+        "displayName": dataset_name,
         "definition": {
             "parts": [
                 {
@@ -244,15 +238,15 @@ def update_semantic_model_from_bim(
     lro(client, response, status_codes=[200, 202], return_status_code=True)

     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been updated within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
     )


 def deploy_semantic_model(
     source_dataset: str,
-    source_workspace: Optional[str] = None,
+    source_workspace: Optional[str | UUID] = None,
     target_dataset: Optional[str] = None,
-    target_workspace: Optional[str] = None,
+    target_workspace: Optional[str | UUID] = None,
     refresh_target_dataset: bool = True,
     overwrite: bool = False,
 ):
@@ -263,14 +257,14 @@ def deploy_semantic_model(
     ----------
     source_dataset : str
         Name of the semantic model to deploy.
-    source_workspace : str, default=None
-        The Fabric workspace name.
+    source_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     target_dataset: str
         Name of the new semantic model to be created.
-    target_workspace : str, default=None
-        The Fabric workspace name in which the new semantic model will be deployed.
+    target_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the new semantic model will be deployed.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     refresh_target_dataset : bool, default=True
@@ -279,50 +273,59 @@ def deploy_semantic_model(
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """

-    source_workspace = fabric.resolve_workspace_name(source_workspace)
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )

     if target_workspace is None:
-        target_workspace = source_workspace
+        target_workspace_name = source_workspace_name
+        target_workspace_id = fabric.resolve_workspace_id(target_workspace_name)
+    else:
+        (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+            target_workspace
+        )

     if target_dataset is None:
         target_dataset = source_dataset

-    if target_dataset == source_dataset and target_workspace == source_workspace:
+    if (
+        target_dataset == source_dataset
+        and target_workspace_name == source_workspace_name
+    ):
         raise ValueError(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
             f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )

-    dfD = fabric.list_datasets(workspace=target_workspace, mode="rest")
+    dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
+            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
         )

-    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
+    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace_id)

     # Create the semantic model if the model does not exist
-    if len(dfD_filt) == 0:
+    if dfD_filt.empty:
         create_semantic_model_from_bim(
             dataset=target_dataset,
             bim_file=bim,
-            workspace=target_workspace,
-            overwrite=overwrite,
+            workspace=target_workspace_id,
         )
     # Update the semantic model if the model exists
     else:
         update_semantic_model_from_bim(
-            dataset=target_dataset, bim_file=bim, workspace=target_workspace
+            dataset=target_dataset, bim_file=bim, workspace=target_workspace_id
         )

     if refresh_target_dataset:
-        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
+        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)


 def get_semantic_model_bim(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     save_to_file_name: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
 ) -> dict:
@@ -331,10 +334,10 @@ def get_semantic_model_bim(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
@@ -350,8 +353,14 @@ def get_semantic_model_bim(
         The Model.bim file for the semantic model.
     """

+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     bimJson = get_semantic_model_definition(
-        dataset=dataset, workspace=workspace, format="TMSL", return_dataframe=False
+        dataset=dataset_id,
+        workspace=workspace_id,
+        format="TMSL",
+        return_dataframe=False,
     )

     if save_to_file_name is not None:
@@ -371,16 +380,16 @@ def get_semantic_model_bim(
         with open(filePath, "w") as json_file:
             json.dump(bimJson, json_file, indent=4)
         print(
-            f"{icons.green_dot} The {fileExt} file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+            f"{icons.green_dot} The {fileExt} file for the '{dataset_name}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )

     return bimJson


 def get_semantic_model_definition(
-    dataset: str,
+    dataset: str | UUID,
     format: str = "TMSL",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     return_dataframe: bool = True,
 ) -> pd.DataFrame | dict | List:
     """
@@ -390,12 +399,12 @@ def get_semantic_model_definition(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     format : str, default="TMSL"
         The output format. Valid options are "TMSL" or "TMDL". "TMSL" returns the .bim file whereas "TMDL" returns the collection of TMDL files. Can also enter 'bim' for the TMSL version.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -418,10 +427,10 @@ def get_semantic_model_definition(
             f"{icons.red_dot} Invalid format. Valid options: {valid_formats}."
         )

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.FabricRestClient()
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
     response = client.post(
         f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
     )
@@ -445,21 +454,41 @@ def get_semantic_model_definition(
         return decoded_parts


-def get_semantic_model_size(dataset: str, workspace: Optional[str] = None):
+def get_semantic_model_size(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Gets size of the semantic model in bytes.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    int
+        The size of the semantic model in
+    """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     dict = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
         """,
     )

     used_size = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
         """,