semantic-link-labs 0.8.10__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

Files changed (73)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
  3. sempy_labs/__init__.py +6 -2
  4. sempy_labs/_clear_cache.py +39 -37
  5. sempy_labs/_connections.py +13 -13
  6. sempy_labs/_data_pipelines.py +20 -20
  7. sempy_labs/_dataflows.py +27 -28
  8. sempy_labs/_dax.py +41 -47
  9. sempy_labs/_environments.py +26 -23
  10. sempy_labs/_eventhouses.py +16 -15
  11. sempy_labs/_eventstreams.py +16 -15
  12. sempy_labs/_external_data_shares.py +18 -20
  13. sempy_labs/_gateways.py +14 -14
  14. sempy_labs/_generate_semantic_model.py +99 -62
  15. sempy_labs/_git.py +105 -43
  16. sempy_labs/_helper_functions.py +148 -131
  17. sempy_labs/_job_scheduler.py +92 -0
  18. sempy_labs/_kql_databases.py +16 -15
  19. sempy_labs/_kql_querysets.py +16 -15
  20. sempy_labs/_list_functions.py +114 -99
  21. sempy_labs/_managed_private_endpoints.py +19 -17
  22. sempy_labs/_mirrored_databases.py +51 -48
  23. sempy_labs/_mirrored_warehouses.py +5 -4
  24. sempy_labs/_ml_experiments.py +16 -15
  25. sempy_labs/_ml_models.py +15 -14
  26. sempy_labs/_model_bpa.py +3 -3
  27. sempy_labs/_model_dependencies.py +55 -29
  28. sempy_labs/_notebooks.py +27 -25
  29. sempy_labs/_one_lake_integration.py +23 -26
  30. sempy_labs/_query_scale_out.py +67 -64
  31. sempy_labs/_refresh_semantic_model.py +25 -26
  32. sempy_labs/_spark.py +33 -32
  33. sempy_labs/_sql.py +12 -9
  34. sempy_labs/_translations.py +10 -7
  35. sempy_labs/_vertipaq.py +34 -31
  36. sempy_labs/_warehouses.py +22 -21
  37. sempy_labs/_workspace_identity.py +11 -10
  38. sempy_labs/_workspaces.py +40 -33
  39. sempy_labs/admin/_basic_functions.py +10 -12
  40. sempy_labs/admin/_external_data_share.py +3 -3
  41. sempy_labs/admin/_items.py +4 -4
  42. sempy_labs/admin/_scanner.py +3 -1
  43. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  44. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  45. sempy_labs/directlake/_dl_helper.py +25 -26
  46. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  47. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  48. sempy_labs/directlake/_get_shared_expression.py +4 -3
  49. sempy_labs/directlake/_guardrails.py +12 -6
  50. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  51. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  52. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  54. sempy_labs/directlake/_warm_cache.py +87 -65
  55. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  57. sempy_labs/lakehouse/_lakehouse.py +17 -13
  58. sempy_labs/lakehouse/_shortcuts.py +42 -23
  59. sempy_labs/migration/_create_pqt_file.py +16 -11
  60. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  61. sempy_labs/report/_download_report.py +9 -8
  62. sempy_labs/report/_generate_report.py +40 -44
  63. sempy_labs/report/_paginated.py +9 -9
  64. sempy_labs/report/_report_bpa.py +13 -9
  65. sempy_labs/report/_report_functions.py +80 -91
  66. sempy_labs/report/_report_helper.py +8 -4
  67. sempy_labs/report/_report_list_functions.py +24 -13
  68. sempy_labs/report/_report_rebind.py +17 -16
  69. sempy_labs/report/_reportwrapper.py +41 -33
  70. sempy_labs/tom/_model.py +43 -6
  71. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  72. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  73. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
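
The change repeated across nearly every module in this release is the widening of workspace and dataset parameters from str to str | UUID, resolved up front into a (name, id) pair by helpers such as resolve_workspace_name_and_id and the new resolve_dataset_name_and_id. A minimal sketch of that pattern (an illustration only, not the library's actual helper, which also handles defaults and REST lookups):

    # Illustrative sketch of the name-or-UUID resolution pattern used in 0.8.11.
    from typing import Optional, Tuple
    from uuid import UUID

    def _resolve_name_and_id(
        value: Optional[str | UUID], lookup: dict[str, str]
    ) -> Tuple[str, str]:
        """Return (name, id) whether 'value' is a display name or a UUID."""
        if value is None:
            raise ValueError("this sketch does not model the notebook default")
        try:
            # A UUID, or a UUID-shaped string, is treated as the object's ID.
            object_id = str(UUID(str(value)))
            name = next(k for k, v in lookup.items() if v == object_id)
        except ValueError:
            # Anything else is treated as a display name to look up.
            name = str(value)
            object_id = lookup[name]
        return name, object_id

    workspaces = {"Sales": "11111111-2222-3333-4444-555555555555"}
    print(_resolve_name_and_id("Sales", workspaces))  # by name
    print(_resolve_name_and_id(UUID("11111111-2222-3333-4444-555555555555"), workspaces))  # by ID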
@@ -6,7 +6,7 @@ from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
-    resolve_dataset_id,
+    resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
     lro,
@@ -14,12 +14,13 @@ from sempy_labs._helper_functions import (
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from uuid import UUID


 def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     overwrite: bool = True,
 ):
     """
@@ -31,21 +32,21 @@ def create_blank_semantic_model(
         Name of the semantic model.
     compatibility_level : int, default=1605
         The compatibility level of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]

     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
+            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace_name}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
         )

     min_compat = 1500
@@ -109,15 +110,15 @@ def create_blank_semantic_model(
     }}
     """

-    fabric.execute_tmsl(script=tmsl, workspace=workspace)
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)

     return print(
-        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace_name}' workspace."
     )


 def create_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a new semantic model based on a Model.bim file.
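
With the widened signature, create_blank_semantic_model accepts the workspace by name or by ID. A usage sketch (assumes a Fabric notebook with semantic-link-labs installed and the function re-exported at package level, as the __init__.py entry above suggests; the UUID is a placeholder):

    from uuid import UUID
    import sempy_labs as labs

    # Equivalent calls, assuming the placeholder UUID identifies the 'Sales' workspace.
    labs.create_blank_semantic_model(dataset="Blank Model", workspace="Sales")
    labs.create_blank_semantic_model(
        dataset="Blank Model",
        workspace=UUID("11111111-2222-3333-4444-555555555555"),
        overwrite=True,
    )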
@@ -130,20 +131,20 @@ def create_semantic_model_from_bim(
         Name of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfI = fabric.list_datasets(workspace=workspace, mode="rest")
+    dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]

     if len(dfI_filt) > 0:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace_name}' workspace."
         )

     client = fabric.FabricRestClient()
@@ -178,12 +179,12 @@ def create_semantic_model_from_bim(
     lro(client, response, status_codes=[201, 202])

     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
     )


 def update_semantic_model_from_bim(
-    dataset: str, bim_file: dict, workspace: Optional[str] = None
+    dataset: str | UUID, bim_file: dict, workspace: Optional[str | UUID] = None
 ):
     """
     Updates a semantic model definition based on a Model.bim file.
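
update_semantic_model_from_bim now also takes the dataset by name or ID, and uses the resolved display name in the REST payload. A hedged round-trip sketch (placeholder names; assumes package-level re-exports):

    import sempy_labs as labs

    # Export the model's BIM, make an in-place edit, and push it back.
    bim = labs.get_semantic_model_bim(dataset="Sales Model", workspace="Sales")
    bim["model"]["culture"] = "en-US"  # any edit to the TMSL dictionary
    labs.update_semantic_model_from_bim(
        dataset="Sales Model", bim_file=bim, workspace="Sales"
    )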
@@ -192,18 +193,18 @@ def update_semantic_model_from_bim(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     bim_file : dict
         The model.bim file.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
@@ -212,7 +213,7 @@ def update_semantic_model_from_bim(
     payloadBim = _conv_b64(bim_file)

     request_body = {
-        "displayName": dataset,
+        "displayName": dataset_name,
         "definition": {
             "parts": [
                 {
@@ -237,15 +238,15 @@ def update_semantic_model_from_bim(
     lro(client, response, status_codes=[200, 202], return_status_code=True)

     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been updated within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
     )


 def deploy_semantic_model(
     source_dataset: str,
-    source_workspace: Optional[str] = None,
+    source_workspace: Optional[str | UUID] = None,
     target_dataset: Optional[str] = None,
-    target_workspace: Optional[str] = None,
+    target_workspace: Optional[str | UUID] = None,
     refresh_target_dataset: bool = True,
     overwrite: bool = False,
 ):
@@ -256,14 +257,14 @@ def deploy_semantic_model(
     ----------
     source_dataset : str
         Name of the semantic model to deploy.
-    source_workspace : str, default=None
-        The Fabric workspace name.
+    source_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     target_dataset: str
         Name of the new semantic model to be created.
-    target_workspace : str, default=None
-        The Fabric workspace name in which the new semantic model will be deployed.
+    target_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the new semantic model will be deployed.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     refresh_target_dataset : bool, default=True
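
Both workspace parameters of deploy_semantic_model now take a name or an ID. A usage sketch (workspace and model names are placeholders; assumes package-level re-exports):

    import sempy_labs as labs

    # Copy a model from a development workspace to production and refresh it.
    labs.deploy_semantic_model(
        source_dataset="Sales Model",
        source_workspace="Dev",
        target_dataset="Sales Model",
        target_workspace="Prod",
        refresh_target_dataset=True,
    )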
@@ -272,49 +273,59 @@ def deploy_semantic_model(
         If set to True, overwrites the existing semantic model in the workspace if it exists.
     """

-    source_workspace = fabric.resolve_workspace_name(source_workspace)
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )

     if target_workspace is None:
-        target_workspace = source_workspace
+        target_workspace_name = source_workspace_name
+        target_workspace_id = fabric.resolve_workspace_id(target_workspace_name)
+    else:
+        (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+            target_workspace
+        )

     if target_dataset is None:
         target_dataset = source_dataset

-    if target_dataset == source_dataset and target_workspace == source_workspace:
+    if (
+        target_dataset == source_dataset
+        and target_workspace_name == source_workspace_name
+    ):
         raise ValueError(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
             f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )

-    dfD = fabric.list_datasets(workspace=target_workspace, mode="rest")
+    dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
+            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
         )

-    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
+    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace_id)

     # Create the semantic model if the model does not exist
     if dfD_filt.empty:
         create_semantic_model_from_bim(
             dataset=target_dataset,
             bim_file=bim,
-            workspace=target_workspace,
+            workspace=target_workspace_id,
         )
     # Update the semantic model if the model exists
     else:
         update_semantic_model_from_bim(
-            dataset=target_dataset, bim_file=bim, workspace=target_workspace
+            dataset=target_dataset, bim_file=bim, workspace=target_workspace_id
         )

     if refresh_target_dataset:
-        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
+        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)


 def get_semantic_model_bim(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     save_to_file_name: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
 ) -> dict:
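
A behavioral consequence of the deploy_semantic_model changes above: the same-model guard now compares resolved workspace names rather than the raw arguments, so referring to one workspace by name and by UUID no longer slips past the check. For example (placeholder UUID assumed to identify the 'Sales' workspace):

    from uuid import UUID
    import sempy_labs as labs

    # Source and target are the same workspace, given in two different forms;
    # with names resolved first, this now raises ValueError as intended.
    labs.deploy_semantic_model(
        source_dataset="Sales Model",
        source_workspace="Sales",
        target_workspace=UUID("11111111-2222-3333-4444-555555555555"),
    )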
@@ -323,10 +334,10 @@ def get_semantic_model_bim(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
@@ -342,8 +353,14 @@ def get_semantic_model_bim(
         The Model.bim file for the semantic model.
     """

+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     bimJson = get_semantic_model_definition(
-        dataset=dataset, workspace=workspace, format="TMSL", return_dataframe=False
+        dataset=dataset_id,
+        workspace=workspace_id,
+        format="TMSL",
+        return_dataframe=False,
     )

     if save_to_file_name is not None:
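
get_semantic_model_bim now resolves both identifiers before delegating to get_semantic_model_definition. A usage sketch (placeholder names; saving to a file requires a lakehouse attached to the notebook):

    import sempy_labs as labs

    # Retrieve the Model.bim as a dict and also save it to the attached lakehouse.
    bim = labs.get_semantic_model_bim(
        dataset="Sales Model",
        workspace="Sales",
        save_to_file_name="SalesModel",
    )
    print(list(bim))  # top-level TMSL keys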
@@ -363,16 +380,16 @@ def get_semantic_model_bim(
         with open(filePath, "w") as json_file:
             json.dump(bimJson, json_file, indent=4)
         print(
-            f"{icons.green_dot} The {fileExt} file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+            f"{icons.green_dot} The {fileExt} file for the '{dataset_name}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )

     return bimJson


 def get_semantic_model_definition(
-    dataset: str,
+    dataset: str | UUID,
     format: str = "TMSL",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     return_dataframe: bool = True,
 ) -> pd.DataFrame | dict | List:
     """
@@ -382,12 +399,12 @@ def get_semantic_model_definition(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     format : str, default="TMSL"
         The output format. Valid options are "TMSL" or "TMDL". "TMSL" returns the .bim file whereas "TMDL" returns the collection of TMDL files. Can also enter 'bim' for the TMSL version.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model resides.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -410,10 +427,10 @@ def get_semantic_model_definition(
             f"{icons.red_dot} Invalid format. Valid options: {valid_formats}."
         )

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     client = fabric.FabricRestClient()
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
     response = client.post(
         f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
     )
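
The TMDL path of get_semantic_model_definition returns the collection of definition files rather than a single .bim. A usage sketch (placeholder names):

    import sempy_labs as labs

    # Default: a dataframe listing the definition parts.
    df = labs.get_semantic_model_definition(
        dataset="Sales Model", workspace="Sales", format="TMDL"
    )

    # Or the decoded parts themselves.
    parts = labs.get_semantic_model_definition(
        dataset="Sales Model", workspace="Sales", format="TMDL", return_dataframe=False
    )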
@@ -437,21 +454,41 @@ def get_semantic_model_definition(
         return decoded_parts


-def get_semantic_model_size(dataset: str, workspace: Optional[str] = None):
+def get_semantic_model_size(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Gets size of the semantic model in bytes.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    int
+        The size of the semantic model in bytes.
+    """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     dict = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
        EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
        """,
    )

     used_size = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
        EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
        """,