semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of semantic-link-labs might be problematic.

Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
@@ -102,7 +102,7 @@ def create_relationship_name(
     )


-def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
+def resolve_report_id(report: str, workspace: Optional[str | UUID] = None) -> UUID:
     """
     Obtains the ID of the Power BI report.

@@ -110,8 +110,8 @@ def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
     ----------
     report : str
         The name of the Power BI report.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -121,25 +121,19 @@ def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
         The ID of the Power BI report.
     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    obj = fabric.resolve_item_id(item_name=report, type="Report", workspace=workspace)
-
-    return obj
+    return fabric.resolve_item_id(item_name=report, type="Report", workspace=workspace)


-def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str:
+def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None) -> str:
     """
     Obtains the name of the Power BI report.

     Parameters
     ----------
-    report_id : UUID
+    report_id : uuid.UUID
         The name of the Power BI report.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -149,26 +143,66 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str
         The name of the Power BI report.
     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    obj = fabric.resolve_item_name(
+    return fabric.resolve_item_name(
         item_id=report_id, type="Report", workspace=workspace
     )

-    return obj
+
+def resolve_item_name_and_id(
+    item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
+) -> Tuple[str, UUID]:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if _is_valid_uuid(item):
+        item_id = item
+        item_name = fabric.resolve_item_name(
+            item_id=item_id, type=type, workspace=workspace_id
+        )
+    else:
+        if type is None:
+            raise ValueError(
+                f"{icons.warning} Must specify a 'type' if specifying a name as the 'item'."
+            )
+        item_name = item
+        item_id = fabric.resolve_item_id(
+            item_name=item, type=type, workspace=workspace_id
+        )
+
+    return item_name, item_id
+
+
+def resolve_dataset_name_and_id(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> Tuple[str, UUID]:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if _is_valid_uuid(dataset):
+        dataset_id = dataset
+        dataset_name = fabric.resolve_item_name(
+            item_id=dataset_id, type="SemanticModel", workspace=workspace_id
+        )
+    else:
+        dataset_name = dataset
+        dataset_id = fabric.resolve_item_id(
+            item_name=dataset, type="SemanticModel", workspace=workspace_id
+        )
+
+    return dataset_name, dataset_id


-def resolve_dataset_id(dataset: str, workspace: Optional[str] = None) -> UUID:
+def resolve_dataset_id(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> UUID:
     """
     Obtains the ID of the semantic model.

     Parameters
     ----------
-    dataset : str
-        The name of the semantic model.
-    workspace : str, default=None
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
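
The hunk above introduces two new helpers, resolve_item_name_and_id and resolve_dataset_name_and_id, which accept either a display name or a UUID and return the (name, id) pair. A minimal usage sketch under stated assumptions: the workspace and item identifiers below are hypothetical, and the helpers are imported from sempy_labs._helper_functions, where this diff defines them.

    from sempy_labs._helper_functions import (
        resolve_dataset_name_and_id,
        resolve_item_name_and_id,
    )

    # A semantic model referenced by name; the workspace may be a name or a UUID.
    dataset_name, dataset_id = resolve_dataset_name_and_id(
        dataset="Sales Model",          # hypothetical model name
        workspace="Contoso Workspace",  # hypothetical workspace name
    )

    # An item referenced by UUID; 'type' is only mandatory when a name is passed.
    item_name, item_id = resolve_item_name_and_id(
        item="4de5a76d-210f-4d57-aab3-8a19de6a1e4f",  # hypothetical item UUID
        workspace=None,                 # falls back to the notebook's workspace
    )
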
@@ -179,26 +213,27 @@ def resolve_dataset_id(dataset: str, workspace: Optional[str] = None) -> UUID:
         The ID of the semantic model.
     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    obj = fabric.resolve_item_id(
-        item_name=dataset, type="SemanticModel", workspace=workspace
-    )
+    if _is_valid_uuid(dataset):
+        dataset_id = dataset
+    else:
+        dataset_id = fabric.resolve_item_id(
+            item_name=dataset, type="SemanticModel", workspace=workspace
+        )

-    return obj
+    return dataset_id


-def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> str:
+def resolve_dataset_name(
+    dataset_id: UUID, workspace: Optional[str | UUID] = None
+) -> str:
     """
     Obtains the name of the semantic model.

     Parameters
     ----------
-    dataset_id : UUID
+    dataset_id : uuid.UUID
         The name of the semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -209,30 +244,24 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> s
         The name of the semantic model.
     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    obj = fabric.resolve_item_name(
+    return fabric.resolve_item_name(
         item_id=dataset_id, type="SemanticModel", workspace=workspace
     )

-    return obj
-

 def resolve_lakehouse_name(
-    lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None
+    lakehouse_id: Optional[UUID] = None, workspace: Optional[str | UUID] = None
 ) -> str:
     """
     Obtains the name of the Fabric lakehouse.

     Parameters
     ----------
-    lakehouse_id : UUID, default=None
+    lakehouse_id : uuid.UUID, default=None
         The name of the Fabric lakehouse.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -242,21 +271,17 @@ def resolve_lakehouse_name(
         The name of the Fabric lakehouse.
     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
     if lakehouse_id is None:
         lakehouse_id = fabric.get_lakehouse_id()

-    obj = fabric.resolve_item_name(
+    return fabric.resolve_item_name(
         item_id=lakehouse_id, type="Lakehouse", workspace=workspace
     )

-    return obj
-

-def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None) -> UUID:
+def resolve_lakehouse_id(
+    lakehouse: str, workspace: Optional[str | UUID] = None
+) -> UUID:
     """
     Obtains the ID of the Fabric lakehouse.

@@ -264,38 +289,34 @@ def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None) -> UUI
     ----------
     lakehouse : str
         The name of the Fabric lakehouse.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
-    UUID
+    uuid.UUID
         The ID of the Fabric lakehouse.
     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    obj = fabric.resolve_item_id(
+    return fabric.resolve_item_id(
         item_name=lakehouse, type="Lakehouse", workspace=workspace
     )

-    return obj
-

-def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None) -> UUID:
+def get_direct_lake_sql_endpoint(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> UUID:
     """
     Obtains the SQL Endpoint ID of the semantic model.

     Parameters
     ----------
-    dataset : str
-        The name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -307,9 +328,8 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)

     from sempy_labs.tom import connect_semantic_model

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
@@ -320,7 +340,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
     # )

     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
         sqlEndpointId = None
         for e in tom.model.Expressions:
@@ -330,7 +350,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
                 sqlEndpointId = matches[1]

         if sqlEndpointId is None:
-            raise ValueError("SQL Endpoint not found.")
+            raise ValueError(f"{icons.red_dot} SQL Endpoint not found.")

         return sqlEndpointId

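
With the change above, get_direct_lake_sql_endpoint resolves the workspace and dataset up front via resolve_workspace_name_and_id and resolve_dataset_name_and_id, so either names or UUIDs can be passed. A hedged call sketch with hypothetical identifiers:

    from sempy_labs._helper_functions import get_direct_lake_sql_endpoint

    # By name (hypothetical values); returns the SQL Endpoint ID parsed from the
    # model's Direct Lake "DatabaseQuery" expression.
    endpoint_id = get_direct_lake_sql_endpoint(
        dataset="Sales Model", workspace="Contoso Workspace"
    )

    # By UUID; after this change the IDs are accepted directly (hypothetical values).
    endpoint_id = get_direct_lake_sql_endpoint(
        dataset="0b2e4c3f-8a3c-4e9a-b0d4-2f4e8f9a1c11",
        workspace="c1a9b6f2-5d43-4d8c-9d3e-7b2f0f6f2a10",
    )
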
@@ -407,7 +427,7 @@ def save_as_delta_table(
     merge_schema: bool = False,
     schema: Optional[dict] = None,
     lakehouse: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Saves a pandas dataframe as a delta table in a Fabric lakehouse.
@@ -427,8 +447,8 @@ def save_as_delta_table(
     lakehouse : str, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -447,19 +467,15 @@ def save_as_delta_table(
         TimestampType,
     )

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=workspace
+            lakehouse_id=lakehouse_id, workspace=workspace_id
         )
     else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)

     writeModes = ["append", "overwrite"]
     write_mode = write_mode.lower()
@@ -515,7 +531,7 @@ def save_as_delta_table(
     else:
         spark_df.write.mode(write_mode).format("delta").save(filePath)
     print(
-        f"{icons.green_dot} The dataframe has been saved as the '{delta_table_name}' table in the '{lakehouse}' lakehouse within the '{workspace}' workspace."
+        f"{icons.green_dot} The dataframe has been saved as the '{delta_table_name}' table in the '{lakehouse}' lakehouse within the '{workspace_name}' workspace."
     )

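
save_as_delta_table now resolves the workspace through resolve_workspace_name_and_id, so the workspace argument may be a name or a UUID and the success message reports the resolved workspace name. A hedged sketch; only the trailing parameters appear in these hunks, so the leading keyword name dataframe is an assumption:

    import pandas as pd
    from sempy_labs._helper_functions import save_as_delta_table

    df = pd.DataFrame({"Region": ["East", "West"], "Sales": [100, 250]})

    save_as_delta_table(
        dataframe=df,                       # assumed parameter name
        delta_table_name="regional_sales",  # name referenced in the success message
        write_mode="overwrite",             # validated against ["append", "overwrite"]
        lakehouse=None,                     # defaults to the attached lakehouse
        workspace="d7f8a2e1-3b6c-4a5d-9e0f-1c2b3a4d5e6f",  # hypothetical workspace UUID
    )
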
@@ -555,14 +571,16 @@ def language_validate(language: str):
     return lang


-def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str, str]:
+def resolve_workspace_name_and_id(
+    workspace: Optional[str | UUID] = None,
+) -> Tuple[str, str]:
     """
     Obtains the name and ID of the Fabric workspace.

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -574,11 +592,15 @@ def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str,

     if workspace is None:
         workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+        workspace_name = fabric.resolve_workspace_name(workspace_id)
+    elif _is_valid_uuid(workspace):
+        workspace_id = workspace
+        workspace_name = fabric.resolve_workspace_name(workspace_id)
     else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+        workspace_name = workspace
+        workspace_id = fabric.resolve_workspace_id(workspace_name)

-    return str(workspace), str(workspace_id)
+    return str(workspace_name), str(workspace_id)


 def _extract_json(dataframe: pd.DataFrame) -> dict:
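
After this hunk, resolve_workspace_name_and_id accepts None, a workspace name, or a workspace UUID and always returns the (name, id) pair as strings. A minimal sketch with hypothetical workspace values:

    from sempy_labs._helper_functions import resolve_workspace_name_and_id

    # None: fall back to the workspace of the attached lakehouse or of the notebook.
    ws_name, ws_id = resolve_workspace_name_and_id(None)

    # A UUID string: the id is passed through and the name is looked up.
    ws_name, ws_id = resolve_workspace_name_and_id("2f6a1e0d-9c4b-4a8e-b1d2-3e4f5a6b7c8d")

    # A display name: the id is resolved via fabric.resolve_workspace_id.
    ws_name, ws_id = resolve_workspace_name_and_id("Contoso Workspace")
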
@@ -604,7 +626,9 @@ def _decode_b64(file, format: Optional[str] = "utf-8"):
     return result


-def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) -> bool:
+def is_default_semantic_model(
+    dataset: str, workspace: Optional[str | UUID] = None
+) -> bool:
     """
     Identifies whether a semantic model is a default semantic model.

@@ -612,8 +636,8 @@ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) ->
     ----------
     dataset : str
         The name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -623,9 +647,9 @@ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) ->
         A True/False value indicating whether the semantic model is a default semantic model.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfI = fabric.list_items(workspace=workspace)
+    dfI = fabric.list_items(workspace=workspace_id)
     filtered_df = dfI.groupby("Display Name").filter(
         lambda x: set(["Warehouse", "SemanticModel"]).issubset(set(x["Type"]))
         or set(["Lakehouse", "SemanticModel"]).issubset(set(x["Type"]))
@@ -635,16 +659,16 @@ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) ->
     return dataset in default_semantic_models


-def resolve_item_type(item_id: UUID, workspace: Optional[str] = None) -> str:
+def resolve_item_type(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:
     """
     Obtains the item type for a given Fabric Item Id within a Fabric workspace.

     Parameters
     ----------
-    item_id : UUID
+    item_id : uuid.UUID
         The item/artifact Id.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -654,21 +678,19 @@ def resolve_item_type(item_id: UUID, workspace: Optional[str] = None) -> str:
         The item type for the item Id.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    dfI = fabric.list_items(workspace=workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    dfI = fabric.list_items(workspace=workspace_id)
     dfI_filt = dfI[dfI["Id"] == item_id]

-    if len(dfI_filt) == 0:
+    if dfI_filt.empty:
         raise ValueError(
-            f"Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace}' workspace."
+            f"Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace_name}' workspace."
         )
-    item_type = dfI_filt["Type"].iloc[0]
-
-    return item_type
+    return dfI_filt["Type"].iloc[0]


 def resolve_dataset_from_report(
-    report: str, workspace: Optional[str] = None
+    report: str, workspace: Optional[str | UUID] = None
 ) -> Tuple[UUID, str, UUID, str]:
     """
     Obtains the basic semantic model properties from which the report's data is sourced.
@@ -677,8 +699,8 @@ def resolve_dataset_from_report(
     ----------
     report : str
         The name of the Power BI report.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -688,13 +710,13 @@ def resolve_dataset_from_report(
         The semantic model UUID, semantic model name, semantic model workspace UUID, semantic model workspace name
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfR = fabric.list_reports(workspace=workspace)
+    dfR = fabric.list_reports(workspace=workspace_id)
     dfR_filt = dfR[dfR["Name"] == report]
     if len(dfR_filt) == 0:
         raise ValueError(
-            f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
+            f"{icons.red_dot} The '{report}' report does not exist within the '{workspace_name}' workspace."
         )
     dataset_id = dfR_filt["Dataset Id"].iloc[0]
     dataset_workspace_id = dfR_filt["Dataset Workspace Id"].iloc[0]
@@ -713,14 +735,16 @@ def _add_part(target_dict, path, payload):
     target_dict["definition"]["parts"].append(part)


-def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, str]:
+def resolve_workspace_capacity(
+    workspace: Optional[str | UUID] = None,
+) -> Tuple[UUID, str]:
     """
     Obtains the capacity Id and capacity name for a given workspace.

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or UUID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -730,9 +754,9 @@ def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, s
         capacity Id; capacity came.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    filter_condition = urllib.parse.quote(workspace)
-    dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    filter_condition = urllib.parse.quote(workspace_id)
+    dfW = fabric.list_workspaces(filter=f"id eq '{filter_condition}'")
     capacity_id = dfW["Capacity Id"].iloc[0]
     dfC = fabric.list_capacities()
     dfC_filt = dfC[dfC["Id"] == capacity_id]
@@ -744,14 +768,14 @@ def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, s
     return capacity_id, capacity_name


-def get_capacity_id(workspace: Optional[str] = None) -> UUID:
+def get_capacity_id(workspace: Optional[str | UUID] = None) -> UUID:
     """
     Obtains the Capacity Id for a given workspace.

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -761,23 +785,28 @@ def get_capacity_id(workspace: Optional[str] = None) -> UUID:
         The capacity Id.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    filter_condition = urllib.parse.quote(workspace)
-    dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
-    if len(dfW) == 0:
-        raise ValueError(f"{icons.red_dot} The '{workspace}' does not exist'.")
+    if workspace is None:
+        capacity_id = _get_fabric_context_setting(name="trident.capacity.id")
+    else:
+        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+        filter_condition = urllib.parse.quote(workspace_id)
+        dfW = fabric.list_workspaces(filter=f"id eq '{filter_condition}'")
+        if len(dfW) == 0:
+            raise ValueError(f"{icons.red_dot} The '{workspace_name}' does not exist'.")
+
+        capacity_id = dfW["Capacity Id"].iloc[0]

-    return dfW["Capacity Id"].iloc[0]
+    return capacity_id


-def get_capacity_name(workspace: Optional[str] = None) -> str:
+def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
     """
     Obtains the capacity name for a given workspace.

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

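
get_capacity_id now short-circuits when no workspace is given and reads 'trident.capacity.id' from the Fabric context instead of listing workspaces; an explicit workspace (name or UUID) is resolved first and then filtered by id rather than by name. A hedged sketch with a hypothetical workspace name:

    from sempy_labs._helper_functions import get_capacity_id

    # No workspace: the capacity id comes from the notebook's Fabric context.
    current_capacity_id = get_capacity_id()

    # Explicit workspace: resolved first, then looked up via fabric.list_workspaces.
    other_capacity_id = get_capacity_id(workspace="Contoso Workspace")
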
@@ -804,7 +833,7 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:

     Parameters
     ----------
-    capacity_id : UUID, default=None
+    capacity_id : uuid.UUID, default=None
         The capacity Id.
         Defaults to None which resolves to the capacity name of the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook.
@@ -995,7 +1024,7 @@ def _get_adls_client(account_name):
     return service_client


-def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
+def resolve_warehouse_id(warehouse: str, workspace: Optional[str | UUID]) -> UUID:
     """
     Obtains the Id for a given warehouse.

@@ -1003,6 +1032,10 @@ def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
     ----------
     warehouse : str
         The warehouse name
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
@@ -1010,7 +1043,6 @@ def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
         The warehouse Id.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
     return fabric.resolve_item_id(
         item_name=warehouse, type="Warehouse", workspace=workspace
     )
@@ -1072,7 +1104,9 @@ def convert_to_alphanumeric_lowercase(input_string):
     return cleaned_string


-def resolve_environment_id(environment: str, workspace: Optional[str] = None) -> UUID:
+def resolve_environment_id(
+    environment: str, workspace: Optional[str | UUID] = None
+) -> UUID:
     """
     Obtains the environment Id for a given environment.

@@ -1080,6 +1114,10 @@ def resolve_environment_id(environment: str, workspace: Optional[str] = None) ->
     ----------
     environment: str
         Name of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
@@ -1087,7 +1125,6 @@ def resolve_environment_id(environment: str, workspace: Optional[str] = None) ->
         The environment Id.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
     return fabric.resolve_item_id(
         item_name=environment, type="Environment", workspace=workspace
     )
@@ -1122,7 +1159,7 @@ def convert_to_friendly_case(text: str) -> str:
     return text


-def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
+def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -> UUID:
     """
     Obtains the notebook Id for a given notebook.

@@ -1130,6 +1167,10 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
     ----------
     notebook: str
         Name of the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
@@ -1137,7 +1178,6 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
         The notebook Id.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
     return fabric.resolve_item_id(
         item_name=notebook, type="Notebook", workspace=workspace
     )
@@ -1165,22 +1205,24 @@ def _make_list_unique(my_list):
     return list(set(my_list))


-def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+def _get_partition_map(
+    dataset: str, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     partitions = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         select [ID] AS [PartitionID], [TableID], [Name] AS [PartitionName] from $system.tmschema_partitions
         """,
     )

     tables = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         select [ID] AS [TableID], [Name] AS [TableName] from $system.tmschema_tables
         """,
@@ -1352,3 +1394,15 @@ def _is_valid_uuid(
         return True
     except ValueError:
         return False
+
+
+def _get_fabric_context_setting(name: str):
+
+    from synapse.ml.internal_utils.session_utils import get_fabric_context
+
+    return get_fabric_context().get(name)
+
+
+def get_tenant_id():
+
+    _get_fabric_context_setting(name="trident.tenant.id")
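
The new _get_fabric_context_setting helper wraps the session context exposed by synapse.ml.internal_utils.session_utils.get_fabric_context; the keys 'trident.capacity.id' and 'trident.tenant.id' both appear in this diff. A hedged sketch, assuming the code runs inside a Fabric notebook where that context is populated:

    from sempy_labs._helper_functions import _get_fabric_context_setting

    # Values are read from the Fabric session context; None is returned if a key is absent.
    capacity_id = _get_fabric_context_setting(name="trident.capacity.id")
    tenant_id = _get_fabric_context_setting(name="trident.tenant.id")

    print(f"capacity: {capacity_id}, tenant: {tenant_id}")
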