semantic-link-labs 0.8.10__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (73)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
  3. sempy_labs/__init__.py +6 -2
  4. sempy_labs/_clear_cache.py +39 -37
  5. sempy_labs/_connections.py +13 -13
  6. sempy_labs/_data_pipelines.py +20 -20
  7. sempy_labs/_dataflows.py +27 -28
  8. sempy_labs/_dax.py +41 -47
  9. sempy_labs/_environments.py +26 -23
  10. sempy_labs/_eventhouses.py +16 -15
  11. sempy_labs/_eventstreams.py +16 -15
  12. sempy_labs/_external_data_shares.py +18 -20
  13. sempy_labs/_gateways.py +14 -14
  14. sempy_labs/_generate_semantic_model.py +99 -62
  15. sempy_labs/_git.py +105 -43
  16. sempy_labs/_helper_functions.py +148 -131
  17. sempy_labs/_job_scheduler.py +92 -0
  18. sempy_labs/_kql_databases.py +16 -15
  19. sempy_labs/_kql_querysets.py +16 -15
  20. sempy_labs/_list_functions.py +114 -99
  21. sempy_labs/_managed_private_endpoints.py +19 -17
  22. sempy_labs/_mirrored_databases.py +51 -48
  23. sempy_labs/_mirrored_warehouses.py +5 -4
  24. sempy_labs/_ml_experiments.py +16 -15
  25. sempy_labs/_ml_models.py +15 -14
  26. sempy_labs/_model_bpa.py +3 -3
  27. sempy_labs/_model_dependencies.py +55 -29
  28. sempy_labs/_notebooks.py +27 -25
  29. sempy_labs/_one_lake_integration.py +23 -26
  30. sempy_labs/_query_scale_out.py +67 -64
  31. sempy_labs/_refresh_semantic_model.py +25 -26
  32. sempy_labs/_spark.py +33 -32
  33. sempy_labs/_sql.py +12 -9
  34. sempy_labs/_translations.py +10 -7
  35. sempy_labs/_vertipaq.py +34 -31
  36. sempy_labs/_warehouses.py +22 -21
  37. sempy_labs/_workspace_identity.py +11 -10
  38. sempy_labs/_workspaces.py +40 -33
  39. sempy_labs/admin/_basic_functions.py +10 -12
  40. sempy_labs/admin/_external_data_share.py +3 -3
  41. sempy_labs/admin/_items.py +4 -4
  42. sempy_labs/admin/_scanner.py +3 -1
  43. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  44. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  45. sempy_labs/directlake/_dl_helper.py +25 -26
  46. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  47. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  48. sempy_labs/directlake/_get_shared_expression.py +4 -3
  49. sempy_labs/directlake/_guardrails.py +12 -6
  50. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  51. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  52. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  54. sempy_labs/directlake/_warm_cache.py +87 -65
  55. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  57. sempy_labs/lakehouse/_lakehouse.py +17 -13
  58. sempy_labs/lakehouse/_shortcuts.py +42 -23
  59. sempy_labs/migration/_create_pqt_file.py +16 -11
  60. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  61. sempy_labs/report/_download_report.py +9 -8
  62. sempy_labs/report/_generate_report.py +40 -44
  63. sempy_labs/report/_paginated.py +9 -9
  64. sempy_labs/report/_report_bpa.py +13 -9
  65. sempy_labs/report/_report_functions.py +80 -91
  66. sempy_labs/report/_report_helper.py +8 -4
  67. sempy_labs/report/_report_list_functions.py +24 -13
  68. sempy_labs/report/_report_rebind.py +17 -16
  69. sempy_labs/report/_reportwrapper.py +41 -33
  70. sempy_labs/tom/_model.py +43 -6
  71. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  72. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  73. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
@@ -102,7 +102,7 @@ def create_relationship_name(
102
102
  )
103
103
 
104
104
 
105
- def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
105
+ def resolve_report_id(report: str, workspace: Optional[str | UUID] = None) -> UUID:
106
106
  """
107
107
  Obtains the ID of the Power BI report.
108
108
 
@@ -110,8 +110,8 @@ def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
110
110
  ----------
111
111
  report : str
112
112
  The name of the Power BI report.
113
- workspace : str, default=None
114
- The Fabric workspace name.
113
+ workspace : str | uuid.UUID, default=None
114
+ The Fabric workspace name or ID.
115
115
  Defaults to None which resolves to the workspace of the attached lakehouse
116
116
  or if no lakehouse attached, resolves to the workspace of the notebook.
117
117
 
@@ -121,25 +121,19 @@ def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
121
121
  The ID of the Power BI report.
122
122
  """
123
123
 
124
- if workspace is None:
125
- workspace_id = fabric.get_workspace_id()
126
- workspace = fabric.resolve_workspace_name(workspace_id)
127
-
128
- obj = fabric.resolve_item_id(item_name=report, type="Report", workspace=workspace)
129
-
130
- return obj
124
+ return fabric.resolve_item_id(item_name=report, type="Report", workspace=workspace)
131
125
 
132
126
 
133
- def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str:
127
+ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None) -> str:
134
128
  """
135
129
  Obtains the name of the Power BI report.
136
130
 
137
131
  Parameters
138
132
  ----------
139
- report_id : UUID
133
+ report_id : uuid.UUID
140
134
  The name of the Power BI report.
141
- workspace : str, default=None
142
- The Fabric workspace name.
135
+ workspace : str | uuid.UUID, default=None
136
+ The Fabric workspace name or ID.
143
137
  Defaults to None which resolves to the workspace of the attached lakehouse
144
138
  or if no lakehouse attached, resolves to the workspace of the notebook.
145
139
 
@@ -149,19 +143,37 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str
149
143
  The name of the Power BI report.
150
144
  """
151
145
 
152
- if workspace is None:
153
- workspace_id = fabric.get_workspace_id()
154
- workspace = fabric.resolve_workspace_name(workspace_id)
155
-
156
- obj = fabric.resolve_item_name(
146
+ return fabric.resolve_item_name(
157
147
  item_id=report_id, type="Report", workspace=workspace
158
148
  )
159
149
 
160
- return obj
150
+
151
+ def resolve_item_name_and_id(
152
+ item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
153
+ ) -> Tuple[str, UUID]:
154
+
155
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
156
+
157
+ if _is_valid_uuid(item):
158
+ item_id = item
159
+ item_name = fabric.resolve_item_name(
160
+ item_id=item_id, type=type, workspace=workspace_id
161
+ )
162
+ else:
163
+ if type is None:
164
+ raise ValueError(
165
+ f"{icons.warning} Must specify a 'type' if specifying a name as the 'item'."
166
+ )
167
+ item_name = item
168
+ item_id = fabric.resolve_item_id(
169
+ item_name=item, type=type, workspace=workspace_id
170
+ )
171
+
172
+ return item_name, item_id
161
173
 
162
174
 
163
175
  def resolve_dataset_name_and_id(
164
- dataset: str | UUID, workspace: Optional[str] = None
176
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
165
177
  ) -> Tuple[str, UUID]:
166
178
 
167
179
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -180,15 +192,17 @@ def resolve_dataset_name_and_id(
180
192
  return dataset_name, dataset_id
181
193
 
182
194
 
183
- def resolve_dataset_id(dataset: str | UUID, workspace: Optional[str] = None) -> UUID:
195
+ def resolve_dataset_id(
196
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
197
+ ) -> UUID:
184
198
  """
185
199
  Obtains the ID of the semantic model.
186
200
 
187
201
  Parameters
188
202
  ----------
189
- dataset : str | UUID
203
+ dataset : str | uuid.UUID
190
204
  The name or ID of the semantic model.
191
- workspace : str, default=None
205
+ workspace : str | uuid.UUID, default=None
192
206
  The Fabric workspace name.
193
207
  Defaults to None which resolves to the workspace of the attached lakehouse
194
208
  or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -209,15 +223,17 @@ def resolve_dataset_id(dataset: str | UUID, workspace: Optional[str] = None) ->
209
223
  return dataset_id
210
224
 
211
225
 
212
- def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> str:
226
+ def resolve_dataset_name(
227
+ dataset_id: UUID, workspace: Optional[str | UUID] = None
228
+ ) -> str:
213
229
  """
214
230
  Obtains the name of the semantic model.
215
231
 
216
232
  Parameters
217
233
  ----------
218
- dataset_id : UUID
234
+ dataset_id : uuid.UUID
219
235
  The name of the semantic model.
220
- workspace : str, default=None
236
+ workspace : str | uuid.UUID, default=None
221
237
  The Fabric workspace name.
222
238
  Defaults to None which resolves to the workspace of the attached lakehouse
223
239
  or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -228,30 +244,24 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> s
228
244
  The name of the semantic model.
229
245
  """
230
246
 
231
- if workspace is None:
232
- workspace_id = fabric.get_workspace_id()
233
- workspace = fabric.resolve_workspace_name(workspace_id)
234
-
235
- obj = fabric.resolve_item_name(
247
+ return fabric.resolve_item_name(
236
248
  item_id=dataset_id, type="SemanticModel", workspace=workspace
237
249
  )
238
250
 
239
- return obj
240
-
241
251
 
242
252
  def resolve_lakehouse_name(
243
- lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None
253
+ lakehouse_id: Optional[UUID] = None, workspace: Optional[str | UUID] = None
244
254
  ) -> str:
245
255
  """
246
256
  Obtains the name of the Fabric lakehouse.
247
257
 
248
258
  Parameters
249
259
  ----------
250
- lakehouse_id : UUID, default=None
260
+ lakehouse_id : uuid.UUID, default=None
251
261
  The name of the Fabric lakehouse.
252
262
  Defaults to None which resolves to the lakehouse attached to the notebook.
253
- workspace : str, default=None
254
- The Fabric workspace name.
263
+ workspace : str | uuid.UUID, default=None
264
+ The Fabric workspace name or ID.
255
265
  Defaults to None which resolves to the workspace of the attached lakehouse
256
266
  or if no lakehouse attached, resolves to the workspace of the notebook.
257
267
 
@@ -261,21 +271,17 @@ def resolve_lakehouse_name(
261
271
  The name of the Fabric lakehouse.
262
272
  """
263
273
 
264
- if workspace is None:
265
- workspace_id = fabric.get_workspace_id()
266
- workspace = fabric.resolve_workspace_name(workspace_id)
267
-
268
274
  if lakehouse_id is None:
269
275
  lakehouse_id = fabric.get_lakehouse_id()
270
276
 
271
- obj = fabric.resolve_item_name(
277
+ return fabric.resolve_item_name(
272
278
  item_id=lakehouse_id, type="Lakehouse", workspace=workspace
273
279
  )
274
280
 
275
- return obj
276
-
277
281
 
278
- def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None) -> UUID:
282
+ def resolve_lakehouse_id(
283
+ lakehouse: str, workspace: Optional[str | UUID] = None
284
+ ) -> UUID:
279
285
  """
280
286
  Obtains the ID of the Fabric lakehouse.
281
287
 
@@ -283,38 +289,34 @@ def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None) -> UUI
283
289
  ----------
284
290
  lakehouse : str
285
291
  The name of the Fabric lakehouse.
286
- workspace : str, default=None
287
- The Fabric workspace name.
292
+ workspace : str | uuid.UUID, default=None
293
+ The Fabric workspace name or ID.
288
294
  Defaults to None which resolves to the workspace of the attached lakehouse
289
295
  or if no lakehouse attached, resolves to the workspace of the notebook.
290
296
 
291
297
  Returns
292
298
  -------
293
- UUID
299
+ uuid.UUID
294
300
  The ID of the Fabric lakehouse.
295
301
  """
296
302
 
297
- if workspace is None:
298
- workspace_id = fabric.get_workspace_id()
299
- workspace = fabric.resolve_workspace_name(workspace_id)
300
-
301
- obj = fabric.resolve_item_id(
303
+ return fabric.resolve_item_id(
302
304
  item_name=lakehouse, type="Lakehouse", workspace=workspace
303
305
  )
304
306
 
305
- return obj
306
-
307
307
 
308
- def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None) -> UUID:
308
+ def get_direct_lake_sql_endpoint(
309
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
310
+ ) -> UUID:
309
311
  """
310
312
  Obtains the SQL Endpoint ID of the semantic model.
311
313
 
312
314
  Parameters
313
315
  ----------
314
- dataset : str
315
- The name of the semantic model.
316
- workspace : str, default=None
317
- The Fabric workspace name.
316
+ dataset : str | uuid.UUID
317
+ The name or ID of the semantic model.
318
+ workspace : str | uuid.UUID, default=None
319
+ The Fabric workspace name or ID.
318
320
  Defaults to None which resolves to the workspace of the attached lakehouse
319
321
  or if no lakehouse attached, resolves to the workspace of the notebook.
320
322
 
@@ -326,9 +328,8 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
326
328
 
327
329
  from sempy_labs.tom import connect_semantic_model
328
330
 
329
- if workspace is None:
330
- workspace_id = fabric.get_workspace_id()
331
- workspace = fabric.resolve_workspace_name(workspace_id)
331
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
332
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
332
333
 
333
334
  # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
334
335
  # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
@@ -339,7 +340,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
339
340
  # )
340
341
 
341
342
  with connect_semantic_model(
342
- dataset=dataset, readonly=True, workspace=workspace
343
+ dataset=dataset_id, readonly=True, workspace=workspace_id
343
344
  ) as tom:
344
345
  sqlEndpointId = None
345
346
  for e in tom.model.Expressions:
@@ -349,7 +350,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
349
350
  sqlEndpointId = matches[1]
350
351
 
351
352
  if sqlEndpointId is None:
352
- raise ValueError("SQL Endpoint not found.")
353
+ raise ValueError(f"{icons.red_dot} SQL Endpoint not found.")
353
354
 
354
355
  return sqlEndpointId
355
356
 
@@ -426,7 +427,7 @@ def save_as_delta_table(
426
427
  merge_schema: bool = False,
427
428
  schema: Optional[dict] = None,
428
429
  lakehouse: Optional[str] = None,
429
- workspace: Optional[str] = None,
430
+ workspace: Optional[str | UUID] = None,
430
431
  ):
431
432
  """
432
433
  Saves a pandas dataframe as a delta table in a Fabric lakehouse.
@@ -446,8 +447,8 @@ def save_as_delta_table(
446
447
  lakehouse : str, default=None
447
448
  The Fabric lakehouse used by the Direct Lake semantic model.
448
449
  Defaults to None which resolves to the lakehouse attached to the notebook.
449
- workspace : str, default=None
450
- The Fabric workspace name.
450
+ workspace : str | uuid.UUID, default=None
451
+ The Fabric workspace name or ID.
451
452
  Defaults to None which resolves to the workspace of the attached lakehouse
452
453
  or if no lakehouse attached, resolves to the workspace of the notebook.
453
454
  """
@@ -466,19 +467,15 @@ def save_as_delta_table(
466
467
  TimestampType,
467
468
  )
468
469
 
469
- if workspace is None:
470
- workspace_id = fabric.get_workspace_id()
471
- workspace = fabric.resolve_workspace_name(workspace_id)
472
- else:
473
- workspace_id = fabric.resolve_workspace_id(workspace)
470
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
474
471
 
475
472
  if lakehouse is None:
476
473
  lakehouse_id = fabric.get_lakehouse_id()
477
474
  lakehouse = resolve_lakehouse_name(
478
- lakehouse_id=lakehouse_id, workspace=workspace
475
+ lakehouse_id=lakehouse_id, workspace=workspace_id
479
476
  )
480
477
  else:
481
- lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
478
+ lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
482
479
 
483
480
  writeModes = ["append", "overwrite"]
484
481
  write_mode = write_mode.lower()
@@ -534,7 +531,7 @@ def save_as_delta_table(
534
531
  else:
535
532
  spark_df.write.mode(write_mode).format("delta").save(filePath)
536
533
  print(
537
- f"{icons.green_dot} The dataframe has been saved as the '{delta_table_name}' table in the '{lakehouse}' lakehouse within the '{workspace}' workspace."
534
+ f"{icons.green_dot} The dataframe has been saved as the '{delta_table_name}' table in the '{lakehouse}' lakehouse within the '{workspace_name}' workspace."
538
535
  )
539
536
 
540
537
 
@@ -574,14 +571,16 @@ def language_validate(language: str):
574
571
  return lang
575
572
 
576
573
 
577
- def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str, str]:
574
+ def resolve_workspace_name_and_id(
575
+ workspace: Optional[str | UUID] = None,
576
+ ) -> Tuple[str, str]:
578
577
  """
579
578
  Obtains the name and ID of the Fabric workspace.
580
579
 
581
580
  Parameters
582
581
  ----------
583
- workspace : str, default=None
584
- The Fabric workspace name.
582
+ workspace : str | uuid.UUID, default=None
583
+ The Fabric workspace name or ID.
585
584
  Defaults to None which resolves to the workspace of the attached lakehouse
586
585
  or if no lakehouse attached, resolves to the workspace of the notebook.
587
586
 
@@ -593,11 +592,15 @@ def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str,
593
592
 
594
593
  if workspace is None:
595
594
  workspace_id = fabric.get_workspace_id()
596
- workspace = fabric.resolve_workspace_name(workspace_id)
595
+ workspace_name = fabric.resolve_workspace_name(workspace_id)
596
+ elif _is_valid_uuid(workspace):
597
+ workspace_id = workspace
598
+ workspace_name = fabric.resolve_workspace_name(workspace_id)
597
599
  else:
598
- workspace_id = fabric.resolve_workspace_id(workspace)
600
+ workspace_name = workspace
601
+ workspace_id = fabric.resolve_workspace_id(workspace_name)
599
602
 
600
- return str(workspace), str(workspace_id)
603
+ return str(workspace_name), str(workspace_id)
601
604
 
602
605
 
603
606
  def _extract_json(dataframe: pd.DataFrame) -> dict:
@@ -623,7 +626,9 @@ def _decode_b64(file, format: Optional[str] = "utf-8"):
623
626
  return result
624
627
 
625
628
 
626
- def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) -> bool:
629
+ def is_default_semantic_model(
630
+ dataset: str, workspace: Optional[str | UUID] = None
631
+ ) -> bool:
627
632
  """
628
633
  Identifies whether a semantic model is a default semantic model.
629
634
 
@@ -631,8 +636,8 @@ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) ->
631
636
  ----------
632
637
  dataset : str
633
638
  The name of the semantic model.
634
- workspace : str, default=None
635
- The Fabric workspace name.
639
+ workspace : str | uuid.UUID, default=None
640
+ The Fabric workspace name or ID.
636
641
  Defaults to None which resolves to the workspace of the attached lakehouse
637
642
  or if no lakehouse attached, resolves to the workspace of the notebook.
638
643
 
@@ -642,9 +647,9 @@ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) ->
642
647
  A True/False value indicating whether the semantic model is a default semantic model.
643
648
  """
644
649
 
645
- workspace = fabric.resolve_workspace_name(workspace)
650
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
646
651
 
647
- dfI = fabric.list_items(workspace=workspace)
652
+ dfI = fabric.list_items(workspace=workspace_id)
648
653
  filtered_df = dfI.groupby("Display Name").filter(
649
654
  lambda x: set(["Warehouse", "SemanticModel"]).issubset(set(x["Type"]))
650
655
  or set(["Lakehouse", "SemanticModel"]).issubset(set(x["Type"]))
@@ -654,16 +659,16 @@ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) ->
654
659
  return dataset in default_semantic_models
655
660
 
656
661
 
657
- def resolve_item_type(item_id: UUID, workspace: Optional[str] = None) -> str:
662
+ def resolve_item_type(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:
658
663
  """
659
664
  Obtains the item type for a given Fabric Item Id within a Fabric workspace.
660
665
 
661
666
  Parameters
662
667
  ----------
663
- item_id : UUID
668
+ item_id : uuid.UUID
664
669
  The item/artifact Id.
665
- workspace : str, default=None
666
- The Fabric workspace name.
670
+ workspace : str | uuid.UUID, default=None
671
+ The Fabric workspace name or ID.
667
672
  Defaults to None which resolves to the workspace of the attached lakehouse
668
673
  or if no lakehouse attached, resolves to the workspace of the notebook.
669
674
 
@@ -673,21 +678,19 @@ def resolve_item_type(item_id: UUID, workspace: Optional[str] = None) -> str:
673
678
  The item type for the item Id.
674
679
  """
675
680
 
676
- workspace = fabric.resolve_workspace_name(workspace)
677
- dfI = fabric.list_items(workspace=workspace)
681
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
682
+ dfI = fabric.list_items(workspace=workspace_id)
678
683
  dfI_filt = dfI[dfI["Id"] == item_id]
679
684
 
680
- if len(dfI_filt) == 0:
685
+ if dfI_filt.empty:
681
686
  raise ValueError(
682
- f"Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace}' workspace."
687
+ f"Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace_name}' workspace."
683
688
  )
684
- item_type = dfI_filt["Type"].iloc[0]
685
-
686
- return item_type
689
+ return dfI_filt["Type"].iloc[0]
687
690
 
688
691
 
689
692
  def resolve_dataset_from_report(
690
- report: str, workspace: Optional[str] = None
693
+ report: str, workspace: Optional[str | UUID] = None
691
694
  ) -> Tuple[UUID, str, UUID, str]:
692
695
  """
693
696
  Obtains the basic semantic model properties from which the report's data is sourced.
@@ -696,8 +699,8 @@ def resolve_dataset_from_report(
696
699
  ----------
697
700
  report : str
698
701
  The name of the Power BI report.
699
- workspace : str, default=None
700
- The Fabric workspace name.
702
+ workspace : str | uuid.UUID, default=None
703
+ The Fabric workspace name or ID.
701
704
  Defaults to None which resolves to the workspace of the attached lakehouse
702
705
  or if no lakehouse attached, resolves to the workspace of the notebook.
703
706
 
@@ -707,13 +710,13 @@ def resolve_dataset_from_report(
707
710
  The semantic model UUID, semantic model name, semantic model workspace UUID, semantic model workspace name
708
711
  """
709
712
 
710
- workspace = fabric.resolve_workspace_name(workspace)
713
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
711
714
 
712
- dfR = fabric.list_reports(workspace=workspace)
715
+ dfR = fabric.list_reports(workspace=workspace_id)
713
716
  dfR_filt = dfR[dfR["Name"] == report]
714
717
  if len(dfR_filt) == 0:
715
718
  raise ValueError(
716
- f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
719
+ f"{icons.red_dot} The '{report}' report does not exist within the '{workspace_name}' workspace."
717
720
  )
718
721
  dataset_id = dfR_filt["Dataset Id"].iloc[0]
719
722
  dataset_workspace_id = dfR_filt["Dataset Workspace Id"].iloc[0]
@@ -732,14 +735,16 @@ def _add_part(target_dict, path, payload):
732
735
  target_dict["definition"]["parts"].append(part)
733
736
 
734
737
 
735
- def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, str]:
738
+ def resolve_workspace_capacity(
739
+ workspace: Optional[str | UUID] = None,
740
+ ) -> Tuple[UUID, str]:
736
741
  """
737
742
  Obtains the capacity Id and capacity name for a given workspace.
738
743
 
739
744
  Parameters
740
745
  ----------
741
- workspace : str, default=None
742
- The Fabric workspace name.
746
+ workspace : str | uuid.UUID, default=None
747
+ The Fabric workspace name or UUID.
743
748
  Defaults to None which resolves to the workspace of the attached lakehouse
744
749
  or if no lakehouse attached, resolves to the workspace of the notebook.
745
750
 
@@ -749,9 +754,9 @@ def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, s
749
754
  capacity Id; capacity came.
750
755
  """
751
756
 
752
- workspace = fabric.resolve_workspace_name(workspace)
753
- filter_condition = urllib.parse.quote(workspace)
754
- dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
757
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
758
+ filter_condition = urllib.parse.quote(workspace_id)
759
+ dfW = fabric.list_workspaces(filter=f"id eq '{filter_condition}'")
755
760
  capacity_id = dfW["Capacity Id"].iloc[0]
756
761
  dfC = fabric.list_capacities()
757
762
  dfC_filt = dfC[dfC["Id"] == capacity_id]
@@ -763,14 +768,14 @@ def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, s
763
768
  return capacity_id, capacity_name
764
769
 
765
770
 
766
- def get_capacity_id(workspace: Optional[str] = None) -> UUID:
771
+ def get_capacity_id(workspace: Optional[str | UUID] = None) -> UUID:
767
772
  """
768
773
  Obtains the Capacity Id for a given workspace.
769
774
 
770
775
  Parameters
771
776
  ----------
772
- workspace : str, default=None
773
- The Fabric workspace name.
777
+ workspace : str | uuid.UUID, default=None
778
+ The Fabric workspace name or ID.
774
779
  Defaults to None which resolves to the workspace of the attached lakehouse
775
780
  or if no lakehouse attached, resolves to the workspace of the notebook.
776
781
 
@@ -781,28 +786,27 @@ def get_capacity_id(workspace: Optional[str] = None) -> UUID:
781
786
  """
782
787
 
783
788
  if workspace is None:
784
- capacity_id = _get_x_id(name="trident.capacity.id")
789
+ capacity_id = _get_fabric_context_setting(name="trident.capacity.id")
785
790
  else:
786
-
787
- workspace = fabric.resolve_workspace_name(workspace)
788
- filter_condition = urllib.parse.quote(workspace)
789
- dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
791
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
792
+ filter_condition = urllib.parse.quote(workspace_id)
793
+ dfW = fabric.list_workspaces(filter=f"id eq '{filter_condition}'")
790
794
  if len(dfW) == 0:
791
- raise ValueError(f"{icons.red_dot} The '{workspace}' does not exist'.")
795
+ raise ValueError(f"{icons.red_dot} The '{workspace_name}' does not exist'.")
792
796
 
793
797
  capacity_id = dfW["Capacity Id"].iloc[0]
794
798
 
795
799
  return capacity_id
796
800
 
797
801
 
798
- def get_capacity_name(workspace: Optional[str] = None) -> str:
802
+ def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
799
803
  """
800
804
  Obtains the capacity name for a given workspace.
801
805
 
802
806
  Parameters
803
807
  ----------
804
- workspace : str, default=None
805
- The Fabric workspace name.
808
+ workspace : str | uuid.UUID, default=None
809
+ The Fabric workspace name or ID.
806
810
  Defaults to None which resolves to the workspace of the attached lakehouse
807
811
  or if no lakehouse attached, resolves to the workspace of the notebook.
808
812
 
@@ -829,7 +833,7 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
829
833
 
830
834
  Parameters
831
835
  ----------
832
- capacity_id : UUID, default=None
836
+ capacity_id : uuid.UUID, default=None
833
837
  The capacity Id.
834
838
  Defaults to None which resolves to the capacity name of the workspace of the attached lakehouse
835
839
  or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook.
@@ -1020,7 +1024,7 @@ def _get_adls_client(account_name):
1020
1024
  return service_client
1021
1025
 
1022
1026
 
1023
- def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
1027
+ def resolve_warehouse_id(warehouse: str, workspace: Optional[str | UUID]) -> UUID:
1024
1028
  """
1025
1029
  Obtains the Id for a given warehouse.
1026
1030
 
@@ -1028,6 +1032,10 @@ def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
1028
1032
  ----------
1029
1033
  warehouse : str
1030
1034
  The warehouse name
1035
+ workspace : str | uuid.UUID, default=None
1036
+ The Fabric workspace name or ID in which the semantic model resides.
1037
+ Defaults to None which resolves to the workspace of the attached lakehouse
1038
+ or if no lakehouse attached, resolves to the workspace of the notebook.
1031
1039
 
1032
1040
  Returns
1033
1041
  -------
@@ -1035,7 +1043,6 @@ def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
1035
1043
  The warehouse Id.
1036
1044
  """
1037
1045
 
1038
- workspace = fabric.resolve_workspace_name(workspace)
1039
1046
  return fabric.resolve_item_id(
1040
1047
  item_name=warehouse, type="Warehouse", workspace=workspace
1041
1048
  )
@@ -1097,7 +1104,9 @@ def convert_to_alphanumeric_lowercase(input_string):
1097
1104
  return cleaned_string
1098
1105
 
1099
1106
 
1100
- def resolve_environment_id(environment: str, workspace: Optional[str] = None) -> UUID:
1107
+ def resolve_environment_id(
1108
+ environment: str, workspace: Optional[str | UUID] = None
1109
+ ) -> UUID:
1101
1110
  """
1102
1111
  Obtains the environment Id for a given environment.
1103
1112
 
@@ -1105,6 +1114,10 @@ def resolve_environment_id(environment: str, workspace: Optional[str] = None) ->
1105
1114
  ----------
1106
1115
  environment: str
1107
1116
  Name of the environment.
1117
+ workspace : str | uuid.UUID, default=None
1118
+ The Fabric workspace name or ID in which the semantic model resides.
1119
+ Defaults to None which resolves to the workspace of the attached lakehouse
1120
+ or if no lakehouse attached, resolves to the workspace of the notebook.
1108
1121
 
1109
1122
  Returns
1110
1123
  -------
@@ -1112,7 +1125,6 @@ def resolve_environment_id(environment: str, workspace: Optional[str] = None) ->
1112
1125
  The environment Id.
1113
1126
  """
1114
1127
 
1115
- workspace = fabric.resolve_workspace_name(workspace)
1116
1128
  return fabric.resolve_item_id(
1117
1129
  item_name=environment, type="Environment", workspace=workspace
1118
1130
  )
@@ -1147,7 +1159,7 @@ def convert_to_friendly_case(text: str) -> str:
1147
1159
  return text
1148
1160
 
1149
1161
 
1150
- def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
1162
+ def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -> UUID:
1151
1163
  """
1152
1164
  Obtains the notebook Id for a given notebook.
1153
1165
 
@@ -1155,6 +1167,10 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
1155
1167
  ----------
1156
1168
  notebook: str
1157
1169
  Name of the notebook.
1170
+ workspace : str | uuid.UUID, default=None
1171
+ The Fabric workspace name or ID in which the semantic model resides.
1172
+ Defaults to None which resolves to the workspace of the attached lakehouse
1173
+ or if no lakehouse attached, resolves to the workspace of the notebook.
1158
1174
 
1159
1175
  Returns
1160
1176
  -------
@@ -1162,7 +1178,6 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
1162
1178
  The notebook Id.
1163
1179
  """
1164
1180
 
1165
- workspace = fabric.resolve_workspace_name(workspace)
1166
1181
  return fabric.resolve_item_id(
1167
1182
  item_name=notebook, type="Notebook", workspace=workspace
1168
1183
  )
@@ -1190,7 +1205,9 @@ def _make_list_unique(my_list):
1190
1205
  return list(set(my_list))
1191
1206
 
1192
1207
 
1193
- def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
1208
+ def _get_partition_map(
1209
+ dataset: str, workspace: Optional[str | UUID] = None
1210
+ ) -> pd.DataFrame:
1194
1211
 
1195
1212
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
1196
1213
  (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)