semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (71)
  1. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
  2. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
  3. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +33 -4
  5. sempy_labs/_capacities.py +59 -128
  6. sempy_labs/_capacity_migration.py +19 -21
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dashboards.py +60 -0
  9. sempy_labs/_data_pipelines.py +5 -31
  10. sempy_labs/_dataflows.py +2 -2
  11. sempy_labs/_dax_query_view.py +55 -0
  12. sempy_labs/_delta_analyzer.py +16 -14
  13. sempy_labs/_environments.py +28 -49
  14. sempy_labs/_eventhouses.py +27 -53
  15. sempy_labs/_eventstreams.py +16 -34
  16. sempy_labs/_external_data_shares.py +4 -10
  17. sempy_labs/_gateways.py +4 -4
  18. sempy_labs/_generate_semantic_model.py +2 -2
  19. sempy_labs/_git.py +90 -1
  20. sempy_labs/_graphQL.py +8 -21
  21. sempy_labs/_helper_functions.py +440 -91
  22. sempy_labs/_kql_databases.py +24 -35
  23. sempy_labs/_kql_querysets.py +15 -32
  24. sempy_labs/_list_functions.py +17 -192
  25. sempy_labs/_managed_private_endpoints.py +9 -2
  26. sempy_labs/_mirrored_databases.py +17 -49
  27. sempy_labs/_ml_experiments.py +6 -31
  28. sempy_labs/_ml_models.py +4 -28
  29. sempy_labs/_model_bpa.py +4 -11
  30. sempy_labs/_model_bpa_bulk.py +23 -27
  31. sempy_labs/_mounted_data_factories.py +119 -0
  32. sempy_labs/_notebooks.py +16 -26
  33. sempy_labs/_one_lake_integration.py +2 -1
  34. sempy_labs/_semantic_models.py +20 -0
  35. sempy_labs/_sql.py +13 -8
  36. sempy_labs/_sqldatabase.py +61 -100
  37. sempy_labs/_utils.py +42 -0
  38. sempy_labs/_vertipaq.py +25 -13
  39. sempy_labs/_warehouses.py +19 -20
  40. sempy_labs/_workloads.py +23 -9
  41. sempy_labs/_workspace_identity.py +6 -0
  42. sempy_labs/_workspaces.py +55 -7
  43. sempy_labs/admin/__init__.py +21 -1
  44. sempy_labs/admin/_apps.py +1 -1
  45. sempy_labs/admin/_artifacts.py +62 -0
  46. sempy_labs/admin/_basic_functions.py +3 -54
  47. sempy_labs/admin/_capacities.py +61 -0
  48. sempy_labs/admin/_reports.py +74 -0
  49. sempy_labs/admin/_scanner.py +2 -2
  50. sempy_labs/admin/_shared.py +4 -2
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  54. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  55. sempy_labs/directlake/_dl_helper.py +0 -6
  56. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  57. sempy_labs/directlake/_guardrails.py +2 -1
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
  60. sempy_labs/lakehouse/__init__.py +2 -0
  61. sempy_labs/lakehouse/_lakehouse.py +6 -7
  62. sempy_labs/lakehouse/_shortcuts.py +198 -57
  63. sempy_labs/migration/_migration_validation.py +0 -4
  64. sempy_labs/report/_download_report.py +4 -6
  65. sempy_labs/report/_generate_report.py +15 -23
  66. sempy_labs/report/_report_bpa.py +12 -19
  67. sempy_labs/report/_report_functions.py +2 -1
  68. sempy_labs/report/_report_rebind.py +8 -6
  69. sempy_labs/tom/_model.py +34 -16
  70. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  71. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_helper_functions.py

@@ -4,7 +4,7 @@ import json
  import base64
  import time
  import uuid
- from sempy.fabric.exceptions import FabricHTTPException
+ from sempy.fabric.exceptions import FabricHTTPException, WorkspaceNotFoundException
  import pandas as pd
  from functools import wraps
  import datetime
@@ -30,10 +30,16 @@ def _build_url(url: str, params: dict) -> str:
      return url


+ def _encode_user(user: str) -> str:
+
+     return urllib.parse.quote(user, safe="@")
+
+
  def create_abfss_path(
      lakehouse_id: UUID,
      lakehouse_workspace_id: UUID,
      delta_table_name: Optional[str] = None,
+     schema: Optional[str] = None,
  ) -> str:
      """
      Creates an abfss path for a delta table in a Fabric lakehouse.
@@ -46,6 +52,8 @@ def create_abfss_path(
          ID of the Fabric workspace.
      delta_table_name : str, default=None
          Name of the delta table name.
+     schema : str, default=None
+         The schema of the delta table.

      Returns
      -------
@@ -57,6 +65,8 @@ def create_abfss_path(
      path = f"abfss://{lakehouse_workspace_id}@{fp}/{lakehouse_id}"

      if delta_table_name is not None:
+         if schema is not None:
+             path += f"/{schema}"
          path += f"/Tables/{delta_table_name}"

      return path
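
The hunks above add an optional `schema` parameter to `create_abfss_path`. A minimal usage sketch, assuming the helper is importable from `sempy_labs._helper_functions` (module path inferred from the file list above) and using placeholder IDs:

```python
# Sketch only: the UUIDs below are placeholders, not real workspace/lakehouse IDs.
from uuid import UUID

from sempy_labs._helper_functions import create_abfss_path  # path assumed from the file list

workspace_id = UUID("00000000-0000-0000-0000-000000000001")
lakehouse_id = UUID("00000000-0000-0000-0000-000000000002")

# Without a schema, the path ends in .../Tables/<table> as before.
default_path = create_abfss_path(lakehouse_id, workspace_id, "sales")

# With the new parameter, a schema segment is added to the path as shown in the hunk above.
schema_path = create_abfss_path(lakehouse_id, workspace_id, "sales", schema="dbo")
```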
@@ -130,14 +140,16 @@ def create_relationship_name(
      )


- def resolve_report_id(report: str, workspace: Optional[str | UUID] = None) -> UUID:
+ def resolve_report_id(
+     report: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> UUID:
      """
      Obtains the ID of the Power BI report.

      Parameters
      ----------
-     report : str
-         The name of the Power BI report.
+     report : str | uuid.UUID
+         The name or ID of the Power BI report.
      workspace : str | uuid.UUID, default=None
          The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
@@ -145,11 +157,11 @@ def resolve_report_id(report: str, workspace: Optional[str | UUID] = None) -> UU

      Returns
      -------
-     UUID
+     uuid.UUID
          The ID of the Power BI report.
      """

-     return fabric.resolve_item_id(item_name=report, type="Report", workspace=workspace)
+     return resolve_item_id(item=report, type="Report", workspace=workspace)


  def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None) -> str:
@@ -171,45 +183,211 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
          The name of the Power BI report.
      """

-     return fabric.resolve_item_name(
-         item_id=report_id, type="Report", workspace=workspace
+     return resolve_item_name(item_id=report_id, type="Report", workspace=workspace)
+
+
+ def delete_item(
+     item: str | UUID, type: str, workspace: Optional[str | UUID] = None
+ ) -> None:
+     """
+     Deletes an item from a Fabric workspace.
+
+     Parameters
+     ----------
+     item : str | uuid.UUID
+         The name or ID of the item to be deleted.
+     type : str
+         The type of the item to be deleted.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     from sempy_labs._utils import item_types
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
+     item_type = item_types.get(type)[0].lower()
+
+     fabric.delete_item(item_id=item_id, workspace=workspace_id)
+
+     print(
+         f"{icons.green_dot} The '{item_name}' {item_type} has been successfully deleted from the '{workspace_name}' workspace."
      )


- def resolve_item_id(
-     item: str | UUID, type: str, workspace: Optional[str] = None
- ) -> UUID:
+ def create_item(
+     name: str,
+     type: str,
+     description: Optional[str] = None,
+     definition: Optional[dict] = None,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Creates an item in a Fabric workspace.

-     if _is_valid_uuid(item):
-         return item
+     Parameters
+     ----------
+     name : str
+         The name of the item to be created.
+     type : str
+         The type of the item to be created.
+     description : str, default=None
+         A description of the item to be created.
+     definition : dict, default=None
+         The definition of the item to be created.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+     from sempy_labs._utils import item_types
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     item_type = item_types.get(type)[0].lower()
+     item_type_url = item_types.get(type)[1]
+
+     payload = {
+         "displayName": name,
+     }
+     if description:
+         payload["description"] = description
+     if definition:
+         payload["definition"] = definition
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
+         method="post",
+         payload=payload,
+         status_codes=[201, 202],
+         lro_return_status_code=True,
+     )
+     print(
+         f"{icons.green_dot} The '{name}' {item_type} has been successfully created within the in the '{workspace_name}' workspace."
+     )
+
+
+ def get_item_definition(
+     item: str | UUID,
+     type: str,
+     workspace: Optional[str | UUID] = None,
+     format: Optional[str] = None,
+     return_dataframe: bool = True,
+     decode: bool = True,
+ ):
+
+     from sempy_labs._utils import item_types
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     item_id = resolve_item_id(item, type, workspace_id)
+     item_type_url = item_types.get(type)[1]
+     path = item_types.get(type)[2]
+
+     url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
+     if format:
+         url += f"?format={format}"
+
+     result = _base_api(
+         request=url,
+         method="post",
+         status_codes=None,
+         lro_return_json=True,
+     )
+
+     if return_dataframe:
+         return pd.json_normalize(result["definition"]["parts"])
+
+     value = next(
+         p.get("payload") for p in result["definition"]["parts"] if p.get("path") == path
+     )
+     if decode:
+         json.loads(_decode_b64(value))
      else:
-         return fabric.resolve_item_id(item_name=item, type=type, workspace=workspace)
+         return value


- def resolve_item_name_and_id(
+ def resolve_item_id(
      item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
- ) -> Tuple[str, UUID]:
+ ) -> UUID:

      (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     item_id = None

      if _is_valid_uuid(item):
+         # Check (optional)
          item_id = item
-         item_name = fabric.resolve_item_name(
-             item_id=item_id, type=type, workspace=workspace_id
-         )
+         try:
+             _base_api(
+                 request=f"/v1/workspaces/{workspace_id}/items/{item_id}",
+                 client="fabric_sp",
+             )
+         except FabricHTTPException:
+             raise ValueError(
+                 f"{icons.red_dot} The '{item_id}' item was not found in the '{workspace_name}' workspace."
+             )
      else:
          if type is None:
              raise ValueError(
-                 f"{icons.warning} Must specify a 'type' if specifying a name as the 'item'."
+                 f"{icons.red_dot} The 'type' parameter is required if specifying an item name."
              )
-         item_name = item
-         item_id = fabric.resolve_item_id(
-             item_name=item, type=type, workspace=workspace_id
+         responses = _base_api(
+             request=f"/v1/workspaces/{workspace_id}/items?type={type}",
+             client="fabric_sp",
+             uses_pagination=True,
+         )
+         for r in responses:
+             for v in r.get("value", []):
+                 display_name = v.get("displayName")
+                 if display_name == item:
+                     item_id = v.get("id")
+                     break
+
+         if item_id is None:
+             raise ValueError(
+                 f"{icons.red_dot} There's no item '{item}' of type '{type}' in the '{workspace_name}' workspace."
              )

+     return item_id
+
+
+ def resolve_item_name_and_id(
+     item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
+ ) -> Tuple[str, UUID]:
+
+     workspace_id = resolve_workspace_id(workspace)
+     item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)
+     item_name = (
+         _base_api(
+             request=f"/v1/workspaces/{workspace_id}/items/{item_id}", client="fabric_sp"
+         )
+         .json()
+         .get("displayName")
+     )
+
      return item_name, item_id


+ def resolve_item_name(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:
+
+     workspace_id = resolve_workspace_id(workspace)
+     try:
+         item_name = (
+             _base_api(
+                 request=f"/v1/workspaces/{workspace_id}/items/{item_id}",
+                 client="fabric_sp",
+             )
+             .json()
+             .get("displayName")
+         )
+     except FabricHTTPException:
+         raise ValueError(
+             f"{icons.red_dot} The '{item_id}' item was not found in the '{workspace_id}' workspace."
+         )
+
+     return item_name
+
+
  def resolve_lakehouse_name_and_id(
      lakehouse: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
  ) -> Tuple[str, UUID]:
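
The hunk above replaces the thin `fabric.resolve_*` wrappers with REST lookups against `/v1/workspaces/{id}/items` and adds generic `create_item`, `delete_item` and `get_item_definition` helpers. A hedged usage sketch; the import path, the "Notebook" type key and the workspace name are assumptions based on the hunk and the `item_types` lookup it references, not verified against the published wheel:

```python
# Sketch only: workspace and item names are placeholders.
from sempy_labs._helper_functions import (  # module path assumed from the file list
    create_item,
    delete_item,
    get_item_definition,
)

workspace = "My Workspace"  # either a name or a workspace UUID resolves per the hunks above

# Create an empty item of a given type; the type string is looked up in sempy_labs._utils.item_types.
create_item(name="Scratch Notebook", type="Notebook", workspace=workspace)

# Inspect its definition as a DataFrame of definition parts (the default return shape above).
df_parts = get_item_definition(item="Scratch Notebook", type="Notebook", workspace=workspace)

# Remove it again; delete_item resolves the name to an ID before calling fabric.delete_item.
delete_item(item="Scratch Notebook", type="Notebook", workspace=workspace)
```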
@@ -218,19 +396,18 @@ def resolve_lakehouse_name_and_id(
      type = "Lakehouse"

      if lakehouse is None:
-         lakehouse_id = fabric.get_lakehouse_id()
-         lakehouse_name = fabric.resolve_item_name(
-             item_id=lakehouse_id, type=type, workspace=workspace_id
-         )
-     elif _is_valid_uuid(lakehouse):
-         lakehouse_id = lakehouse
-         lakehouse_name = fabric.resolve_item_name(
-             item_id=lakehouse_id, type=type, workspace=workspace_id
+         lakehouse_id = _get_fabric_context_setting(name="trident.lakehouse.id")
+         if lakehouse_id == "":
+             raise ValueError(
+                 f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
+             )
+         (lakehouse_name, lakehouse_id) = resolve_item_name_and_id(
+             item=lakehouse_id, type=type, workspace=workspace_id
          )
+
      else:
-         lakehouse_name = lakehouse
-         lakehouse_id = fabric.resolve_item_id(
-             item_name=lakehouse, type=type, workspace=workspace_id
+         (lakehouse_name, lakehouse_id) = resolve_item_name_and_id(
+             item=lakehouse, type=type, workspace=workspace_id
          )

      return lakehouse_name, lakehouse_id
@@ -268,14 +445,7 @@ def resolve_dataset_id(
          The ID of the semantic model.
      """

-     if _is_valid_uuid(dataset):
-         dataset_id = dataset
-     else:
-         dataset_id = fabric.resolve_item_id(
-             item_name=dataset, type="SemanticModel", workspace=workspace
-         )
-
-     return dataset_id
+     return resolve_item_id(item=dataset, type="SemanticModel", workspace=workspace)


  def resolve_dataset_name(
@@ -299,7 +469,7 @@ def resolve_dataset_name(
          The name of the semantic model.
      """

-     return fabric.resolve_item_name(
+     return resolve_item_name(
          item_id=dataset_id, type="SemanticModel", workspace=workspace
      )

@@ -327,9 +497,13 @@ def resolve_lakehouse_name(
      """

      if lakehouse_id is None:
-         lakehouse_id = fabric.get_lakehouse_id()
+         lakehouse_id = _get_fabric_context_setting(name="trident.lakehouse.id")
+         if lakehouse_id == "":
+             raise ValueError(
+                 f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
+             )

-     return fabric.resolve_item_name(
+     return resolve_item_name(
          item_id=lakehouse_id, type="Lakehouse", workspace=workspace
      )

@@ -356,12 +530,14 @@ def resolve_lakehouse_id(
      """

      if lakehouse is None:
-         lakehouse_id = fabric.get_lakehouse_id()
-     elif _is_valid_uuid(lakehouse):
-         lakehouse_id = lakehouse
+         lakehouse_id = _get_fabric_context_setting(name="trident.lakehouse.id")
+         if lakehouse_id == "":
+             raise ValueError(
+                 f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
+             )
      else:
-         lakehouse_id = fabric.resolve_item_id(
-             item_name=lakehouse, type="Lakehouse", workspace=workspace
+         lakehouse_id = resolve_item_id(
+             item=lakehouse, type="Lakehouse", workspace=workspace
          )

      return lakehouse_id
@@ -543,8 +719,6 @@ def save_as_delta_table(
              f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
          )

-     dataframe.columns = [col.replace(" ", "_") for col in dataframe.columns]
-
      spark = _create_spark_session()

      type_mapping = {
@@ -562,6 +736,7 @@ def save_as_delta_table(
      }

      if isinstance(dataframe, pd.DataFrame):
+         dataframe.columns = [col.replace(" ", "_") for col in dataframe.columns]
          if schema is None:
              spark_df = spark.createDataFrame(dataframe)
          else:
@@ -573,6 +748,9 @@ def save_as_delta_table(
              )
              spark_df = spark.createDataFrame(dataframe, schema_map)
      else:
+         for col_name in dataframe.columns:
+             new_name = col_name.replace(" ", "_")
+             dataframe = dataframe.withColumnRenamed(col_name, new_name)
          spark_df = dataframe

      filePath = create_abfss_path(
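
The `save_as_delta_table` hunks above move the space-to-underscore column rename out of the shared path: pandas input keeps the list-comprehension rewrite of `dataframe.columns`, while Spark input is now renamed column by column with `withColumnRenamed`. A small pandas-only sketch of the rename rule itself (data is made up for illustration):

```python
import pandas as pd

# Column names with spaces are rewritten before the table is written to the lakehouse.
df = pd.DataFrame({"Order Date": ["2024-01-01"], "Sales Amount": [100]})
df.columns = [col.replace(" ", "_") for col in df.columns]  # same rule as the hunk above

print(list(df.columns))  # ['Order_Date', 'Sales_Amount']
```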
@@ -628,6 +806,55 @@ def language_validate(language: str):
      return lang


+ def resolve_workspace_id(
+     workspace: Optional[str | UUID] = None,
+ ) -> UUID:
+     if workspace is None:
+         workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+     elif _is_valid_uuid(workspace):
+         # Check (optional)
+         workspace_id = workspace
+         try:
+             _base_api(request=f"/v1/workspaces/{workspace_id}", client="fabric_sp")
+         except FabricHTTPException:
+             raise ValueError(
+                 f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+             )
+     else:
+         responses = _base_api(
+             request="/v1/workspaces", client="fabric_sp", uses_pagination=True
+         )
+         workspace_id = None
+         for r in responses:
+             for v in r.get("value", []):
+                 display_name = v.get("displayName")
+                 if display_name == workspace:
+                     workspace_id = v.get("id")
+                     break
+
+         if workspace_id is None:
+             raise WorkspaceNotFoundException(workspace)
+
+     return workspace_id
+
+
+ def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
+
+     if workspace_id is None:
+         workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+
+     try:
+         response = _base_api(
+             request=f"/v1/workspaces/{workspace_id}", client="fabric_sp"
+         ).json()
+     except FabricHTTPException:
+         raise ValueError(
+             f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+         )
+
+     return response.get("displayName")
+
+
  def resolve_workspace_name_and_id(
      workspace: Optional[str | UUID] = None,
  ) -> Tuple[str, str]:
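
The new `resolve_workspace_id` and `resolve_workspace_name` helpers above resolve workspaces through the REST API (with the `fabric_sp` client) instead of `fabric.resolve_workspace_*`. A minimal sketch of how they pair up, with the same assumed module path and a placeholder workspace name:

```python
from sempy_labs._helper_functions import (  # module path assumed from the file list
    resolve_workspace_id,
    resolve_workspace_name,
)

# Name -> ID: falls back to listing /v1/workspaces and matching displayName,
# raising WorkspaceNotFoundException when nothing matches (per the hunk above).
ws_id = resolve_workspace_id("My Workspace")  # placeholder name

# ID -> name: a single GET on /v1/workspaces/{id}; with no argument it uses the
# notebook's own workspace via the trident.workspace.id context setting.
ws_name = resolve_workspace_name(ws_id)
```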
@@ -643,21 +870,34 @@ def resolve_workspace_name_and_id(

      Returns
      -------
-     str, str
+     str, uuid.UUID
          The name and ID of the Fabric workspace.
      """

      if workspace is None:
-         workspace_id = fabric.get_workspace_id()
-         workspace_name = fabric.resolve_workspace_name(workspace_id)
+         workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+         workspace_name = resolve_workspace_name(workspace_id)
      elif _is_valid_uuid(workspace):
          workspace_id = workspace
-         workspace_name = fabric.resolve_workspace_name(workspace_id)
+         workspace_name = resolve_workspace_name(workspace_id)
      else:
-         workspace_name = workspace
-         workspace_id = fabric.resolve_workspace_id(workspace_name)
+         responses = _base_api(
+             request="/v1/workspaces", client="fabric_sp", uses_pagination=True
+         )
+         workspace_id = None
+         workspace_name = None
+         for r in responses:
+             for v in r.get("value", []):
+                 display_name = v.get("displayName")
+                 if display_name == workspace:
+                     workspace_name = workspace
+                     workspace_id = v.get("id")
+                     break
+
+         if workspace_name is None or workspace_id is None:
+             raise WorkspaceNotFoundException(workspace)

-     return str(workspace_name), str(workspace_id)
+     return workspace_name, workspace_id


  def _extract_json(dataframe: pd.DataFrame) -> dict:
@@ -770,7 +1010,7 @@ def resolve_dataset_from_report(
      dfR = _get_report(report=report, workspace=workspace)
      dataset_id = dfR["Dataset Id"].iloc[0]
      dataset_workspace_id = dfR["Dataset Workspace Id"].iloc[0]
-     dataset_workspace = fabric.resolve_workspace_name(dataset_workspace_id)
+     dataset_workspace = resolve_workspace_name(workspace_id=dataset_workspace_id)
      dataset_name = resolve_dataset_name(
          dataset_id=dataset_id, workspace=dataset_workspace
      )
@@ -803,12 +1043,13 @@ def resolve_workspace_capacity(
      Tuple[uuid.UUID, str]
          capacity Id; capacity came.
      """
+     from sempy_labs._capacities import list_capacities

      (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
      filter_condition = urllib.parse.quote(workspace_id)
      dfW = fabric.list_workspaces(filter=f"id eq '{filter_condition}'")
      capacity_id = dfW["Capacity Id"].iloc[0]
-     dfC = fabric.list_capacities()
+     dfC = list_capacities()
      dfC_filt = dfC[dfC["Id"] == capacity_id]
      if len(dfC_filt) == 1:
          capacity_name = dfC_filt["Display Name"].iloc[0]
@@ -866,8 +1107,10 @@ def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
          The capacity name.
      """

+     from sempy_labs._capacities import list_capacities
+
      capacity_id = get_capacity_id(workspace)
-     dfC = fabric.list_capacities()
+     dfC = list_capacities()
      dfC_filt = dfC[dfC["Id"] == capacity_id]
      if dfC_filt.empty:
          raise ValueError(
@@ -893,11 +1136,12 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
      str
          The capacity name.
      """
+     from sempy_labs._capacities import list_capacities

      if capacity_id is None:
          return get_capacity_name()

-     dfC = fabric.list_capacities()
+     dfC = list_capacities()
      dfC_filt = dfC[dfC["Id"] == capacity_id]

      if dfC_filt.empty:
@@ -908,14 +1152,14 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
      return dfC_filt["Display Name"].iloc[0]


- def resolve_capacity_id(capacity_name: Optional[str] = None) -> UUID:
+ def resolve_capacity_id(capacity: Optional[str | UUID] = None, **kwargs) -> UUID:
      """
      Obtains the capacity Id for a given capacity name.

      Parameters
      ----------
-     capacity_name : str, default=None
-         The capacity name.
+     capacity : str | uuid.UUID, default=None
+         The capacity name or ID.
          Defaults to None which resolves to the capacity id of the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook.

@@ -924,17 +1168,24 @@ def resolve_capacity_id(capacity_name: Optional[str] = None) -> UUID:
      uuid.UUID
          The capacity Id.
      """
+     from sempy_labs._capacities import list_capacities
+
+     if "capacity_name" in kwargs:
+         capacity = kwargs["capacity_name"]
+         print(
+             f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
+         )

-     if capacity_name is None:
+     if capacity is None:
          return get_capacity_id()
+     if _is_valid_uuid(capacity):
+         return capacity

-     dfC = fabric.list_capacities()
-     dfC_filt = dfC[dfC["Display Name"] == capacity_name]
+     dfC = list_capacities()
+     dfC_filt = dfC[dfC["Display Name"] == capacity]

      if dfC_filt.empty:
-         raise ValueError(
-             f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
-         )
+         raise ValueError(f"{icons.red_dot} The '{capacity}' capacity does not exist.")

      return dfC_filt["Id"].iloc[0]
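
The hunk above widens `resolve_capacity_id` to accept a name or an ID and keeps `capacity_name=` working through a deprecation shim in `**kwargs`. A hedged sketch of the call patterns this supports (capacity names and the import path are placeholders/assumptions):

```python
from sempy_labs._helper_functions import resolve_capacity_id  # module path assumed

# New-style calls: a display name or a capacity ID both work;
# an ID short-circuits before list_capacities() is queried.
cap_id = resolve_capacity_id("MyCapacity")
same_id = resolve_capacity_id(cap_id)

# Old-style call: still resolves, but prints the deprecation warning from the hunk above.
legacy_id = resolve_capacity_id(capacity_name="MyCapacity")
```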

@@ -1097,12 +1348,7 @@ def resolve_warehouse_id(
          The warehouse Id.
      """

-     if _is_valid_uuid(warehouse):
-         return warehouse
-     else:
-         return fabric.resolve_item_id(
-             item_name=warehouse, type="Warehouse", workspace=workspace
-         )
+     return resolve_item_id(item=warehouse, type="Warehouse", workspace=workspace)


  def get_language_codes(languages: str | List[str]):
@@ -1162,14 +1408,14 @@ def convert_to_alphanumeric_lowercase(input_string):


  def resolve_environment_id(
-     environment: str, workspace: Optional[str | UUID] = None
+     environment: str | UUID, workspace: Optional[str | UUID] = None
  ) -> UUID:
      """
      Obtains the environment Id for a given environment.

      Parameters
      ----------
-     environment: str
+     environment: str | uuid.UUID
          Name of the environment.
      workspace : str | uuid.UUID, default=None
          The Fabric workspace name or ID in which the semantic model resides.
@@ -1178,13 +1424,11 @@ def resolve_environment_id(

      Returns
      -------
-     UUID
+     uuid.UUID
          The environment Id.
      """

-     return fabric.resolve_item_id(
-         item_name=environment, type="Environment", workspace=workspace
-     )
+     return resolve_item_id(item=environment, type="Environment", workspace=workspace)


  def _make_clickable(val):
@@ -1216,14 +1460,16 @@ def convert_to_friendly_case(text: str) -> str:
      return text


- def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -> UUID:
+ def resolve_notebook_id(
+     notebook: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> UUID:
      """
      Obtains the notebook Id for a given notebook.

      Parameters
      ----------
-     notebook: str
-         Name of the notebook.
+     notebook: str | uuid.UUID
+         Name or ID of the notebook.
      workspace : str | uuid.UUID, default=None
          The Fabric workspace name or ID in which the semantic model resides.
          Defaults to None which resolves to the workspace of the attached lakehouse
@@ -1235,9 +1481,7 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -
          The notebook Id.
      """

-     return fabric.resolve_item_id(
-         item_name=notebook, type="Notebook", workspace=workspace
-     )
+     return resolve_item_id(item=notebook, type="Notebook", workspace=workspace)


  def generate_guid():
@@ -1436,6 +1680,7 @@ def _convert_data_type(input_data_type: str) -> str:
          "date": "DateTime",
          "double": "Double",
          "float": "Double",
+         "binary": "Boolean",
      }

      if "decimal" in input_data_type:
@@ -1523,9 +1768,15 @@ def _base_api(
          raise NotImplementedError
      else:
          headers = _get_headers(auth.token_provider.get(), audience=client)
+         if client == "graph":
+             url = f"https://graph.microsoft.com/v1.0/{request}"
+         elif client == "azure":
+             url = request
+         else:
+             raise NotImplementedError
          response = requests.request(
              method.upper(),
-             f"https://graph.microsoft.com/v1.0/{request}",
+             url,
              headers=headers,
              json=payload,
          )
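
The last hunk above lets `_base_api` route non-Fabric clients: `graph` requests are prefixed with the Microsoft Graph base URL, while `azure` requests are treated as full URLs. A stripped-down, standalone sketch of just that routing decision (not the library's actual function):

```python
def _select_url(client: str, request: str) -> str:
    # Mirrors the branch added in the hunk above: graph -> prefixed, azure -> passed through.
    if client == "graph":
        return f"https://graph.microsoft.com/v1.0/{request}"
    elif client == "azure":
        return request
    raise NotImplementedError


assert _select_url("graph", "users") == "https://graph.microsoft.com/v1.0/users"
assert _select_url("azure", "https://management.azure.com/subscriptions").startswith("https://management.azure.com")
```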
@@ -1666,3 +1917,101 @@ def _mount(lakehouse, workspace) -> str:
      )

      return local_path
+
+
+ def _get_or_create_workspace(
+     workspace: str,
+     capacity: Optional[str | UUID] = None,
+     description: Optional[str] = None,
+ ) -> Tuple[str, UUID]:
+
+     capacity_id = resolve_capacity_id(capacity)
+     dfW = fabric.list_workspaces()
+     dfW_filt_name = dfW[dfW["Name"] == workspace]
+     dfW_filt_id = dfW[dfW["Id"] == workspace]
+
+     # Workspace already exists
+     if (not dfW_filt_name.empty) or (not dfW_filt_id.empty):
+         print(f"{icons.green_dot} The '{workspace}' workspace already exists.")
+         (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+         return (workspace_name, workspace_id)
+
+     # Do not create workspace with name of an ID
+     if _is_valid_uuid(workspace):
+         raise ValueError(f"{icons.warning} Must enter a workspace name, not an ID.")
+
+     print(f"{icons.in_progress} Creating the '{workspace}' workspace...")
+     workspace_id = fabric.create_workspace(
+         display_name=workspace, capacity_id=capacity_id, description=description
+     )
+     print(
+         f"{icons.green_dot} The '{workspace}' workspace has been successfully created."
+     )
+
+     return (workspace, workspace_id)
+
+
+ def _get_or_create_lakehouse(
+     lakehouse: str,
+     workspace: Optional[str | UUID] = None,
+     description: Optional[str] = None,
+ ) -> Tuple[str, UUID]:
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     dfI = fabric.list_items(type="Lakehouse", workspace=workspace)
+     dfI_filt_name = dfI[dfI["Display Name"] == lakehouse]
+     dfI_filt_id = dfI[dfI["Id"] == lakehouse]
+
+     if (not dfI_filt_name.empty) or (not dfI_filt_id.empty):
+         print(f"{icons.green_dot} The '{lakehouse}' lakehouse already exists.")
+         (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+             lakehouse=lakehouse, workspace=workspace
+         )
+         return (lakehouse_name, lakehouse_id)
+     if _is_valid_uuid(lakehouse):
+         raise ValueError(f"{icons.warning} Must enter a lakehouse name, not an ID.")
+
+     print(f"{icons.in_progress} Creating the '{lakehouse}' lakehouse...")
+     lakehouse_id = fabric.create_lakehouse(
+         display_name=lakehouse, workspace=workspace, description=description
+     )
+     print(
+         f"{icons.green_dot} The '{lakehouse}' lakehouse has been successfully created within the '{workspace_name}' workspace."
+     )
+
+     return (lakehouse, lakehouse_id)
+
+
+ def _get_or_create_warehouse(
+     warehouse: str,
+     workspace: Optional[str | UUID] = None,
+     description: Optional[str] = None,
+ ) -> Tuple[str, UUID]:
+
+     from sempy_labs._warehouses import create_warehouse
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     dfI = fabric.list_items(type="Warehouse", workspace=workspace)
+     dfI_filt_name = dfI[dfI["Display Name"] == warehouse]
+     dfI_filt_id = dfI[dfI["Id"] == warehouse]
+
+     if (not dfI_filt_name.empty) or (not dfI_filt_id.empty):
+         print(f"{icons.green_dot} The '{warehouse}' warehouse already exists.")
+         (warehouse_name, warehouse_id) = resolve_item_name_and_id(
+             warehouse=warehouse, type="Warehouse", workspace=workspace
+         )
+         return (warehouse_name, warehouse_id)
+     if _is_valid_uuid(warehouse):
+         raise ValueError(f"{icons.warning} Must enter a warehouse name, not an ID.")
+
+     print(f"{icons.in_progress} Creating the '{warehouse}' warehouse...")
+     warehouse_id = create_warehouse(
+         display_name=warehouse, workspace=workspace, description=description
+     )
+     print(
+         f"{icons.green_dot} The '{warehouse}' warehouse has been successfully created within the '{workspace_name}' workspace."
+     )
+
+     return (warehouse, warehouse_id)
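
The three `_get_or_create_*` helpers appended above follow the same pattern: return the existing item when a name or ID match is found, refuse to create an item whose name is a UUID, and otherwise create it and return the `(name, id)` pair. A hedged usage sketch; these are private helpers, and the import path plus the workspace, capacity and lakehouse names are assumptions:

```python
from sempy_labs._helper_functions import (  # private helpers; path assumed from the file list
    _get_or_create_lakehouse,
    _get_or_create_workspace,
)

# Idempotent setup: both calls only print a message when the objects already exist.
ws_name, ws_id = _get_or_create_workspace("Demo Workspace", capacity="MyCapacity")
lh_name, lh_id = _get_or_create_lakehouse("DemoLakehouse", workspace=ws_id)
```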