semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_refresh_semantic_model.py CHANGED
@@ -1,10 +1,10 @@
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     resolve_workspace_name_and_id,
     _get_partition_map,
     _process_and_display_chart,
+    resolve_dataset_name_and_id,
 )
 from typing import Any, List, Optional, Union
 from sempy._utils._log import log
@@ -14,18 +14,19 @@ import pandas as pd
 import warnings
 import ipywidgets as widgets
 import json
+from uuid import UUID


 @log
 def refresh_semantic_model(
-    dataset: str,
+    dataset: str | UUID,
     tables: Optional[Union[str, List[str]]] = None,
     partitions: Optional[Union[str, List[str]]] = None,
     refresh_type: str = "full",
     retry_count: int = 0,
     apply_refresh_policy: bool = True,
     max_parallelism: int = 10,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     visualize: bool = False,
     commit_mode: str = "transactional",
 ) -> pd.DataFrame | None:
@@ -34,8 +35,8 @@ def refresh_semantic_model(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     tables : str, List[str], default=None
         A string or a list of tables to refresh.
     partitions: str, List[str], default=None
@@ -50,8 +51,8 @@ def refresh_semantic_model(
         Determines the maximum number of threads that can run the processing commands in parallel.
         This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods.
         Defaults to 10.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     visualize : bool, default=False
@@ -65,7 +66,8 @@ def refresh_semantic_model(
         If 'visualize' is set to True, returns a pandas dataframe showing the SSAS trace output used to generate the visualization.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     if isinstance(tables, str):
         tables = [tables]
@@ -118,11 +120,11 @@ def refresh_semantic_model(
     def extract_failure_error():
         error_messages = []
         combined_messages = ""
-        final_message = f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+        final_message = f"{icons.red_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has failed."
         for _, r in fabric.get_refresh_execution_details(
             refresh_request_id=request_id,
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
         ).messages.iterrows():
             error_messages.append(f"{r['Type']}: {r['Message']}")

@@ -135,8 +137,8 @@ def refresh_semantic_model(
     # Function to perform dataset refresh
     def refresh_dataset():
         return fabric.refresh_dataset(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             refresh_type=refresh_type,
             retry_count=retry_count,
             apply_refresh_policy=apply_refresh_policy,
@@ -147,7 +149,9 @@ def refresh_semantic_model(

     def check_refresh_status(request_id):
         request_details = fabric.get_refresh_execution_details(
-            dataset=dataset, refresh_request_id=request_id, workspace=workspace
+            dataset=dataset_id,
+            refresh_request_id=request_id,
+            workspace=workspace_id,
         )
         return request_details.status

@@ -169,7 +173,8 @@ def refresh_semantic_model(
             right_on="PartitionID",
             how="left",
         )
-        _process_and_display_chart(df, title=title, widget=widget)
+        if not df.empty:
+            _process_and_display_chart(df, title=title, widget=widget)
        if stop:
            df.drop(["Object Name", "PartitionID"], axis=1, inplace=True)
            df.rename(columns={"TableName": "Table Name"}, inplace=True)
@@ -180,7 +185,7 @@ def refresh_semantic_model(
     if not visualize:
         request_id = refresh_dataset()
         print(
-            f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
+            f"{icons.in_progress} Refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is in progress..."
         )

     # Monitor refresh progress and handle tracing if visualize is enabled
@@ -189,7 +194,7 @@ def refresh_semantic_model(
         widget = widgets.Output()

         with fabric.create_trace_connection(
-            dataset=dataset, workspace=workspace
+            dataset=dataset_id, workspace=workspace_id
         ) as trace_connection:
             with trace_connection.create_trace(icons.refresh_event_schema) as trace:
                 trace.start()
@@ -204,7 +209,7 @@ def refresh_semantic_model(
                 raise ValueError(extract_failure_error())
             elif status == "Cancelled":
                 print(
-                    f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                    f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
                 )
                 return

@@ -231,7 +236,7 @@ def refresh_semantic_model(
             )

             print(
-                f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+                f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
             )
             return final_df

@@ -245,14 +250,14 @@ def refresh_semantic_model(
             raise ValueError(extract_failure_error())
         elif status == "Cancelled":
             print(
-                f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
             )
             return

         time.sleep(3)

     print(
-        f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+        f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
     )

     final_output = refresh_and_trace_dataset(
@@ -272,39 +277,40 @@ def refresh_semantic_model(

 @log
 def cancel_dataset_refresh(
-    dataset: str, request_id: Optional[str] = None, workspace: Optional[str] = None
+    dataset: str | UUID,
+    request_id: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Cancels the refresh of a semantic model which was executed via the `Enhanced Refresh API <https://learn.microsoft.com/power-bi/connect-data/asynchronous-refresh>`_

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     request_id : str, default=None
         The request id of a semantic model refresh.
         Defaults to finding the latest active refresh of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    rr = fabric.list_refresh_requests(dataset=dataset, workspace=workspace)
+    rr = fabric.list_refresh_requests(dataset=dataset_id, workspace=workspace_id)
     rr_filt = rr[rr["Status"] == "Unknown"]

     if request_id is None:
         if len(rr_filt) == 0:
             raise ValueError(
-                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
+                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )

         request_id = rr_filt["Request Id"].iloc[0]

-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
-
     client = fabric.PowerBIRestClient()

     response = client.delete(
@@ -314,12 +320,14 @@ def cancel_dataset_refresh(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
     )


 def get_semantic_model_refresh_history(
-    dataset: str, request_id: Optional[str] = None, workspace: Optional[str] = None
+    dataset: str | UUID,
+    request_id: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Obtains the semantic model refresh history (refreshes executed via the Enhanced Refresh API).
@@ -328,13 +336,13 @@ def get_semantic_model_refresh_history(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     request_id : str, default=None
         The request id of a semantic model refresh.
         Defaults to None which resolves to showing all refresh requests for the given semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -344,8 +352,8 @@ def get_semantic_model_refresh_history(
         A pandas dataframe showing the semantic model refresh history.
     """

-    workspace_name = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace_name)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     df = pd.DataFrame(
         columns=[
             "Request Id",
@@ -357,9 +365,6 @@ def get_semantic_model_refresh_history(
         ]
     )

-    dataset_id = fabric.resolve_item_id(
-        item_name=dataset, workspace=workspace_id, type="SemanticModel"
-    )
     client = fabric.PowerBIRestClient()
     response = client.get(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes"
     )
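These hunks move `refresh_semantic_model`, `cancel_dataset_refresh`, and `get_semantic_model_refresh_history` from name-only `dataset`/`workspace` parameters to `str | UUID`, resolving names and IDs up front via `resolve_workspace_name_and_id` and `resolve_dataset_name_and_id`. A minimal usage sketch, assuming the top-level `sempy_labs` exports match 0.8.11 and using placeholder GUIDs:

    from uuid import UUID
    import sempy_labs as labs

    # Placeholder IDs for illustration only; substitute real workspace/model GUIDs.
    workspace_id = UUID("11111111-1111-1111-1111-111111111111")
    dataset_id = UUID("22222222-2222-2222-2222-222222222222")

    # Passing names still works; IDs are now accepted as well.
    labs.refresh_semantic_model(
        dataset=dataset_id,
        workspace=workspace_id,
        refresh_type="full",
    )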
sempy_labs/_spark.py CHANGED
@@ -6,9 +6,10 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_custom_pools(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Lists all `custom pools <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

@@ -16,7 +17,7 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The name of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -27,7 +28,7 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing all the custom pools within the Fabric workspace.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(
         columns=[
@@ -95,7 +96,7 @@ def create_custom_pool(
     node_family: str = "MemoryOptimized",
     auto_scale_enabled: bool = True,
     dynamic_executor_allocation_enabled: bool = True,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
@@ -122,13 +123,13 @@ def create_custom_pool(
         The status of `auto scale <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
     dynamic_executor_allocation_enabled : bool, default=True
         The status of the `dynamic executor allocation <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     request_body = {
         "name": pool_name,
@@ -154,7 +155,7 @@ def create_custom_pool(
     if response.status_code != 201:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace_name}' workspace."
     )


@@ -168,7 +169,7 @@ def update_custom_pool(
     node_family: Optional[str] = None,
     auto_scale_enabled: Optional[bool] = None,
     dynamic_executor_allocation_enabled: Optional[bool] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Updates the properties of a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
@@ -203,20 +204,20 @@ def update_custom_pool(
     dynamic_executor_allocation_enabled : bool, default=None
         The status of the `dynamic executor allocation <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
         Defaults to None which keeps the existing property setting.
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = list_custom_pools(workspace=workspace)
     df_pool = df[df["Custom Pool Name"] == pool_name]

     if len(df_pool) == 0:
         raise ValueError(
-            f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace}'. Please choose a valid custom pool."
+            f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace_name}'. Please choose a valid custom pool."
         )

     if node_family is None:
@@ -262,11 +263,11 @@ def update_custom_pool(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
+        f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace_name}' workspace has been updated."
     )


-def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
+def delete_custom_pool(pool_name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

@@ -276,35 +277,35 @@ def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
     ----------
     pool_name : str
         The custom pool name.
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfL = list_custom_pools(workspace=workspace)
+    dfL = list_custom_pools(workspace=workspace_id)
     dfL_filt = dfL[dfL["Custom Pool Name"] == pool_name]

     if len(dfL_filt) == 0:
         raise ValueError(
-            f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace}' workspace."
+            f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace_name}' workspace."
         )
-    poolId = dfL_filt["Custom Pool ID"].iloc[0]
+    pool_id = dfL_filt["Custom Pool ID"].iloc[0]

     client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/spark/pools/{poolId}")
+    response = client.delete(f"/v1/workspaces/{workspace_id}/spark/pools/{pool_id}")

     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
+        f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace_name}' workspace."
     )


 def get_spark_settings(
-    workspace: Optional[str] = None, return_dataframe: bool = True
+    workspace: Optional[str | UUID] = None, return_dataframe: bool = True
 ) -> pd.DataFrame | dict:
     """
     Shows the spark settings for a workspace.
@@ -313,8 +314,8 @@ def get_spark_settings(

     Parameters
     ----------
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     return_dataframe : bool, default=True
@@ -326,7 +327,7 @@ def get_spark_settings(
         A pandas dataframe showing the spark settings for a workspace.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(
         columns=[
@@ -393,7 +394,7 @@ def update_spark_settings(
     max_executors: Optional[int] = None,
     environment_name: Optional[str] = None,
     runtime_version: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Updates the spark settings for a workspace.
@@ -426,13 +427,13 @@ def update_spark_settings(
     runtime_version : str, default=None
         The `runtime version <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties>`_.
         Defaults to None which keeps the existing property setting.
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     request_body = get_spark_settings(workspace=workspace, return_dataframe=False)

@@ -463,5 +464,5 @@ def update_spark_settings(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The spark settings within the '{workspace}' workspace have been updated accordingly."
+        f"{icons.green_dot} The spark settings within the '{workspace_name}' workspace have been updated accordingly."
     )
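The `_spark.py` changes follow the same pattern: every `workspace` parameter accepts `str | UUID`, the resolved `workspace_name` is used in messages, and `delete_custom_pool` now looks pools up by `workspace_id`. A short sketch with a placeholder GUID, assuming these functions are exposed at the package top level:

    from uuid import UUID
    import sempy_labs as labs

    ws = UUID("33333333-3333-3333-3333-333333333333")  # placeholder workspace ID

    # The returned dataframe uses the column names referenced in the diff above.
    pools = labs.list_custom_pools(workspace=ws)
    print(pools[["Custom Pool Name", "Custom Pool ID"]])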
sempy_labs/_sql.py CHANGED
@@ -3,10 +3,14 @@ import pandas as pd
 from typing import Optional, Union, List
 from sempy._utils._log import log
 import struct
-import uuid
 from itertools import chain, repeat
 from sempy.fabric.exceptions import FabricHTTPException
-from sempy_labs._helper_functions import resolve_warehouse_id, resolve_lakehouse_id
+from sempy_labs._helper_functions import (
+    resolve_warehouse_id,
+    resolve_lakehouse_id,
+    resolve_workspace_name_and_id,
+)
+from uuid import UUID


 def _bytes2mswin_bstr(value: bytes) -> bytes:
@@ -32,21 +36,20 @@ class ConnectBase:
     def __init__(
         self,
         name: str,
-        workspace: Optional[Union[str, uuid.UUID]] = None,
+        workspace: Optional[Union[str, UUID]] = None,
         timeout: Optional[int] = None,
         endpoint_type: str = "warehouse",
     ):
         from sempy.fabric._token_provider import SynapseTokenProvider
         import pyodbc

-        workspace = fabric.resolve_workspace_name(workspace)
-        workspace_id = fabric.resolve_workspace_id(workspace)
+        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

         # Resolve the appropriate ID (warehouse or lakehouse)
         if endpoint_type == "warehouse":
-            resource_id = resolve_warehouse_id(warehouse=name, workspace=workspace)
+            resource_id = resolve_warehouse_id(warehouse=name, workspace=workspace_id)
         else:
-            resource_id = resolve_lakehouse_id(lakehouse=name, workspace=workspace)
+            resource_id = resolve_lakehouse_id(lakehouse=name, workspace=workspace_id)

         # Get the TDS endpoint
         client = fabric.FabricRestClient()
@@ -139,7 +142,7 @@ class ConnectWarehouse(ConnectBase):
     def __init__(
         self,
         warehouse: str,
-        workspace: Optional[Union[str, uuid.UUID]] = None,
+        workspace: Optional[Union[str, UUID]] = None,
         timeout: Optional[int] = None,
     ):
         super().__init__(
@@ -154,7 +157,7 @@ class ConnectLakehouse(ConnectBase):
     def __init__(
         self,
         lakehouse: str,
-        workspace: Optional[Union[str, uuid.UUID]] = None,
+        workspace: Optional[Union[str, UUID]] = None,
         timeout: Optional[int] = None,
     ):
         super().__init__(
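`ConnectBase` now resolves the workspace through `resolve_workspace_name_and_id`, so `ConnectWarehouse` and `ConnectLakehouse` accept either a workspace name or a UUID. A sketch under the assumption that the connection object is used as a context manager and exposes a `query` helper:

    from uuid import UUID
    import sempy_labs as labs

    ws = UUID("44444444-4444-4444-4444-444444444444")  # placeholder workspace ID

    # `query` is assumed here; check the sempy_labs._sql API for the exact method name.
    with labs.ConnectWarehouse("MyWarehouse", workspace=ws) as conn:
        df = conn.query("SELECT TOP 10 * FROM sys.tables")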
sempy_labs/_translations.py CHANGED
@@ -3,29 +3,32 @@ import pandas as pd
 from typing import List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
-from sempy_labs._helper_functions import get_language_codes
+from sempy_labs._helper_functions import (
+    get_language_codes,
+)
+from uuid import UUID


 @log
 def translate_semantic_model(
-    dataset: str,
+    dataset: str | UUID,
     languages: Union[str, List[str]],
     exclude_characters: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Translates names, descriptions, display folders for all objects in a semantic model.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     languages : str, List[str]
         The language code(s) in which to translate the semantic model.
     exclude_characters : str
         A string specifying characters which will be replaced by a space in the translation text when sent to the translation service.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
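`translate_semantic_model` gets the same treatment, so the model and the workspace can both be addressed by ID. A minimal sketch with placeholder GUIDs, assuming the top-level export:

    from uuid import UUID
    import sempy_labs as labs

    labs.translate_semantic_model(
        dataset=UUID("55555555-5555-5555-5555-555555555555"),    # placeholder model ID
        languages=["fr-FR", "de-DE"],
        workspace=UUID("66666666-6666-6666-6666-666666666666"),  # placeholder workspace ID
    )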