semantic-link-labs 0.8.10__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of semantic-link-labs has been flagged as a potentially problematic release.

Files changed (73)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
  3. sempy_labs/__init__.py +6 -2
  4. sempy_labs/_clear_cache.py +39 -37
  5. sempy_labs/_connections.py +13 -13
  6. sempy_labs/_data_pipelines.py +20 -20
  7. sempy_labs/_dataflows.py +27 -28
  8. sempy_labs/_dax.py +41 -47
  9. sempy_labs/_environments.py +26 -23
  10. sempy_labs/_eventhouses.py +16 -15
  11. sempy_labs/_eventstreams.py +16 -15
  12. sempy_labs/_external_data_shares.py +18 -20
  13. sempy_labs/_gateways.py +14 -14
  14. sempy_labs/_generate_semantic_model.py +99 -62
  15. sempy_labs/_git.py +105 -43
  16. sempy_labs/_helper_functions.py +148 -131
  17. sempy_labs/_job_scheduler.py +92 -0
  18. sempy_labs/_kql_databases.py +16 -15
  19. sempy_labs/_kql_querysets.py +16 -15
  20. sempy_labs/_list_functions.py +114 -99
  21. sempy_labs/_managed_private_endpoints.py +19 -17
  22. sempy_labs/_mirrored_databases.py +51 -48
  23. sempy_labs/_mirrored_warehouses.py +5 -4
  24. sempy_labs/_ml_experiments.py +16 -15
  25. sempy_labs/_ml_models.py +15 -14
  26. sempy_labs/_model_bpa.py +3 -3
  27. sempy_labs/_model_dependencies.py +55 -29
  28. sempy_labs/_notebooks.py +27 -25
  29. sempy_labs/_one_lake_integration.py +23 -26
  30. sempy_labs/_query_scale_out.py +67 -64
  31. sempy_labs/_refresh_semantic_model.py +25 -26
  32. sempy_labs/_spark.py +33 -32
  33. sempy_labs/_sql.py +12 -9
  34. sempy_labs/_translations.py +10 -7
  35. sempy_labs/_vertipaq.py +34 -31
  36. sempy_labs/_warehouses.py +22 -21
  37. sempy_labs/_workspace_identity.py +11 -10
  38. sempy_labs/_workspaces.py +40 -33
  39. sempy_labs/admin/_basic_functions.py +10 -12
  40. sempy_labs/admin/_external_data_share.py +3 -3
  41. sempy_labs/admin/_items.py +4 -4
  42. sempy_labs/admin/_scanner.py +3 -1
  43. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  44. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  45. sempy_labs/directlake/_dl_helper.py +25 -26
  46. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  47. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  48. sempy_labs/directlake/_get_shared_expression.py +4 -3
  49. sempy_labs/directlake/_guardrails.py +12 -6
  50. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  51. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  52. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  54. sempy_labs/directlake/_warm_cache.py +87 -65
  55. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  57. sempy_labs/lakehouse/_lakehouse.py +17 -13
  58. sempy_labs/lakehouse/_shortcuts.py +42 -23
  59. sempy_labs/migration/_create_pqt_file.py +16 -11
  60. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  61. sempy_labs/report/_download_report.py +9 -8
  62. sempy_labs/report/_generate_report.py +40 -44
  63. sempy_labs/report/_paginated.py +9 -9
  64. sempy_labs/report/_report_bpa.py +13 -9
  65. sempy_labs/report/_report_functions.py +80 -91
  66. sempy_labs/report/_report_helper.py +8 -4
  67. sempy_labs/report/_report_list_functions.py +24 -13
  68. sempy_labs/report/_report_rebind.py +17 -16
  69. sempy_labs/report/_reportwrapper.py +41 -33
  70. sempy_labs/tom/_model.py +43 -6
  71. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  72. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  73. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_workspace_identity.py CHANGED
@@ -6,9 +6,10 @@ from sempy_labs._helper_functions import (
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def provision_workspace_identity(workspace: Optional[str] = None):
+def provision_workspace_identity(workspace: Optional[str | UUID] = None):
     """
     Provisions a workspace identity for a workspace.

@@ -16,13 +17,13 @@ def provision_workspace_identity(workspace: Optional[str] = None):

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    workspace, workspace_id = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/provisionIdentity")
@@ -33,11 +34,11 @@ def provision_workspace_identity(workspace: Optional[str] = None):
     lro(client, response)

     print(
-        f"{icons.green_dot} A workspace identity has been provisioned for the '{workspace}' workspace."
+        f"{icons.green_dot} A workspace identity has been provisioned for the '{workspace_name}' workspace."
     )


-def deprovision_workspace_identity(workspace: Optional[str] = None):
+def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
     """
     Deprovisions a workspace identity for a workspace.

@@ -45,13 +46,13 @@ def deprovision_workspace_identity(workspace: Optional[str] = None):

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    workspace, workspace_id = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/deprovisionIdentity")
@@ -62,5 +63,5 @@ def deprovision_workspace_identity(workspace: Optional[str] = None):
     lro(client, response)

     print(
-        f"{icons.green_dot} The workspace identity has been deprovisioned from the '{workspace}' workspace."
+        f"{icons.green_dot} The workspace identity has been deprovisioned from the '{workspace_name}' workspace."
     )
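
Net effect of this file's changes: both identity helpers now take the workspace by name or by GUID, and status messages print the resolved name. A minimal usage sketch (assumes a Fabric notebook with semantic-link-labs 0.8.11 and that these functions remain exported at the package root as in prior releases; the GUID is a placeholder):

    from uuid import UUID

    import sempy_labs as labs

    # Placeholder workspace GUID; substitute a real workspace ID.
    ws_id = UUID("00000000-0000-0000-0000-000000000000")

    # 0.8.11 resolves either a workspace name or its UUID.
    labs.provision_workspace_identity(workspace=ws_id)
    labs.deprovision_workspace_identity(workspace="Sales Workspace")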
sempy_labs/_workspaces.py CHANGED
@@ -8,9 +8,12 @@ from sempy_labs._helper_functions import (
     resolve_capacity_id,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


-def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):
+def delete_user_from_workspace(
+    email_address: str, workspace: Optional[str | UUID] = None
+):
     """
     Removes a user from a workspace.

@@ -20,13 +23,13 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = No
     ----------
     email_address : str
         The email address of the user.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.PowerBIRestClient()
     response = client.delete(f"/v1.0/myorg/groups/{workspace_id}/users/{email_address}")
@@ -34,7 +37,7 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = No
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace."
+        f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace_name}' workspace."
     )


@@ -42,7 +45,7 @@ def update_workspace_user(
     email_address: str,
     role_name: str,
     principal_type: Optional[str] = "User",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Updates a user's role within a workspace.
@@ -57,13 +60,13 @@
         The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
     principal_type : str, default='User'
         The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     role_names = icons.workspace_roles
     role_name = role_name.capitalize()
@@ -91,11 +94,11 @@
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace_name}' workspace."
     )


-def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     A list of all the users of a workspace and their roles.

@@ -103,8 +106,8 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:

     Parameters
     ----------
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -114,7 +117,7 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe the users of a workspace and their properties.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(columns=["User Name", "Email Address", "Role", "Type", "User ID"])
     client = fabric.FabricRestClient()
@@ -143,7 +146,7 @@ def add_user_to_workspace(
     email_address: str,
     role_name: str,
     principal_type: Optional[str] = "User",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Adds a user to a workspace.
@@ -158,13 +161,13 @@
         The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
     principal_type : str, default='User'
         The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     role_names = icons.workspace_roles
     role_name = role_name.capitalize()
@@ -196,11 +199,13 @@
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace_name}' workspace."
     )


-def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
+def assign_workspace_to_capacity(
+    capacity_name: str, workspace: Optional[str | UUID] = None
+):
     """
     Assigns a workspace to a capacity.

@@ -210,13 +215,13 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] =
     ----------
     capacity_name : str
         The name of the capacity.
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     capacity_id = resolve_capacity_id(capacity_name=capacity_name)

     request_body = {"capacityId": capacity_id}
@@ -230,11 +235,11 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] =
     if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
+        f"{icons.green_dot} The '{workspace_name}' workspace has been assigned to the '{capacity_name}' capacity."
     )


-def unassign_workspace_from_capacity(workspace: Optional[str] = None):
+def unassign_workspace_from_capacity(workspace: Optional[str | UUID] = None):
     """
     Unassigns a workspace from its assigned capacity.

@@ -242,13 +247,13 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):

     Parameters
     ----------
-    workspace : str, default=None
-        The name of the Fabric workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
     response = client.post(f"/v1/workspaces/{workspace_id}/unassignFromCapacity")
@@ -256,11 +261,13 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
     if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
+        f"{icons.green_dot} The '{workspace_name}' workspace has been unassigned from its capacity."
     )


-def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_workspace_role_assignments(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
     """
     Shows the members of a given workspace.

@@ -268,8 +275,8 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataF

     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -279,7 +286,7 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataF
     A pandas dataframe showing the members of a given workspace and their roles.
     """

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(columns=["User Name", "User Email", "Role Name", "Type"])
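
The same pattern runs through the user-management and capacity helpers in this file: workspace may now be a name or a UUID, and messages print the resolved workspace_name. A hedged sketch of the new call shapes (placeholder GUID and email address; assumes these functions remain exported from sempy_labs):

    from uuid import UUID

    import sempy_labs as labs

    ws = UUID("00000000-0000-0000-0000-000000000000")  # placeholder workspace ID

    # User management now resolves a UUID the same way it resolves a name.
    labs.add_user_to_workspace(
        email_address="user@contoso.com",
        role_name="Member",
        workspace=ws,
    )
    users = labs.list_workspace_users(workspace=ws)
    labs.delete_user_from_workspace(email_address="user@contoso.com", workspace=ws)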
sempy_labs/admin/_basic_functions.py CHANGED
@@ -27,9 +27,9 @@ def list_workspaces(

     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
         Returns only the workspaces in the specified Capacity.
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
         Returns the workspace with the specific name.
     workspace_state : str, default=None
         Return only the workspace with the requested state. You can find the possible states in `Workspace States <https://learn.microsoft.com/en-us/rest/api/fabric/admin/workspaces/list-workspaces?tabs=HTTP#workspacestate>`_.
@@ -131,7 +131,7 @@ def list_capacities(

     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
         Capacity name or id to filter.

     Returns
@@ -185,11 +185,11 @@ def assign_workspaces_to_capacity(

     Parameters
     ----------
-    source_capacity : str | UUID, default=None
+    source_capacity : str | uuid.UUID, default=None
         The name of the source capacity. If the Workspace is not specified, this is parameter mandatory.
-    target_capacity : str | UUID, default=None
+    target_capacity : str | uuid.UUID, default=None
         The name of the target capacity.
-    workspace : str | List[str] | UUID | List[UUID], default=None
+    workspace : str | List[str] | uuid.UUID | List[uuid.UUID], default=None
         The name or id of the workspace(s).
         Defaults to None which resolves to migrating all workspaces within the source capacity to the target capacity.
     """
@@ -274,7 +274,7 @@ def unassign_workspaces_from_capacity(

     Parameters
     ----------
-    workspaces : str | List[str] | UUID | List[UUID]
+    workspaces : str | List[str] | uuid.UUID | List[uuid.UUID]
         The Fabric workspace name(s) or id(s).
     """
     if isinstance(workspaces, str):
@@ -512,8 +512,6 @@ def list_datasets(
         Returns a subset of a results based on Odata filter query parameter condition.
     skip : int, default=None
         Skips the first n results.
-    token_provider : Optional[TokenProvider] = None,
-        Authentication provider used to be use in the request. Supports Service Principal.

     Returns
     -------
@@ -673,7 +671,7 @@ def list_workspace_access_details(

     Parameters
     ----------
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name or id.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -721,7 +719,7 @@ def list_activity_events(
     end_time: str,
     activity_filter: Optional[str] = None,
     user_id_filter: Optional[str] = None,
-    return_dataframe: Optional[bool] = True,
+    return_dataframe: bool = True,
 ) -> pd.DataFrame | dict:
     """
     Shows a list of audit activity events for a tenant.
@@ -1037,7 +1035,7 @@ def get_capacity_assignment_status(workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name or id.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
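
These admin hunks are mostly docstring cleanups (UUID rendered as uuid.UUID, the stale token_provider entry removed from list_datasets) plus one annotation fix: return_dataframe on list_activity_events is a plain bool. A sketch of the documented calls (admin scope, so tenant-admin permissions are required; the ISO-8601 timestamp format is an assumption based on the underlying activity-events API):

    from sempy_labs import admin

    # Filter the admin workspace inventory by capacity and state.
    df = admin.list_workspaces(capacity="My Capacity", workspace_state="Active")

    # return_dataframe=False returns the raw dict instead of a DataFrame.
    events = admin.list_activity_events(
        start_time="2025-01-01T00:00:00",
        end_time="2025-01-01T23:59:59",
        return_dataframe=False,
    )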
sempy_labs/admin/_external_data_share.py CHANGED
@@ -75,11 +75,11 @@ def revoke_external_data_share(

     Parameters
     ----------
-    external_data_share_id : UUID
+    external_data_share_id : uuid.UUID
         The external data share ID.
-    item_id : int, default=None
+    item_id : uuid.UUID, default=None
         The Item ID
-    workspace : str
+    workspace : str | uuid.UUID
         The Fabric workspace name or id.
     """
     (workspace, workspace_id) = _resolve_workspace_name_and_id(workspace)
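
For reference, the corrected docstring matches a call shaped like this (both GUIDs are placeholders; assumes revoke_external_data_share is exported from sempy_labs.admin):

    from uuid import UUID

    from sempy_labs import admin

    admin.revoke_external_data_share(
        external_data_share_id=UUID("00000000-0000-0000-0000-000000000000"),
        item_id=UUID("00000000-0000-0000-0000-000000000000"),
        workspace="Sales Workspace",  # a workspace UUID is also accepted
    )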
sempy_labs/admin/_items.py CHANGED
@@ -65,7 +65,7 @@ def _resolve_item_name_and_id(

 def list_items(
     capacity: Optional[str | UUID] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     state: Optional[str] = None,
     type: Optional[str] = None,
     item: Optional[str | UUID] = None,
@@ -78,9 +78,9 @@ def list_items(

     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
         The capacity name or id.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -195,7 +195,7 @@ def list_item_access_details(
         Name or id of the Fabric item.
     type : str, default=None
         Type of Fabric item.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name or id.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
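
With workspace widened to str | UUID, the admin item listing can be filtered by GUID directly. A sketch (placeholder GUID; "SemanticModel" as the type filter is an assumption based on Fabric item-type naming):

    from uuid import UUID

    from sempy_labs import admin

    df = admin.list_items(
        workspace=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder
        type="SemanticModel",
    )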
sempy_labs/admin/_scanner.py CHANGED
@@ -5,8 +5,10 @@ from sempy.fabric.exceptions import FabricHTTPException
 import numpy as np
 import time
 from sempy_labs.admin._basic_functions import list_workspaces
+from sempy._utils._log import log


+@log
 def scan_workspaces(
     data_source_details: bool = False,
     dataset_schema: bool = False,
@@ -35,7 +37,7 @@ def scan_workspaces(
         Whether to return lineage info (upstream dataflows, tiles, data source IDs)
     artifact_users : bool, default=False
         Whether to return user details for a Power BI item (such as a report or a dashboard)
-    workspace : str | List[str] | UUID | List[UUID], default=None
+    workspace : str | List[str] | uuid.UUID | List[uuid.UUID], default=None
         The required workspace name(s) or id(s) to be scanned

     Returns
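
The only behavioral change here is the @log decorator (sempy's logging wrapper); the parameters are unchanged apart from the docstring. A sketch of a scan call using only the parameters visible in this diff (placeholder workspace ID; assumes scan_workspaces is exported from sempy_labs.admin):

    from sempy_labs import admin

    result = admin.scan_workspaces(
        data_source_details=True,
        dataset_schema=True,
        workspace=["00000000-0000-0000-0000-000000000000"],  # placeholder ID(s)
    )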
sempy_labs/directlake/_directlake_schema_compare.py CHANGED
@@ -2,6 +2,8 @@ import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
     format_dax_object_name,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from IPython.display import display
 from sempy_labs.lakehouse import get_lakehouse_columns
@@ -9,12 +11,13 @@ from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
+from uuid import UUID


 @log
 def direct_lake_schema_compare(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     **kwargs,
 ):
     """
@@ -22,10 +25,10 @@ def direct_lake_schema_compare(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -41,10 +44,11 @@
         )
         del kwargs["lakehouse_workspace"]

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
     )
     lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)

@@ -53,15 +57,15 @@
             f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
         )

-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)

     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
+            f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode."
         )

-    dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)

     dfT.rename(columns={"Type": "Table Type"}, inplace=True)
@@ -92,7 +96,7 @@
         )
     else:
         print(
-            f"{icons.yellow_dot} The following tables exist in the '{dataset}' semantic model within the '{workspace}' workspace"
+            f"{icons.yellow_dot} The following tables exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace"
             f" but do not exist in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
         )
         display(missingtbls)
@@ -102,7 +106,7 @@
         )
     else:
         print(
-            f"{icons.yellow_dot} The following columns exist in the '{dataset}' semantic model within the '{workspace}' workspace "
+            f"{icons.yellow_dot} The following columns exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace "
             f"but do not exist in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
         )
         display(missingcols)
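
After this change, both the model and the workspace can be addressed by GUID, and mismatch messages name the resolved model and workspace. A sketch (placeholder GUID; assumes the directlake subpackage export, as used throughout the library's documentation):

    from uuid import UUID

    from sempy_labs import directlake

    directlake.direct_lake_schema_compare(
        dataset=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder model ID
        workspace="Sales Workspace",
    )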
sempy_labs/directlake/_directlake_schema_sync.py CHANGED
@@ -3,16 +3,21 @@ import sempy.fabric as fabric
 from sempy_labs.lakehouse import get_lakehouse_columns
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from sempy_labs.tom import connect_semantic_model
-from sempy_labs._helper_functions import _convert_data_type
+from sempy_labs._helper_functions import (
+    _convert_data_type,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def direct_lake_schema_sync(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     add_to_model: bool = False,
     **kwargs,
 ):
@@ -21,10 +26,10 @@ def direct_lake_schema_sync(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     add_to_model : bool, default=False
@@ -45,10 +50,11 @@ def direct_lake_schema_sync(
         )
         del kwargs["lakehouse_workspace"]

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
     )

     if artifact_type == "Warehouse":
@@ -60,7 +66,7 @@ def direct_lake_schema_sync(
     lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)

     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:

         for i, r in lc.iterrows():
@@ -86,7 +92,7 @@ def direct_lake_schema_sync(
                 for c in tom.all_columns()
             ):
                 print(
-                    f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset}' semantic model within the '{workspace}' workspace."
+                    f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                 )
                 if add_to_model:
                     dt = _convert_data_type(dType)
@@ -97,5 +103,5 @@ def direct_lake_schema_sync(
                         data_type=dt,
                     )
                     print(
-                        f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset}' semantic model within the '{workspace}' workspace."
+                        f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                     )
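
Same resolution pattern as the compare function above; with add_to_model=True the sync also adds any lakehouse columns missing from the model. A sketch (placeholder GUIDs; assumes the directlake subpackage export):

    from uuid import UUID

    from sempy_labs import directlake

    directlake.direct_lake_schema_sync(
        dataset=UUID("00000000-0000-0000-0000-000000000000"),    # placeholder model ID
        workspace=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder workspace ID
        add_to_model=True,
    )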