semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
@@ -16,7 +16,7 @@ def create_managed_private_endpoint(
16
16
  target_private_link_resource_id: UUID,
17
17
  target_subresource_type: str,
18
18
  request_message: Optional[str] = None,
19
- workspace: Optional[str] = None,
19
+ workspace: Optional[str | UUID] = None,
20
20
  ):
21
21
  """
22
22
  Creates a managed private endpoint.
@@ -27,19 +27,19 @@ def create_managed_private_endpoint(
27
27
  ----------
28
28
  name: str
29
29
  Name of the managed private endpoint.
30
- target_private_link_resource_id: UUID
30
+ target_private_link_resource_id: uuid.UUID
31
31
  Resource Id of data source for which private endpoint needs to be created.
32
32
  target_subresource_type : str
33
33
  Sub-resource pointing to Private-link resource.
34
34
  request_message : str, default=None
35
35
  Message to approve private endpoint request. Should not be more than 140 characters.
36
- workspace : str, default=None
37
- The Fabric workspace name.
36
+ workspace : str | uuid.UUID, default=None
37
+ The Fabric workspace name or ID.
38
38
  Defaults to None which resolves to the workspace of the attached lakehouse
39
39
  or if no lakehouse attached, resolves to the workspace of the notebook.
40
40
  """
41
41
 
42
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
42
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
43
43
 
44
44
  request_body = {
45
45
  "name": name,
@@ -62,11 +62,13 @@ def create_managed_private_endpoint(
62
62
  lro(client, response, status_codes=[201, 202])
63
63
 
64
64
  print(
65
- f"{icons.green_dot} The '{name}' managed private endpoint has been created within the '{workspace}' workspace."
65
+ f"{icons.green_dot} The '{name}' managed private endpoint has been created within the '{workspace_name}' workspace."
66
66
  )
67
67
 
68
68
 
69
- def list_managed_private_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
69
+ def list_managed_private_endpoints(
70
+ workspace: Optional[str | UUID] = None,
71
+ ) -> pd.DataFrame:
70
72
  """
71
73
  Shows the managed private endpoints within a workspace.
72
74
 
@@ -74,8 +76,8 @@ def list_managed_private_endpoints(workspace: Optional[str] = None) -> pd.DataFr
74
76
 
75
77
  Parameters
76
78
  ----------
77
- workspace : str, default=None
78
- The Fabric workspace name.
79
+ workspace : str | uuid.UUID, default=None
80
+ The Fabric workspace name or ID.
79
81
  Defaults to None which resolves to the workspace of the attached lakehouse
80
82
  or if no lakehouse attached, resolves to the workspace of the notebook.
81
83
 
@@ -97,7 +99,7 @@ def list_managed_private_endpoints(workspace: Optional[str] = None) -> pd.DataFr
97
99
  ]
98
100
  )
99
101
 
100
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
102
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
101
103
 
102
104
  client = fabric.FabricRestClient()
103
105
  response = client.get(f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints")
@@ -124,7 +126,7 @@ def list_managed_private_endpoints(workspace: Optional[str] = None) -> pd.DataFr
124
126
 
125
127
 
126
128
  def delete_managed_private_endpoint(
127
- managed_private_endpoint: str, workspace: Optional[str] = None
129
+ managed_private_endpoint: str, workspace: Optional[str | UUID] = None
128
130
  ):
129
131
  """
130
132
  Deletes a Fabric managed private endpoint.
@@ -135,20 +137,20 @@ def delete_managed_private_endpoint(
135
137
  ----------
136
138
  managed_private_endpoint: str
137
139
  Name of the managed private endpoint.
138
- workspace : str, default=None
139
- The Fabric workspace name.
140
+ workspace : str | uuid.UUID, default=None
141
+ The Fabric workspace name or ID.
140
142
  Defaults to None which resolves to the workspace of the attached lakehouse
141
143
  or if no lakehouse attached, resolves to the workspace of the notebook.
142
144
  """
143
145
 
144
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
146
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
145
147
 
146
- df = list_managed_private_endpoints(workspace=workspace)
148
+ df = list_managed_private_endpoints(workspace=workspace_id)
147
149
  df_filt = df[df["Managed Private Endpoint Name"] == managed_private_endpoint]
148
150
 
149
151
  if len(df_filt) == 0:
150
152
  raise ValueError(
151
- f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the '{workspace}' workspace."
153
+ f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the '{workspace_name}' workspace."
152
154
  )
153
155
 
154
156
  item_id = df_filt["Managed Private Endpoint Id"].iloc[0]
@@ -162,5 +164,5 @@ def delete_managed_private_endpoint(
162
164
  raise FabricHTTPException(response)
163
165
 
164
166
  print(
165
- f"{icons.green_dot} The '{managed_private_endpoint}' managed private endpoint within the '{workspace}' workspace has been deleted."
167
+ f"{icons.green_dot} The '{managed_private_endpoint}' managed private endpoint within the '{workspace_name}' workspace has been deleted."
166
168
  )
@@ -10,9 +10,10 @@ from sempy_labs._helper_functions import (
10
10
  from sempy.fabric.exceptions import FabricHTTPException
11
11
  import sempy_labs._icons as icons
12
12
  import base64
13
+ from uuid import UUID
13
14
 
14
15
 
15
- def list_mirrored_databases(workspace: Optional[str] = None) -> pd.DataFrame:
16
+ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
16
17
  """
17
18
  Shows the mirrored databases within a workspace.
18
19
 
@@ -20,8 +21,8 @@ def list_mirrored_databases(workspace: Optional[str] = None) -> pd.DataFrame:
20
21
 
21
22
  Parameters
22
23
  ----------
23
- workspace : str, default=None
24
- The Fabric workspace name.
24
+ workspace : str | uuid.UUID, default=None
25
+ The Fabric workspace name or ID.
25
26
  Defaults to None which resolves to the workspace of the attached lakehouse
26
27
  or if no lakehouse attached, resolves to the workspace of the notebook.
27
28
 
@@ -44,7 +45,7 @@ def list_mirrored_databases(workspace: Optional[str] = None) -> pd.DataFrame:
44
45
  ]
45
46
  )
46
47
 
47
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
48
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
48
49
 
49
50
  client = fabric.FabricRestClient()
50
51
  response = client.get(f"/v1/workspaces/{workspace_id}/mirroredDatabases")
@@ -72,7 +73,7 @@ def list_mirrored_databases(workspace: Optional[str] = None) -> pd.DataFrame:
72
73
 
73
74
 
74
75
  def create_mirrored_database(
75
- name: str, description: Optional[str] = None, workspace: Optional[str] = None
76
+ name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
76
77
  ):
77
78
  """
78
79
  Creates a Fabric mirrored database.
@@ -85,13 +86,13 @@ def create_mirrored_database(
85
86
  Name of the mirrored database.
86
87
  description : str, default=None
87
88
  A description of the mirrored database.
88
- workspace : str, default=None
89
- The Fabric workspace name.
89
+ workspace : str | uuid.UUID, default=None
90
+ The Fabric workspace name or ID.
90
91
  Defaults to None which resolves to the workspace of the attached lakehouse
91
92
  or if no lakehouse attached, resolves to the workspace of the notebook.
92
93
  """
93
94
 
94
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
95
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
95
96
 
96
97
  request_body = {"displayName": name}
97
98
 
@@ -107,11 +108,13 @@ def create_mirrored_database(
107
108
  raise FabricHTTPException(response)
108
109
 
109
110
  print(
110
- f"{icons.green_dot} The '{name}' mirrored database has been created within the '{workspace}' workspace."
111
+ f"{icons.green_dot} The '{name}' mirrored database has been created within the '{workspace_name}' workspace."
111
112
  )
112
113
 
113
114
 
114
- def delete_mirrored_database(mirrored_database: str, workspace: Optional[str] = None):
115
+ def delete_mirrored_database(
116
+ mirrored_database: str, workspace: Optional[str | UUID] = None
117
+ ):
115
118
  """
116
119
  Deletes a mirrored database.
117
120
 
@@ -121,16 +124,16 @@ def delete_mirrored_database(mirrored_database: str, workspace: Optional[str] =
121
124
  ----------
122
125
  mirrored_database: str
123
126
  Name of the mirrored database.
124
- workspace : str, default=None
125
- The Fabric workspace name.
127
+ workspace : str | uuid.UUID, default=None
128
+ The Fabric workspace name or ID.
126
129
  Defaults to None which resolves to the workspace of the attached lakehouse
127
130
  or if no lakehouse attached, resolves to the workspace of the notebook.
128
131
  """
129
132
 
130
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
133
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
131
134
 
132
135
  item_id = fabric.resolve_item_id(
133
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
136
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
134
137
  )
135
138
 
136
139
  client = fabric.FabricRestClient()
@@ -142,12 +145,12 @@ def delete_mirrored_database(mirrored_database: str, workspace: Optional[str] =
142
145
  raise FabricHTTPException(response)
143
146
 
144
147
  print(
145
- f"{icons.green_dot} The '{mirrored_database}' mirrored database within the '{workspace}' workspace has been deleted."
148
+ f"{icons.green_dot} The '{mirrored_database}' mirrored database within the '{workspace_name}' workspace has been deleted."
146
149
  )
147
150
 
148
151
 
149
152
  def get_mirroring_status(
150
- mirrored_database: str, workspace: Optional[str] = None
153
+ mirrored_database: str, workspace: Optional[str | UUID] = None
151
154
  ) -> str:
152
155
  """
153
156
  Get the status of the mirrored database.
@@ -158,8 +161,8 @@ def get_mirroring_status(
158
161
  ----------
159
162
  mirrored_database: str
160
163
  Name of the mirrored database.
161
- workspace : str, default=None
162
- The Fabric workspace name.
164
+ workspace : str | uuid.UUID, default=None
165
+ The Fabric workspace name or ID.
163
166
  Defaults to None which resolves to the workspace of the attached lakehouse
164
167
  or if no lakehouse attached, resolves to the workspace of the notebook.
165
168
 
@@ -169,10 +172,10 @@ def get_mirroring_status(
169
172
  The status of a mirrored database.
170
173
  """
171
174
 
172
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
175
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
173
176
 
174
177
  item_id = fabric.resolve_item_id(
175
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
178
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
176
179
  )
177
180
 
178
181
  client = fabric.FabricRestClient()
@@ -187,7 +190,7 @@ def get_mirroring_status(
187
190
 
188
191
 
189
192
  def get_tables_mirroring_status(
190
- mirrored_database: str, workspace: Optional[str] = None
193
+ mirrored_database: str, workspace: Optional[str | UUID] = None
191
194
  ) -> pd.DataFrame:
192
195
  """
193
196
  Gets the mirroring status of the tables.
@@ -198,8 +201,8 @@ def get_tables_mirroring_status(
198
201
  ----------
199
202
  mirrored_database: str
200
203
  Name of the mirrored database.
201
- workspace : str, default=None
202
- The Fabric workspace name.
204
+ workspace : str | uuid.UUID, default=None
205
+ The Fabric workspace name or ID.
203
206
  Defaults to None which resolves to the workspace of the attached lakehouse
204
207
  or if no lakehouse attached, resolves to the workspace of the notebook.
205
208
 
@@ -209,10 +212,10 @@ def get_tables_mirroring_status(
209
212
  A pandas dataframe showing the mirroring status of the tables.
210
213
  """
211
214
 
212
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
215
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
213
216
 
214
217
  item_id = fabric.resolve_item_id(
215
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
218
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
216
219
  )
217
220
 
218
221
  client = fabric.FabricRestClient()
@@ -257,7 +260,7 @@ def get_tables_mirroring_status(
257
260
  return df
258
261
 
259
262
 
260
- def start_mirroring(mirrored_database: str, workspace: Optional[str] = None):
263
+ def start_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = None):
261
264
  """
262
265
  Starts the mirroring for a database.
263
266
 
@@ -267,16 +270,16 @@ def start_mirroring(mirrored_database: str, workspace: Optional[str] = None):
267
270
  ----------
268
271
  mirrored_database: str
269
272
  Name of the mirrored database.
270
- workspace : str, default=None
271
- The Fabric workspace name.
273
+ workspace : str | uuid.UUID, default=None
274
+ The Fabric workspace name or ID.
272
275
  Defaults to None which resolves to the workspace of the attached lakehouse
273
276
  or if no lakehouse attached, resolves to the workspace of the notebook.
274
277
  """
275
278
 
276
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
279
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
277
280
 
278
281
  item_id = fabric.resolve_item_id(
279
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
282
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
280
283
  )
281
284
 
282
285
  client = fabric.FabricRestClient()
@@ -288,11 +291,11 @@ def start_mirroring(mirrored_database: str, workspace: Optional[str] = None):
288
291
  raise FabricHTTPException(response)
289
292
 
290
293
  print(
291
- f"{icons.green_dot} Mirroring has started for the '{mirrored_database}' database within the '{workspace}' workspace."
294
+ f"{icons.green_dot} Mirroring has started for the '{mirrored_database}' database within the '{workspace_name}' workspace."
292
295
  )
293
296
 
294
297
 
295
- def stop_mirroring(mirrored_database: str, workspace: Optional[str] = None):
298
+ def stop_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = None):
296
299
  """
297
300
  Stops the mirroring for a database.
298
301
 
@@ -302,16 +305,16 @@ def stop_mirroring(mirrored_database: str, workspace: Optional[str] = None):
302
305
  ----------
303
306
  mirrored_database: str
304
307
  Name of the mirrored database.
305
- workspace : str, default=None
306
- The Fabric workspace name.
308
+ workspace : str | uuid.UUID, default=None
309
+ The Fabric workspace name or ID.
307
310
  Defaults to None which resolves to the workspace of the attached lakehouse
308
311
  or if no lakehouse attached, resolves to the workspace of the notebook.
309
312
  """
310
313
 
311
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
314
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
312
315
 
313
316
  item_id = fabric.resolve_item_id(
314
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
317
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
315
318
  )
316
319
 
317
320
  client = fabric.FabricRestClient()
@@ -323,12 +326,12 @@ def stop_mirroring(mirrored_database: str, workspace: Optional[str] = None):
323
326
  raise FabricHTTPException(response)
324
327
 
325
328
  print(
326
- f"{icons.green_dot} Mirroring has stopped for the '{mirrored_database}' database within the '{workspace}' workspace."
329
+ f"{icons.green_dot} Mirroring has stopped for the '{mirrored_database}' database within the '{workspace_name}' workspace."
327
330
  )
328
331
 
329
332
 
330
333
  def get_mirrored_database_definition(
331
- mirrored_database: str, workspace: Optional[str] = None, decode: bool = True
334
+ mirrored_database: str, workspace: Optional[str | UUID] = None, decode: bool = True
332
335
  ) -> str:
333
336
  """
334
337
  Obtains the mirrored database definition.
@@ -339,8 +342,8 @@ def get_mirrored_database_definition(
339
342
  ----------
340
343
  mirrored_database : str
341
344
  The name of the mirrored database.
342
- workspace : str, default=None
343
- The name of the workspace.
345
+ workspace : str | uuid.UUID, default=None
346
+ The name or ID of the workspace.
344
347
  Defaults to None which resolves to the workspace of the attached lakehouse
345
348
  or if no lakehouse attached, resolves to the workspace of the notebook.
346
349
  decode : bool, default=True
@@ -353,9 +356,9 @@ def get_mirrored_database_definition(
353
356
  The mirrored database definition.
354
357
  """
355
358
 
356
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
359
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
357
360
  item_id = fabric.resolve_item_id(
358
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
361
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
359
362
  )
360
363
  client = fabric.FabricRestClient()
361
364
  response = client.post(
@@ -378,7 +381,7 @@ def get_mirrored_database_definition(
378
381
  def update_mirrored_database_definition(
379
382
  mirrored_database: str,
380
383
  mirrored_database_content: dict,
381
- workspace: Optional[str] = None,
384
+ workspace: Optional[str | UUID] = None,
382
385
  ):
383
386
  """
384
387
  Updates an existing mirrored database with a new definition.
@@ -389,17 +392,17 @@ def update_mirrored_database_definition(
389
392
  The name of the mirrored database to be updated.
390
393
  mirrored_database_content : dict
391
394
  The mirrored database definition (not in Base64 format).
392
- workspace : str, default=None
393
- The name of the workspace.
395
+ workspace : str | uuid.UUID, default=None
396
+ The name or ID of the workspace.
394
397
  Defaults to None which resolves to the workspace of the attached lakehouse
395
398
  or if no lakehouse attached, resolves to the workspace of the notebook.
396
399
  """
397
400
 
398
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
401
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
399
402
  client = fabric.FabricRestClient()
400
403
  payload = base64.b64encode(mirrored_database_content)
401
404
  item_id = fabric.resolve_item_id(
402
- item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
405
+ item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
403
406
  )
404
407
 
405
408
  request_body = {
@@ -424,5 +427,5 @@ def update_mirrored_database_definition(
424
427
  lro(client, response, return_status_code=True)
425
428
 
426
429
  print(
427
- f"{icons.green_dot} The '{mirrored_database}' mirrored database was updated within the '{workspace}' workspace."
430
+ f"{icons.green_dot} The '{mirrored_database}' mirrored database was updated within the '{workspace_name}' workspace."
428
431
  )
@@ -6,9 +6,10 @@ from sempy_labs._helper_functions import (
6
6
  pagination,
7
7
  )
8
8
  from sempy.fabric.exceptions import FabricHTTPException
9
+ from uuid import UUID
9
10
 
10
11
 
11
- def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
12
+ def list_mirrored_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
12
13
  """
13
14
  Shows the mirrored warehouses within a workspace.
14
15
 
@@ -16,8 +17,8 @@ def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
16
17
 
17
18
  Parameters
18
19
  ----------
19
- workspace : str, default=None
20
- The Fabric workspace name.
20
+ workspace : str | uuid.UUID, default=None
21
+ The Fabric workspace name or ID.
21
22
  Defaults to None which resolves to the workspace of the attached lakehouse
22
23
  or if no lakehouse attached, resolves to the workspace of the notebook.
23
24
 
@@ -31,7 +32,7 @@ def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
31
32
  columns=["Mirrored Warehouse Name", "Mirrored Warehouse Id", "Description"]
32
33
  )
33
34
 
34
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
35
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
35
36
 
36
37
  client = fabric.FabricRestClient()
37
38
  response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
8
8
  pagination,
9
9
  )
10
10
  from sempy.fabric.exceptions import FabricHTTPException
11
+ from uuid import UUID
11
12
 
12
13
 
13
- def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
14
+ def list_ml_experiments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
14
15
  """
15
16
  Shows the ML experiments within a workspace.
16
17
 
@@ -18,8 +19,8 @@ def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
18
19
 
19
20
  Parameters
20
21
  ----------
21
- workspace : str, default=None
22
- The Fabric workspace name.
22
+ workspace : str | uuid.UUID, default=None
23
+ The Fabric workspace name or ID.
23
24
  Defaults to None which resolves to the workspace of the attached lakehouse
24
25
  or if no lakehouse attached, resolves to the workspace of the notebook.
25
26
 
@@ -31,7 +32,7 @@ def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
31
32
 
32
33
  df = pd.DataFrame(columns=["ML Experiment Name", "ML Experiment Id", "Description"])
33
34
 
34
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
35
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
35
36
 
36
37
  client = fabric.FabricRestClient()
37
38
  response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
@@ -57,7 +58,7 @@ def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
57
58
 
58
59
 
59
60
  def create_ml_experiment(
60
- name: str, description: Optional[str] = None, workspace: Optional[str] = None
61
+ name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
61
62
  ):
62
63
  """
63
64
  Creates a Fabric ML experiment.
@@ -70,13 +71,13 @@ def create_ml_experiment(
70
71
  Name of the ML experiment.
71
72
  description : str, default=None
72
73
  A description of the ML experiment.
73
- workspace : str, default=None
74
- The Fabric workspace name.
74
+ workspace : str | uuid.UUID, default=None
75
+ The Fabric workspace name or ID.
75
76
  Defaults to None which resolves to the workspace of the attached lakehouse
76
77
  or if no lakehouse attached, resolves to the workspace of the notebook.
77
78
  """
78
79
 
79
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
80
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
80
81
 
81
82
  request_body = {"displayName": name}
82
83
 
@@ -91,11 +92,11 @@ def create_ml_experiment(
91
92
  lro(client, response, status_codes=[201, 202])
92
93
 
93
94
  print(
94
- f"{icons.green_dot} The '{name}' ML experiment has been created within the '{workspace}' workspace."
95
+ f"{icons.green_dot} The '{name}' ML experiment has been created within the '{workspace_name}' workspace."
95
96
  )
96
97
 
97
98
 
98
- def delete_ml_experiment(name: str, workspace: Optional[str] = None):
99
+ def delete_ml_experiment(name: str, workspace: Optional[str | UUID] = None):
99
100
  """
100
101
  Deletes a Fabric ML experiment.
101
102
 
@@ -105,16 +106,16 @@ def delete_ml_experiment(name: str, workspace: Optional[str] = None):
105
106
  ----------
106
107
  name: str
107
108
  Name of the ML experiment.
108
- workspace : str, default=None
109
- The Fabric workspace name.
109
+ workspace : str | uuid.UUID, default=None
110
+ The Fabric workspace name or ID.
110
111
  Defaults to None which resolves to the workspace of the attached lakehouse
111
112
  or if no lakehouse attached, resolves to the workspace of the notebook.
112
113
  """
113
114
 
114
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
115
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
115
116
 
116
117
  item_id = fabric.resolve_item_id(
117
- item_name=name, type="MLExperiment", workspace=workspace
118
+ item_name=name, type="MLExperiment", workspace=workspace_id
118
119
  )
119
120
 
120
121
  client = fabric.FabricRestClient()
@@ -124,5 +125,5 @@ def delete_ml_experiment(name: str, workspace: Optional[str] = None):
124
125
  raise FabricHTTPException(response)
125
126
 
126
127
  print(
127
- f"{icons.green_dot} The '{name}' ML experiment within the '{workspace}' workspace has been deleted."
128
+ f"{icons.green_dot} The '{name}' ML experiment within the '{workspace_name}' workspace has been deleted."
128
129
  )
sempy_labs/_ml_models.py CHANGED
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
8
8
  pagination,
9
9
  )
10
10
  from sempy.fabric.exceptions import FabricHTTPException
11
+ from uuid import UUID
11
12
 
12
13
 
13
- def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
14
+ def list_ml_models(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
14
15
  """
15
16
  Shows the ML models within a workspace.
16
17
 
@@ -18,8 +19,8 @@ def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
18
19
 
19
20
  Parameters
20
21
  ----------
21
- workspace : str, default=None
22
- The Fabric workspace name.
22
+ workspace : str | uuid.UUID, default=None
23
+ The Fabric workspace name or ID.
23
24
  Defaults to None which resolves to the workspace of the attached lakehouse
24
25
  or if no lakehouse attached, resolves to the workspace of the notebook.
25
26
 
@@ -31,7 +32,7 @@ def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
31
32
 
32
33
  df = pd.DataFrame(columns=["ML Model Name", "ML Model Id", "Description"])
33
34
 
34
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
35
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
35
36
 
36
37
  client = fabric.FabricRestClient()
37
38
  response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
@@ -57,7 +58,7 @@ def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
57
58
 
58
59
 
59
60
  def create_ml_model(
60
- name: str, description: Optional[str] = None, workspace: Optional[str] = None
61
+ name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
61
62
  ):
62
63
  """
63
64
  Creates a Fabric ML model.
@@ -70,13 +71,13 @@ def create_ml_model(
70
71
  Name of the ML model.
71
72
  description : str, default=None
72
73
  A description of the ML model.
73
- workspace : str, default=None
74
- The Fabric workspace name.
74
+ workspace : str | uuid.UUID, default=None
75
+ The Fabric workspace name or ID.
75
76
  Defaults to None which resolves to the workspace of the attached lakehouse
76
77
  or if no lakehouse attached, resolves to the workspace of the notebook.
77
78
  """
78
79
 
79
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
80
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
80
81
 
81
82
  request_body = {"displayName": name}
82
83
 
@@ -89,11 +90,11 @@ def create_ml_model(
89
90
  lro(client, response, status_codes=[201, 202])
90
91
 
91
92
  print(
92
- f"{icons.green_dot} The '{name}' ML model has been created within the '{workspace}' workspace."
93
+ f"{icons.green_dot} The '{name}' ML model has been created within the '{workspace_name}' workspace."
93
94
  )
94
95
 
95
96
 
96
- def delete_ml_model(name: str, workspace: Optional[str] = None):
97
+ def delete_ml_model(name: str, workspace: Optional[str | UUID] = None):
97
98
  """
98
99
  Deletes a Fabric ML model.
99
100
 
@@ -103,13 +104,13 @@ def delete_ml_model(name: str, workspace: Optional[str] = None):
103
104
  ----------
104
105
  name: str
105
106
  Name of the ML model.
106
- workspace : str, default=None
107
- The Fabric workspace name.
107
+ workspace : str | uuid.UUID, default=None
108
+ The Fabric workspace name or ID.
108
109
  Defaults to None which resolves to the workspace of the attached lakehouse
109
110
  or if no lakehouse attached, resolves to the workspace of the notebook.
110
111
  """
111
112
 
112
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
113
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
113
114
 
114
115
  item_id = fabric.resolve_item_id(
115
116
  item_name=name, type="MLModel", workspace=workspace
@@ -122,5 +123,5 @@ def delete_ml_model(name: str, workspace: Optional[str] = None):
122
123
  raise FabricHTTPException(response)
123
124
 
124
125
  print(
125
- f"{icons.green_dot} The '{name}' ML model within the '{workspace}' workspace has been deleted."
126
+ f"{icons.green_dot} The '{name}' ML model within the '{workspace_name}' workspace has been deleted."
126
127
  )