semantic-link-labs 0.8.10__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of semantic-link-labs has been flagged as potentially problematic.

Files changed (73)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
  3. sempy_labs/__init__.py +6 -2
  4. sempy_labs/_clear_cache.py +39 -37
  5. sempy_labs/_connections.py +13 -13
  6. sempy_labs/_data_pipelines.py +20 -20
  7. sempy_labs/_dataflows.py +27 -28
  8. sempy_labs/_dax.py +41 -47
  9. sempy_labs/_environments.py +26 -23
  10. sempy_labs/_eventhouses.py +16 -15
  11. sempy_labs/_eventstreams.py +16 -15
  12. sempy_labs/_external_data_shares.py +18 -20
  13. sempy_labs/_gateways.py +14 -14
  14. sempy_labs/_generate_semantic_model.py +99 -62
  15. sempy_labs/_git.py +105 -43
  16. sempy_labs/_helper_functions.py +148 -131
  17. sempy_labs/_job_scheduler.py +92 -0
  18. sempy_labs/_kql_databases.py +16 -15
  19. sempy_labs/_kql_querysets.py +16 -15
  20. sempy_labs/_list_functions.py +114 -99
  21. sempy_labs/_managed_private_endpoints.py +19 -17
  22. sempy_labs/_mirrored_databases.py +51 -48
  23. sempy_labs/_mirrored_warehouses.py +5 -4
  24. sempy_labs/_ml_experiments.py +16 -15
  25. sempy_labs/_ml_models.py +15 -14
  26. sempy_labs/_model_bpa.py +3 -3
  27. sempy_labs/_model_dependencies.py +55 -29
  28. sempy_labs/_notebooks.py +27 -25
  29. sempy_labs/_one_lake_integration.py +23 -26
  30. sempy_labs/_query_scale_out.py +67 -64
  31. sempy_labs/_refresh_semantic_model.py +25 -26
  32. sempy_labs/_spark.py +33 -32
  33. sempy_labs/_sql.py +12 -9
  34. sempy_labs/_translations.py +10 -7
  35. sempy_labs/_vertipaq.py +34 -31
  36. sempy_labs/_warehouses.py +22 -21
  37. sempy_labs/_workspace_identity.py +11 -10
  38. sempy_labs/_workspaces.py +40 -33
  39. sempy_labs/admin/_basic_functions.py +10 -12
  40. sempy_labs/admin/_external_data_share.py +3 -3
  41. sempy_labs/admin/_items.py +4 -4
  42. sempy_labs/admin/_scanner.py +3 -1
  43. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  44. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  45. sempy_labs/directlake/_dl_helper.py +25 -26
  46. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  47. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  48. sempy_labs/directlake/_get_shared_expression.py +4 -3
  49. sempy_labs/directlake/_guardrails.py +12 -6
  50. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  51. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  52. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  54. sempy_labs/directlake/_warm_cache.py +87 -65
  55. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  57. sempy_labs/lakehouse/_lakehouse.py +17 -13
  58. sempy_labs/lakehouse/_shortcuts.py +42 -23
  59. sempy_labs/migration/_create_pqt_file.py +16 -11
  60. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  61. sempy_labs/report/_download_report.py +9 -8
  62. sempy_labs/report/_generate_report.py +40 -44
  63. sempy_labs/report/_paginated.py +9 -9
  64. sempy_labs/report/_report_bpa.py +13 -9
  65. sempy_labs/report/_report_functions.py +80 -91
  66. sempy_labs/report/_report_helper.py +8 -4
  67. sempy_labs/report/_report_list_functions.py +24 -13
  68. sempy_labs/report/_report_rebind.py +17 -16
  69. sempy_labs/report/_reportwrapper.py +41 -33
  70. sempy_labs/tom/_model.py +43 -6
  71. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  72. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  73. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
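
Most of the churn across the modules below follows a single pattern, visible in the excerpts that follow: functions that previously accepted only names for 'dataset' and 'workspace' now accept a name or a UUID, and the old resolve_dataset_id helper is replaced by resolve_dataset_name_and_id, which returns both forms at once. A minimal sketch of the pattern as it recurs throughout this release (my_function is a hypothetical stand-in; the two resolvers are the real helpers from sempy_labs._helper_functions):

    from typing import Optional
    from uuid import UUID

    from sempy_labs._helper_functions import (
        resolve_workspace_name_and_id,
        resolve_dataset_name_and_id,
    )

    def my_function(dataset: str | UUID, workspace: Optional[str | UUID] = None):
        # Hypothetical function illustrating the 0.8.11 resolver pattern.
        # Each resolver accepts a name or a UUID and returns (name, id), so the
        # body can use the ID for REST calls and the name for printed messages.
        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
        print(f"'{dataset_name}' ({dataset_id}) in '{workspace_name}' ({workspace_id})")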
--- sempy_labs/_query_scale_out.py (0.8.10)
+++ sempy_labs/_query_scale_out.py (0.8.11)
@@ -1,15 +1,16 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
-def qso_sync(dataset: str, workspace: Optional[str] = None):
+def qso_sync(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace.
 
@@ -17,16 +18,16 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     client = fabric.PowerBIRestClient()
     response = client.post(
@@ -36,12 +37,12 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
+        f"{icons.green_dot} QSO sync initiated for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )
 
 
 def qso_sync_status(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[pd.DataFrame, pd.DataFrame]:
     """
     Returns the query scale-out sync status for the specified dataset from the specified workspace.
@@ -50,10 +51,10 @@ def qso_sync_status(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -81,8 +82,8 @@ def qso_sync_status(
         columns=["Replica ID", "Replica Type", "Replica Version", "Replica Timestamp"]
     )
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     client = fabric.PowerBIRestClient()
     response = client.get(
@@ -139,7 +140,9 @@ def qso_sync_status(
     return df, dfRep
 
 
-def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+def disable_qso(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Sets the max read-only replicas to 0, disabling query scale out.
 
@@ -147,10 +150,10 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -160,8 +163,8 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
        A pandas dataframe showing the current query scale out settings.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     request_body = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}
 
@@ -172,20 +175,20 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     if response.status_code != 200:
         raise FabricHTTPException(response)
 
-    df = list_qso_settings(dataset=dataset, workspace=workspace)
+    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
 
     print(
-        f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
+        f"{icons.green_dot} Query scale out has been disabled for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )
 
     return df
 
 
 def set_qso(
-    dataset: str,
+    dataset: str | UUID,
     auto_sync: bool = True,
     max_read_only_replicas: int = -1,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Sets the query scale out settings for a semantic model.
@@ -194,14 +197,14 @@ def set_qso(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     auto_sync : bool, default=True
         Whether the semantic model automatically syncs read-only replicas.
     max_read_only_replicas : int, default=-1
         To enable semantic model scale-out, set max_read_only_replicas to -1, or any non-0 value. A value of -1 allows Power BI to create as many read-only replicas as your Power BI capacity supports. You can also explicitly set the replica count to a value lower than that of the capacity maximum. Setting max_read_only_replicas to -1 is recommended.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -213,16 +216,16 @@ def set_qso(
 
     from sempy_labs._helper_functions import is_default_semantic_model
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    if is_default_semantic_model(dataset=dataset, workspace=workspace):
+    if is_default_semantic_model(dataset=dataset_id, workspace=workspace_id):
         raise ValueError(
             f"{icons.red_dot} The 'set_qso' function does not run against default semantic models."
         )
 
     if max_read_only_replicas == 0:
-        disable_qso(dataset=dataset, workspace=workspace)
+        disable_qso(dataset=dataset_id, workspace=workspace_id)
         return
 
     request_body = {
@@ -232,12 +235,12 @@ def set_qso(
         }
     }
 
-    dfL = list_qso_settings(dataset=dataset, workspace=workspace)
+    dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     storage_mode = dfL["Storage Mode"].iloc[0]
 
     if storage_mode == "Small":
         set_semantic_model_storage_format(
-            dataset=dataset, storage_format="Large", workspace=workspace
+            dataset=dataset_id, storage_format="Large", workspace=workspace_id
         )
 
     client = fabric.PowerBIRestClient()
@@ -248,34 +251,34 @@ def set_qso(
     if response.status_code != 200:
         raise FabricHTTPException(response)
 
-    df = list_qso_settings(dataset=dataset, workspace=workspace)
+    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     print(
-        f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
+        f"{icons.green_dot} Query scale out has been set on the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )
 
     return df
 
 
 def set_semantic_model_storage_format(
-    dataset: str, storage_format: str, workspace: Optional[str] = None
+    dataset: str | UUID, storage_format: str, workspace: Optional[str | UUID] = None
 ):
     """
     Sets the semantic model storage format.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     storage_format : str
         The storage format for the semantic model. Valid options: 'Large', 'Small'.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     storage_format = storage_format.capitalize()
 
@@ -295,12 +298,12 @@ def set_semantic_model_storage_format(
         f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
     )
 
-    dfL = list_qso_settings(dataset=dataset, workspace=workspace)
+    dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     current_storage_format = dfL["Storage Mode"].iloc[0]
 
     if current_storage_format == storage_format:
         print(
-            f"{icons.info} The '{dataset}' semantic model within the '{workspace}' workspace is already set to '{storage_format.lower()}' storage format."
+            f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is already set to '{storage_format.lower()}' storage format."
         )
         return
 
@@ -311,22 +314,22 @@ def set_semantic_model_storage_format(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The semantic model storage format for the '{dataset}' semantic model within the '{workspace}' workspace has been set to '{storage_format}'."
+        f"{icons.green_dot} The semantic model storage format for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been set to '{storage_format}'."
     )
 
 
 def list_qso_settings(
-    dataset: Optional[str] = None, workspace: Optional[str] = None
+    dataset: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the query scale out settings for a semantic model (or all semantic models within a workspace).
 
     Parameters
     ----------
-    dataset : str, default=None
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID, default=None
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -336,10 +339,10 @@ def list_qso_settings(
        A pandas dataframe showing the query scale out settings.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     if dataset is not None:
-        dataset_id = resolve_dataset_id(dataset, workspace)
+        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(
         columns=[
@@ -382,7 +385,7 @@ def list_qso_settings(
 
 
 def set_workspace_default_storage_format(
-    storage_format: str, workspace: Optional[str] = None
+    storage_format: str, workspace: Optional[str | UUID] = None
 ):
     """
     Sets the default storage format for semantic models within a workspace.
@@ -391,8 +394,8 @@ def set_workspace_default_storage_format(
     ----------
     storage_format : str
         The storage format for the semantic model. Valid options: 'Large', 'Small'.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -408,22 +411,22 @@ def set_workspace_default_storage_format(
         f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
     )
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     # Check current storage format
-    dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+    dfW = fabric.list_workspaces(filter=f"name eq '{workspace_name}'")
     if len(dfW) == 0:
         raise ValueError()
     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]
 
     if current_storage_format == storage_format:
         print(
-            f"{icons.info} The '{workspace}' is already set to a default storage format of '{current_storage_format}'."
+            f"{icons.info} The '{workspace_name}' is already set to a default storage format of '{current_storage_format}'."
         )
         return
 
     request_body = {
-        "name": workspace,
+        "name": workspace_name,
         "defaultDatasetStorageFormat": storage_format,
     }
 
@@ -434,5 +437,5 @@ def set_workspace_default_storage_format(
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
+        f"{icons.green_dot} The default storage format for the '{workspace_name}' workspace has been updated to '{storage_format}."
    )
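
Taken together, every public function in _query_scale_out.py now accepts 'dataset' and 'workspace' as either a name or a UUID, and internal calls pass IDs rather than names. A usage sketch of the new calling convention (this assumes a Microsoft Fabric notebook session; the IDs are placeholders):

    from uuid import UUID

    from sempy_labs._query_scale_out import list_qso_settings, qso_sync, set_qso

    dataset_id = UUID("11111111-2222-3333-4444-555555555555")    # placeholder
    workspace_id = UUID("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee")  # placeholder

    # Names still work exactly as before; UUIDs are now accepted anywhere
    # a name was. Enable scale-out, trigger a replica sync, then inspect.
    set_qso(dataset=dataset_id, max_read_only_replicas=-1, workspace=workspace_id)
    qso_sync(dataset=dataset_id, workspace=workspace_id)
    df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)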
--- sempy_labs/_refresh_semantic_model.py (0.8.10)
+++ sempy_labs/_refresh_semantic_model.py (0.8.11)
@@ -1,7 +1,6 @@
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     resolve_workspace_name_and_id,
     _get_partition_map,
     _process_and_display_chart,
@@ -27,7 +26,7 @@ def refresh_semantic_model(
     retry_count: int = 0,
     apply_refresh_policy: bool = True,
     max_parallelism: int = 10,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     visualize: bool = False,
     commit_mode: str = "transactional",
 ) -> pd.DataFrame | None:
@@ -36,7 +35,7 @@ def refresh_semantic_model(
 
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
     tables : str, List[str], default=None
         A string or a list of tables to refresh.
@@ -52,8 +51,8 @@
         Determines the maximum number of threads that can run the processing commands in parallel.
         This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods.
         Defaults to 10.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     visualize : bool, default=False
@@ -278,39 +277,40 @@ def refresh_semantic_model(
 
 @log
 def cancel_dataset_refresh(
-    dataset: str, request_id: Optional[str] = None, workspace: Optional[str] = None
+    dataset: str | UUID,
+    request_id: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Cancels the refresh of a semantic model which was executed via the `Enhanced Refresh API <https://learn.microsoft.com/power-bi/connect-data/asynchronous-refresh>`_
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     request_id : str, default=None
         The request id of a semantic model refresh.
         Defaults to finding the latest active refresh of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    rr = fabric.list_refresh_requests(dataset=dataset, workspace=workspace)
+    rr = fabric.list_refresh_requests(dataset=dataset_id, workspace=workspace_id)
     rr_filt = rr[rr["Status"] == "Unknown"]
 
     if request_id is None:
         if len(rr_filt) == 0:
             raise ValueError(
-                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
+                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
             )
 
         request_id = rr_filt["Request Id"].iloc[0]
 
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
-
     client = fabric.PowerBIRestClient()
 
     response = client.delete(
@@ -320,12 +320,14 @@ def cancel_dataset_refresh(
     if response.status_code != 200:
         raise FabricHTTPException(response)
     print(
-        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
     )
 
 
 def get_semantic_model_refresh_history(
-    dataset: str, request_id: Optional[str] = None, workspace: Optional[str] = None
+    dataset: str | UUID,
+    request_id: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
     Obtains the semantic model refresh history (refreshes executed via the Enhanced Refresh API).
@@ -334,13 +336,13 @@ def get_semantic_model_refresh_history(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     request_id : str, default=None
         The request id of a semantic model refresh.
         Defaults to None which resolves to showing all refresh requests for the given semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -350,8 +352,8 @@ def get_semantic_model_refresh_history(
        A pandas dataframe showing the semantic model refresh history.
     """
 
-    workspace_name = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace_name)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     df = pd.DataFrame(
         columns=[
             "Request Id",
@@ -363,9 +365,6 @@
         ]
     )
 
-    dataset_id = fabric.resolve_item_id(
-        item_name=dataset, workspace=workspace_id, type="SemanticModel"
-    )
     client = fabric.PowerBIRestClient()
     response = client.get(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes"