semantic-link-labs 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.

Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.

Files changed (54)
  1. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -13
  5. sempy_labs/_ai.py +43 -24
  6. sempy_labs/_clear_cache.py +4 -5
  7. sempy_labs/_connections.py +77 -70
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +55 -44
  10. sempy_labs/_helper_functions.py +13 -6
  11. sempy_labs/_icons.py +14 -0
  12. sempy_labs/_list_functions.py +491 -304
  13. sempy_labs/_model_auto_build.py +4 -3
  14. sempy_labs/_model_bpa.py +131 -1118
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +14 -12
  17. sempy_labs/_one_lake_integration.py +11 -5
  18. sempy_labs/_query_scale_out.py +89 -81
  19. sempy_labs/_refresh_semantic_model.py +16 -10
  20. sempy_labs/_translations.py +213 -287
  21. sempy_labs/_vertipaq.py +53 -37
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  25. sempy_labs/directlake/_fallback.py +5 -3
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -2
  28. sempy_labs/directlake/_guardrails.py +3 -3
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +132 -9
  33. sempy_labs/directlake/_warm_cache.py +6 -3
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  36. sempy_labs/lakehouse/_lakehouse.py +2 -1
  37. sempy_labs/lakehouse/_shortcuts.py +19 -12
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -15
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +2 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +8 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +10 -5
  48. sempy_labs/report/_report_functions.py +67 -29
  49. sempy_labs/report/_report_rebind.py +9 -8
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +555 -152
  52. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
sempy_labs/_model_dependencies.py

@@ -1,10 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import format_dax_object_name
+import sempy_labs._icons as icons
 from typing import Any, Dict, Optional
 from anytree import Node, RenderTree
 from sempy._utils._log import log
 
+
 @log
 def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
     """
@@ -106,9 +108,9 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
                     "Table Name": r["Table Name"],
                     "Object Name": r["Object Name"],
                     "Object Type": r["Object Type"],
-                    "Referenced Object": dependency[5],
-                    "Referenced Table": dependency[4],
-                    "Referenced Object Type": dependency[6],
+                    "Referenced Object": dependency[4],
+                    "Referenced Table": dependency[3],
+                    "Referenced Object Type": dependency[5],
                     "Done": d,
                     "Full Object Name": r["Full Object Name"],
                     "Referenced Full Object Name": dependency[
@@ -128,6 +130,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
 
     return df
 
+
 @log
 def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
     """
@@ -162,7 +165,7 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
         ,[REFERENCED_TABLE] AS [Referenced Table]
         ,[REFERENCED_OBJECT] AS [Referenced Object]
         ,[REFERENCED_OBJECT_TYPE] AS [Referenced Object Type]
-        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
+        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
         """,
     )
 
@@ -294,9 +297,6 @@ def measure_dependency_tree(
 
     # Create a dictionary to hold references to nodes
     node_dict: Dict[str, Any] = {}
-    measureIcon = "\u2211"
-    tableIcon = "\u229E"
-    columnIcon = "\u229F"
 
     # Populate the tree
     for _, row in df_filt.iterrows():
@@ -311,24 +311,26 @@
         if parent_node is None:
             parent_node = Node(parent_node_name)
             node_dict[parent_node_name] = parent_node
-            parent_node.custom_property = measureIcon + " "
+            parent_node.custom_property = icons.measure_icon + " "
 
         # Create the child node
         child_node_name = ref_obj_name
         child_node = Node(child_node_name, parent=parent_node)
         if ref_obj_type == "Column":
-            child_node.custom_property = columnIcon + " '" + ref_obj_table_name + "'"
+            child_node.custom_property = (
+                icons.column_icon + " '" + ref_obj_table_name + "'"
+            )
         elif ref_obj_type == "Table":
-            child_node.custom_property = tableIcon + " "
+            child_node.custom_property = icons.table_icon + " "
         elif ref_obj_type == "Measure":
-            child_node.custom_property = measureIcon + " "
+            child_node.custom_property = icons.measure_icon + " "
 
         # Update the dictionary with the child node
         node_dict[child_node_name] = child_node
 
     # Visualize the tree structure using RenderTree
     for pre, _, node in RenderTree(node_dict[measure_name]):
-        if tableIcon in node.custom_property:
+        if icons.table_icon in node.custom_property:
             print(f"{pre}{node.custom_property}'{node.name}'")
         else:
             print(f"{pre}{node.custom_property}[{node.name}]")
sempy_labs/_one_lake_integration.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
@@ -6,6 +5,7 @@ from sempy._utils._log import log
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
 import sempy_labs._icons as icons
 
+
 @log
 def export_model_to_onelake(
     dataset: str,
@@ -42,7 +42,9 @@ def export_model_to_onelake(
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]
 
     if len(dfD_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
+        )
 
     tmsl = f"""
     {{
@@ -65,8 +67,10 @@
             f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
         )
     except Exception as e:
-        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration") from e
-
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
+        ) from e
+
     # Create shortcuts if destination lakehouse is specified
     if destination_lakehouse is not None:
         # Destination...
@@ -142,4 +146,6 @@ def export_model_to_onelake(
             else:
                 print(response.status_code)
         except Exception as e:
-            raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table.") from e
+            raise ValueError(
+                f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table."
+            ) from e
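The re-wrapped raise statements above keep exception chaining via "from e", so the original OneLake or shortcut failure stays attached to the friendlier error. A generic illustration of that pattern (the wrapper function and message below are placeholders, not code from the package):

    def export_or_raise(do_export, dataset: str, workspace: str):
        # Re-raise with a readable message while preserving the original
        # traceback through exception chaining ("from e").
        try:
            do_export()
        except Exception as e:
            raise ValueError(
                f"The '{dataset}' semantic model's tables were not exported to the '{workspace}' workspace."
            ) from e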
sempy_labs/_query_scale_out.py

@@ -1,9 +1,9 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import resolve_dataset_id
 from typing import Optional
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def qso_sync(dataset: str, workspace: Optional[str] = None):
@@ -39,12 +39,12 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync"
     )
 
-    if response.status_code == 200:
-        print(
-            f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} QSO sync failed for the '{dataset}' semantic model within the '{workspace}' workspace.")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
+    )
+
 
 def qso_sync_status(dataset: str, workspace: Optional[str] = None):
     """
@@ -97,54 +97,54 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus"
    )
 
-    if response.status_code == 200:
-        o = response.json()
-        sos = o["scaleOutStatus"]
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    o = response.json()
+    sos = o["scaleOutStatus"]
+
+    if sos == "Enabled":
+        new_data = {
+            "Scale Out Status": o["scaleOutStatus"],
+            "Sync Start Time": o["syncStartTime"],
+            "Sync End Time": o["syncEndTime"],
+            "Commit Version": o["commitVersion"],
+            "Commit Timestamp": o["commitTimestamp"],
+            "Target Sync Version": o["targetSyncVersion"],
+            "Target Sync Timestamp": o["targetSyncTimestamp"],
+            "Trigger Reason": o["triggerReason"],
+            "Min Active Read Version": o["minActiveReadVersion"],
+            "Min Active Read Timestamp": o["minActiveReadTimestamp"],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-        if sos == "Enabled":
+        for r in o["scaleOutReplicas"]:
             new_data = {
-                "Scale Out Status": o["scaleOutStatus"],
-                "Sync Start Time": o["syncStartTime"],
-                "Sync End Time": o["syncEndTime"],
-                "Commit Version": o["commitVersion"],
-                "Commit Timestamp": o["commitTimestamp"],
-                "Target Sync Version": o["targetSyncVersion"],
-                "Target Sync Timestamp": o["targetSyncTimestamp"],
-                "Trigger Reason": o["triggerReason"],
-                "Min Active Read Version": o["minActiveReadVersion"],
-                "Min Active Read Timestamp": o["minActiveReadTimestamp"],
+                "Replica ID": r["replicaId"],
+                "Replica Type": r["replicaType"],
+                "Replica Version": str(r["replicaVersion"]),
+                "Replica Timestamp": r["replicaTimestamp"],
             }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-            for r in o["scaleOutReplicas"]:
-                new_data = {
-                    "Replica ID": r["replicaId"],
-                    "Replica Type": r["replicaType"],
-                    "Replica Version": str(r["replicaVersion"]),
-                    "Replica Timestamp": r["replicaTimestamp"],
-                }
-                dfRep = pd.concat(
-                    [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-
-            df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
-            df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
-            df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
-            df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
-            df["Min Active Read Timestamp"] = pd.to_datetime(
-                df["Min Active Read Timestamp"]
+            dfRep = pd.concat(
+                [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
             )
-            dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
-            df["Commit Version"] = df["Commit Version"].astype("int")
-            df["Target Sync Version"] = df["Target Sync Version"].astype("int")
-            df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
 
-            return df, dfRep
-        else:
-            print(f"{sos}\n\n")
-            return df, dfRep
+        df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
+        df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
+        df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
+        df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
+        df["Min Active Read Timestamp"] = pd.to_datetime(
+            df["Min Active Read Timestamp"]
+        )
+        dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
+        df["Commit Version"] = df["Commit Version"].astype("int")
+        df["Target Sync Version"] = df["Target Sync Version"].astype("int")
+        df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
+
+        return df, dfRep
     else:
-        return response.status_code
+        print(f"{sos}\n\n")
+        return df, dfRep
 
 
 def disable_qso(dataset: str, workspace: Optional[str] = None):
@@ -179,14 +179,15 @@ def disable_qso(dataset: str, workspace: Optional[str] = None):
     response = client.patch(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
     )
-    if response.status_code == 200:
-        df = list_qso_settings(dataset=dataset, workspace=workspace)
-        print(
-            f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
-        )
-        return df
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    df = list_qso_settings(dataset=dataset, workspace=workspace)
+    print(
+        f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
+    )
+
+    return df
 
 
 def set_qso(
@@ -246,16 +247,19 @@
             f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
             json=request_body,
         )
-        if response.status_code == 200:
-            df = list_qso_settings(dataset=dataset, workspace=workspace)
-            print(
-                f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
-            )
-            return df
-        else:
-            raise ValueError(f"{icons.red_dot} {response.status_code}")
+        if response.status_code != 200:
+            raise FabricHTTPException(response)
+
+        df = list_qso_settings(dataset=dataset, workspace=workspace)
+        print(
+            f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
+        return df
     else:
-        raise ValueError(f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\"")
+        raise ValueError(
+            f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\""
+        )
+
 
 def set_semantic_model_storage_format(
     dataset: str, storage_format: str, workspace: Optional[str] = None
@@ -301,19 +305,20 @@ def set_semantic_model_storage_format(
     elif storage_format == "Small":
         request_body = {"targetStorageMode": "Abf"}
     else:
-        raise ValueError(f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}.")
+        raise ValueError(
+            f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
+        )
 
     client = fabric.PowerBIRestClient()
     response = client.patch(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
     )
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(f"{icons.green_dot} Semantic model storage format set to '{storage_format}'.")
+
+    return response.status_code
 
-    if response.status_code == 200:
-        return print(
-            f"{icons.green_dot} Semantic model storage format set to '{storage_format}'."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
 
 def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
     """
@@ -365,8 +370,12 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
             "Dataset Id": v.get("id"),
             "Dataset Name": v.get("name"),
             "Storage Mode": sm,
-            "QSO Auto Sync Enabled": v.get("queryScaleOutSettings",{}).get("autoSyncReadOnlyReplicas"),
-            "QSO Max Read Only Replicas": v.get("queryScaleOutSettings",{}).get("maxReadOnlyReplicas"),
+            "QSO Auto Sync Enabled": v.get("queryScaleOutSettings", {}).get(
+                "autoSyncReadOnlyReplicas"
+            ),
+            "QSO Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
+                "maxReadOnlyReplicas"
+            ),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -421,9 +430,8 @@
     client = fabric.PowerBIRestClient()
     response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)
 
-    if response.status_code == 200:
-        print(
-            f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
+    )
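Throughout these query-scale-out functions, the bespoke status-code branches are replaced by raising sempy's FabricHTTPException whenever a REST call does not return 200. A condensed sketch of that control flow (the helper name and its generic path handling are illustrative, not part of the package):

    import sempy.fabric as fabric
    from sempy.fabric.exceptions import FabricHTTPException

    def _patch_or_raise(path: str, body: dict):
        # Issue the PATCH and fail fast with the full response attached,
        # instead of printing or returning a bare status code.
        client = fabric.PowerBIRestClient()
        response = client.patch(path, json=body)
        if response.status_code != 200:
            raise FabricHTTPException(response)
        return response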
sempy_labs/_refresh_semantic_model.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import resolve_dataset_id
@@ -6,6 +5,7 @@ from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 @log
@@ -79,7 +79,9 @@ def refresh_semantic_model(
     ]
 
     if refresh_type not in refreshTypes:
-        raise ValueError(f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}.")
+        raise ValueError(
+            f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}."
+        )
 
     if len(objects) == 0:
         requestID = fabric.refresh_dataset(
@@ -114,7 +116,9 @@
         if status == "Completed":
            break
        elif status == "Failed":
-            raise ValueError(f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed.")
+            raise ValueError(
+                f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+            )
        elif status == "Cancelled":
            print(
                f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
@@ -155,7 +159,9 @@ def cancel_dataset_refresh(
 
     if request_id is None:
         if len(rr_filt) == 0:
-            raise ValueError(f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace.")
+            raise ValueError(
+                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
+            )
 
         request_id = rr_filt["Request Id"].iloc[0]
 
@@ -166,9 +172,9 @@
     response = client.delete(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes/{request_id}"
     )
-    if response.status_code == 200:
-        print(
-            f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
-        )
-    else:
-        print(response.status_code)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+    )
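The refresh functions keep their polling loop but now raise on a failed refresh rather than printing the status. A schematic version of that loop (get_status and the sleep interval are stand-ins; the actual status lookup is not shown in these hunks):

    import time

    def wait_for_refresh(get_status, poll_seconds: int = 30):
        # Poll until the refresh completes, raising if it fails and
        # reporting a cancellation, mirroring the branch structure above.
        while True:
            status = get_status()
            if status == "Completed":
                break
            elif status == "Failed":
                raise ValueError("The refresh has failed.")
            elif status == "Cancelled":
                print("The refresh has been cancelled.")
                break
            time.sleep(poll_seconds)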