semantic-link-labs 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (53)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.5.0.dist-info/RECORD +53 -0
  3. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +51 -27
  5. sempy_labs/_ai.py +32 -51
  6. sempy_labs/_clear_cache.py +2 -3
  7. sempy_labs/_connections.py +39 -38
  8. sempy_labs/_dax.py +5 -9
  9. sempy_labs/_generate_semantic_model.py +15 -21
  10. sempy_labs/_helper_functions.py +20 -25
  11. sempy_labs/_icons.py +6 -0
  12. sempy_labs/_list_functions.py +1172 -392
  13. sempy_labs/_model_auto_build.py +3 -5
  14. sempy_labs/_model_bpa.py +20 -24
  15. sempy_labs/_model_dependencies.py +7 -14
  16. sempy_labs/_one_lake_integration.py +14 -24
  17. sempy_labs/_query_scale_out.py +13 -31
  18. sempy_labs/_refresh_semantic_model.py +8 -18
  19. sempy_labs/_translations.py +5 -5
  20. sempy_labs/_vertipaq.py +11 -18
  21. sempy_labs/directlake/_directlake_schema_compare.py +11 -15
  22. sempy_labs/directlake/_directlake_schema_sync.py +35 -40
  23. sempy_labs/directlake/_fallback.py +3 -7
  24. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  25. sempy_labs/directlake/_get_shared_expression.py +5 -11
  26. sempy_labs/directlake/_guardrails.py +5 -7
  27. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -26
  28. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -4
  29. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +11 -16
  30. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -15
  31. sempy_labs/directlake/_warm_cache.py +10 -15
  32. sempy_labs/lakehouse/__init__.py +0 -2
  33. sempy_labs/lakehouse/_get_lakehouse_columns.py +4 -3
  34. sempy_labs/lakehouse/_get_lakehouse_tables.py +12 -11
  35. sempy_labs/lakehouse/_lakehouse.py +6 -7
  36. sempy_labs/lakehouse/_shortcuts.py +10 -111
  37. sempy_labs/migration/__init__.py +4 -2
  38. sempy_labs/migration/_create_pqt_file.py +5 -14
  39. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  40. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  41. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +3 -8
  42. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  43. sempy_labs/migration/_migration_validation.py +5 -164
  44. sempy_labs/migration/_refresh_calc_tables.py +5 -5
  45. sempy_labs/report/__init__.py +2 -2
  46. sempy_labs/report/_generate_report.py +14 -19
  47. sempy_labs/report/_report_functions.py +41 -83
  48. sempy_labs/report/_report_rebind.py +43 -44
  49. sempy_labs/tom/__init__.py +6 -0
  50. sempy_labs/{_tom.py → tom/_model.py} +274 -337
  51. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/LICENSE +0 -0
  53. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/top_level.txt +0 -0
sempy_labs/_model_auto_build.py CHANGED
@@ -1,7 +1,7 @@
 import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from sempy_labs._generate_semantic_model import create_blank_semantic_model
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
 from typing import List, Optional, Union
@@ -41,9 +41,7 @@ def model_auto_build(

     """

-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     if lakehouse_workspace is None:
         lakehouse_workspace = workspace
@@ -60,7 +58,7 @@ def model_auto_build(

     create_blank_semantic_model(dataset=dataset, workspace=workspace)

-    with connect_semantic_model(dataset=dataset, workspace=workspace) as tom:
+    with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:

         # DL Only
         expr = get_shared_expression(lakehouse=lakehouse, workspace=lakehouse_workspace)
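Note: two changes recur throughout 0.5.0 and first appear in this file: the TOM wrapper moved from the private sempy_labs._tom module to the sempy_labs.tom subpackage (see file 50 in the list above), and code paths that modify a model now pass readonly=False to connect_semantic_model. Below is a minimal sketch of the new calling convention; the dataset and workspace names are placeholders, and the claim that omitting readonly yields a read-only connection is an inference from this diff, not documented behavior.

    from sempy_labs.tom import connect_semantic_model  # new import path (was sempy_labs._tom)

    # Open the model for modification; the diff suggests the connection stays
    # read-only unless readonly=False is passed explicitly.
    with connect_semantic_model(
        dataset="AdventureWorks", workspace="Sales", readonly=False
    ) as tom:
        for t in tom.model.Tables:  # tom.model exposes the TOM object model
            print(t.Name)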
sempy_labs/_model_bpa.py CHANGED
@@ -11,7 +11,7 @@ from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from typing import List, Optional, Union
 from sempy._utils._log import log
-
+import sempy_labs._icons as icons

 def model_bpa_rules():
     """
@@ -744,9 +744,7 @@ def run_model_bpa(
         message="This pattern is interpreted as a regular expression, and has match groups.",
     )

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     if rules_dataframe is None:
         rules_dataframe = model_bpa_rules()
@@ -798,13 +796,13 @@ def run_model_bpa(
     cols = ["From Cardinality", "To Cardinality"]

     for col in cols:
-        if not col in dfR:
+        if col not in dfR:
             dfR[col] = None

     cols = ["Parent Is Hidden"]

     for col in cols:
-        if not col in dfM:
+        if col not in dfM:
             dfM[col] = None

     # Data Coverage Definition rule
@@ -842,9 +840,9 @@ def run_model_bpa(
         dataset=dataset,
         workspace=workspace,
         dax_string="""
-        SELECT [FUNCTION_NAME]
+        SELECT [FUNCTION_NAME]
         FROM $SYSTEM.MDSCHEMA_FUNCTIONS
-        WHERE [INTERFACE_NAME] = 'DATETIME'
+        WHERE [INTERFACE_NAME] = 'DATETIME'
         """,
     )

@@ -1008,7 +1006,7 @@ def run_model_bpa(
     dfM["Referenced By"].fillna(0, inplace=True)
     dfM["Referenced By"] = dfM["Referenced By"].fillna(0).astype(int)

-    pattern = "[^\( ][a-zA-Z0-9_()-]+\[[^\[]+\]|'[^']+'\[[^\[]+\]|\[[^\[]+\]"
+    pattern = r"[^\( ][a-zA-Z0-9_()-]+\[[^\[]+\]|'[^']+'\[[^\[]+\]|\[[^\[]+\]"

     dfM["Has Fully Qualified Measure Reference"] = False
     dfM["Has Unqualified Column Reference"] = False
@@ -1041,15 +1039,15 @@ def run_model_bpa(

     dfM_filt = dfM[
         dfM["Measure Expression"].str.contains(
-            "(?i)USERELATIONSHIP\s*\(\s*'*"
-            + fromTable
-            + "'*\["
-            + fromColumn
-            + "\]\s*,\s*'*"
-            + toTable
-            + "'*\["
-            + toColumn
-            + "\]",
+            r"(?i)USERELATIONSHIP\s*\(\s*'*"
+            + re.escape(fromTable)
+            + r"'*\["
+            + re.escape(fromColumn)
+            + r"\]\s*,\s*'*"
+            + re.escape(toTable)
+            + r"'*\["
+            + re.escape(toColumn)
+            + r"\]",
             regex=True,
         )
     ]
@@ -1183,11 +1181,9 @@ def run_model_bpa(

     if export:
         lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
-            print(
-                f"In order to save the Best Practice Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-            )
-            return
+        if lakeAttach is False:
+            raise ValueError(f"{icons.red_dot} In order to save the Best Practice Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+
         dfExport = finalDF.copy()
         delta_table_name = "modelbparesults"
@@ -1230,7 +1226,7 @@ def run_model_bpa(
         spark_df = spark.createDataFrame(dfExport)
         spark_df.write.mode("append").format("delta").saveAsTable(delta_table_name)
         print(
-            f"\u2022 Model Best Practice Analyzer results for the '{dataset}' semantic model have been appended to the '{delta_table_name}' delta table."
+            f"{icons.green_dot} Model Best Practice Analyzer results for the '{dataset}' semantic model have been appended to the '{delta_table_name}' delta table."
         )

     if return_dataframe:
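Note: the regex changes above combine two fixes: raw-string literals (r"...") stop Python from treating sequences such as \[ and \s as string escapes (a DeprecationWarning in recent Python versions), and re.escape() protects the pattern when a table or column name contains regex metacharacters. A small self-contained illustration; the table name is made up.

    import re

    from_table = "Sales (2024)"  # hypothetical name containing "(" and ")"

    # Building the pattern from the raw name would let the parentheses change
    # the regex's meaning; re.escape() backslash-escapes all metacharacters.
    pattern = r"(?i)USERELATIONSHIP\s*\(\s*'*" + re.escape(from_table) + r"'*\["
    print(pattern)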
sempy_labs/_model_dependencies.py CHANGED
@@ -1,12 +1,11 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import format_dax_object_name
-from typing import List, Optional, Union
+from typing import Any, Dict, Optional
 from anytree import Node, RenderTree
 from sempy._utils._log import log

-
+@log
 def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
     """
     Shows all dependencies for all measures in a semantic model.
@@ -26,9 +25,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
     Shows all dependencies for all measures in the semantic model.
     """

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     dep = fabric.evaluate_dax(
         dataset=dataset,
@@ -131,7 +128,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):

     return df

-
+@log
 def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
     """
     Shows all dependencies for all objects in a semantic model.
@@ -151,9 +148,7 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
     Shows all dependencies for all objects in the semantic model.
     """

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     dep = fabric.evaluate_dax(
         dataset=dataset,
@@ -283,9 +278,7 @@ def measure_dependency_tree(

     """

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
     dfM_filt = dfM[dfM["Measure Name"] == measure_name]
@@ -300,7 +293,7 @@ def measure_dependency_tree(
     df_filt = md[md["Object Name"] == measure_name]

     # Create a dictionary to hold references to nodes
-    node_dict = {}
+    node_dict: Dict[str, Any] = {}
     measureIcon = "\u2211"
     tableIcon = "\u229E"
     columnIcon = "\u229F"
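Note: the same refactor appears in nearly every module in this release: the explicit "if workspace is None / == None" branch is collapsed into a single fabric.resolve_workspace_name(workspace) call. The sketch below only restates the deleted branch for reference; the assumption is that resolve_workspace_name() performs the same fallback internally when it receives None.

    import sempy.fabric as fabric
    from typing import Optional

    def resolve_workspace(workspace: Optional[str] = None) -> str:
        # Pre-0.5.0 pattern: fall back to the notebook's own workspace
        # when no workspace name is supplied.
        if workspace is None:
            workspace_id = fabric.get_workspace_id()
            workspace = fabric.resolve_workspace_name(workspace_id)
        return workspace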
sempy_labs/_one_lake_integration.py CHANGED
@@ -1,9 +1,10 @@
+import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
-
+import sempy_labs._icons as icons

 @log
 def export_model_to_onelake(
@@ -31,7 +32,7 @@ def export_model_to_onelake(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if destination_workspace == None:
+    if destination_workspace is None:
         destination_workspace = workspace
         destination_workspace_id = workspace_id
     else:
@@ -41,10 +42,7 @@ def export_model_to_onelake(
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]

     if len(dfD_filt) == 0:
-        print(
-            f"The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace.")

     tmsl = f"""
     {{
@@ -64,17 +62,11 @@ def export_model_to_onelake(
     try:
         fabric.execute_tmsl(script=tmsl, workspace=workspace)
         print(
-            f"The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
-        )
-    except:
-        print(
-            f"ERROR: The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace."
+            f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
         )
-        print(
-            f"Make sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
-        )
-        return
-
+    except Exception as e:
+        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration") from e
+
     # Create shortcuts if destination lakehouse is specified
     if destination_lakehouse is not None:
         # Destination...
@@ -83,14 +75,14 @@ def export_model_to_onelake(

         if len(dfI_filt) == 0:
             print(
-                f"The '{destination_lakehouse}' lakehouse does not exist within the '{destination_workspace}' workspace."
+                f"{icons.red_dot} The '{destination_lakehouse}' lakehouse does not exist within the '{destination_workspace}' workspace."
             )
             # Create lakehouse
             destination_lakehouse_id = fabric.create_lakehouse(
                 display_name=destination_lakehouse, workspace=destination_workspace
             )
             print(
-                f"The '{destination_lakehouse}' lakehouse has been created within the '{destination_workspace}' workspace.\n"
+                f"{icons.green_dot} The '{destination_lakehouse}' lakehouse has been created within the '{destination_workspace}' workspace.\n"
             )
         else:
             destination_lakehouse_id = dfI_filt["Id"].iloc[0]
@@ -122,7 +114,7 @@ def export_model_to_onelake(

     client = fabric.FabricRestClient()

-    print("Creating shortcuts...\n")
+    print(f"{icons.in_progress} Creating shortcuts...\n")
     for tableName in tables:
         tablePath = "Tables/" + tableName
         shortcutName = tableName.replace(" ", "")
@@ -145,11 +137,9 @@ def export_model_to_onelake(
             )
             if response.status_code == 201:
                 print(
-                    f"\u2022 The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset}' semantic model within the '{workspace}' workspace.\n"
+                    f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset}' semantic model within the '{workspace}' workspace.\n"
                 )
             else:
                 print(response.status_code)
-        except:
-            print(
-                f"ERROR: Failed to create a shortcut for the '{tableName}' table."
-            )
+        except Exception as e:
+            raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table.") from e
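Note: error handling in this module switches from bare except: blocks that print and return to raising ValueError with the original exception chained via "from e". Chaining keeps the root cause (for example, the TMSL execution failure) visible in the traceback instead of swallowing it. A minimal illustration with a stand-in for the failing call:

    def run_tmsl(script: str) -> None:
        # Stand-in for fabric.execute_tmsl(); always fails for the demo.
        raise RuntimeError("XMLA endpoint refused the request")

    try:
        run_tmsl("{ }")
    except Exception as e:
        # "from e" records the original error as __cause__, so the traceback
        # shows both the friendly message and the underlying failure.
        raise ValueError("The semantic model's tables were not exported.") from e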
sempy_labs/_query_scale_out.py CHANGED
@@ -2,7 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Optional
 import sempy_labs._icons as icons


@@ -44,10 +44,7 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
             f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
         )
     else:
-        print(
-            f"{icons.red_dot} QSO sync failed for the '{dataset}' semantic model within the '{workspace}' workspace."
-        )
-
+        raise ValueError(f"{icons.red_dot} QSO sync failed for the '{dataset}' semantic model within the '{workspace}' workspace.")


def qso_sync_status(dataset: str, workspace: Optional[str] = None):
    """
@@ -189,7 +186,7 @@ def disable_qso(dataset: str, workspace: Optional[str] = None):
         )
         return df
     else:
-        return f"{icons.red_dot} {response.status_code}"
+        raise ValueError(f"{icons.red_dot} {response.status_code}")


def set_qso(
@@ -256,16 +253,9 @@ def set_qso(
             )
             return df
         else:
-            return f"{icons.red_dot} {response.status_code}"
+            raise ValueError(f"{icons.red_dot} {response.status_code}")
     else:
-        print(
-            f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out."
-        )
-        print(
-            "https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites"
-        )
-        return
-
+        raise ValueError(f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\"")

def set_semantic_model_storage_format(
    dataset: str, storage_format: str, workspace: Optional[str] = None
@@ -311,10 +301,7 @@ def set_semantic_model_storage_format(
     elif storage_format == "Small":
         request_body = {"targetStorageMode": "Abf"}
     else:
-        print(
-            f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}.")

     client = fabric.PowerBIRestClient()
     response = client.patch(
@@ -326,8 +313,7 @@ def set_semantic_model_storage_format(
             f"{icons.green_dot} Semantic model storage format set to '{storage_format}'."
         )
     else:
-        return f"{icons.red_dot} {response.status_code}"
-
+        raise ValueError(f"{icons.red_dot} {response.status_code}")

def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
    """
@@ -370,21 +356,17 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/datasets")
     for v in response.json()["value"]:
-        tsm = v["targetStorageMode"]
+        tsm = v.get("targetStorageMode")
         if tsm == "Abf":
             sm = "Small"
         else:
             sm = "Large"
         new_data = {
-            "Dataset Id": v["id"],
-            "Dataset Name": v["name"],
+            "Dataset Id": v.get("id"),
+            "Dataset Name": v.get("name"),
             "Storage Mode": sm,
-            "QSO Auto Sync Enabled": v["queryScaleOutSettings"][
-                "autoSyncReadOnlyReplicas"
-            ],
-            "QSO Max Read Only Replicas": v["queryScaleOutSettings"][
-                "maxReadOnlyReplicas"
-            ],
+            "QSO Auto Sync Enabled": v.get("queryScaleOutSettings",{}).get("autoSyncReadOnlyReplicas"),
+            "QSO Max Read Only Replicas": v.get("queryScaleOutSettings",{}).get("maxReadOnlyReplicas"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

@@ -444,4 +426,4 @@ def set_workspace_default_storage_format(
             f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
         )
     else:
-        print(f"{icons.red_dot} {response.status_code}")
+        raise ValueError(f"{icons.red_dot} {response.status_code}")
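Note: list_qso_settings now reads the REST payload defensively with dict.get() instead of direct indexing, so dataset entries whose JSON lacks queryScaleOutSettings (or other keys) yield None rather than raising KeyError. A small illustration with a hypothetical payload entry:

    entry = {"id": "123", "name": "Sales"}  # no "queryScaleOutSettings" key

    qso = entry.get("queryScaleOutSettings", {})
    auto_sync = qso.get("autoSyncReadOnlyReplicas")  # None instead of KeyError
    max_replicas = qso.get("maxReadOnlyReplicas")    # None instead of KeyError
    print(entry.get("id"), auto_sync, max_replicas)  # 123 None None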
sempy_labs/_refresh_semantic_model.py CHANGED
@@ -2,7 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
@@ -41,9 +41,7 @@ def refresh_semantic_model(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

     if refresh_type is None:
         refresh_type = "full"
@@ -53,7 +51,7 @@ def refresh_semantic_model(
     if isinstance(partitions, str):
         partitions = [partitions]

-    objects = []
+    objects: List[Any] = []

     if tables is not None:
         objects = objects + [{"table": table} for table in tables]
@@ -81,10 +79,7 @@ def refresh_semantic_model(
     ]

     if refresh_type not in refreshTypes:
-        print(
-            f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}.")

     if len(objects) == 0:
         requestID = fabric.refresh_dataset(
@@ -119,10 +114,7 @@ def refresh_semantic_model(
         if status == "Completed":
             break
         elif status == "Failed":
-            print(
-                f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
-            )
-            return
+            raise ValueError(f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed.")
         elif status == "Cancelled":
             print(
                 f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
@@ -161,12 +153,10 @@ def cancel_dataset_refresh(
     rr = fabric.list_refresh_requests(dataset=dataset, workspace=workspace)
     rr_filt = rr[rr["Status"] == "Unknown"]

-    if request_id == None:
+    if request_id is None:
         if len(rr_filt) == 0:
-            print(
-                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
-            )
-            return
+            raise ValueError(f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace.")
+
         request_id = rr_filt["Request Id"].iloc[0]

     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
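Note: the objects: List[Any] = [] annotation here (and node_dict: Dict[str, Any] = {} earlier) exists for static type checkers: mypy cannot infer an element type for a bare [] or {} and, under strict settings, reports "Need type annotation". A minimal illustration:

    from typing import Any, Dict, List

    objects: List[Any] = []  # element type declared up front
    objects.append({"table": "Sales"})
    objects.append({"table": "Sales", "partition": "Sales-2024"})

    node_dict: Dict[str, Any] = {}  # same idea for the dependency-tree nodes
    node_dict["Sales"] = object()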
sempy_labs/_translations.py CHANGED
@@ -35,7 +35,7 @@ def translate_semantic_model(
     from synapse.ml.services import Translate
     from pyspark.sql.functions import col, flatten
     from pyspark.sql import SparkSession
-    from ._tom import connect_semantic_model
+    from sempy_labs.tom import connect_semantic_model

     if isinstance(languages, str):
         languages = [languages]
@@ -212,7 +212,7 @@ def translate_semantic_model(
             print(f"{icons.in_progress} Translating into the '{lang}' language...")

             for t in tom.model.Tables:
-                if t.IsHidden == False:
+                if t.IsHidden is False:
                     if clm == "Name":
                         df_filt = df_panda[
                             (df_panda["Object Type"] == "Table")
@@ -240,7 +240,7 @@ def translate_semantic_model(
                             value=tr,
                         )
                 for c in t.Columns:
-                    if c.IsHidden == False:
+                    if c.IsHidden is False:
                         if clm == "Name":
                             df_filt = df_panda[
                                 (df_panda["Object Type"] == "Column")
@@ -287,7 +287,7 @@ def translate_semantic_model(
                             value=tr,
                         )
                 for h in t.Hierarchies:
-                    if h.IsHidden == False:
+                    if h.IsHidden is False:
                         if clm == "Name":
                             df_filt = df_panda[
                                 (df_panda["Object Type"] == "Hierarchy")
@@ -331,7 +331,7 @@ def translate_semantic_model(
                             value=tr,
                         )
                 for ms in t.Measures:
-                    if ms.IsHidden == False:
+                    if ms.IsHidden is False:
                         if clm == "Name":
                             df_filt = df_panda[
                                 (df_panda["Object Type"] == "Measure")
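Note: the IsHidden comparisons switch from "== False" to "is False". Linters such as flake8 flag equality comparisons against booleans (E712); "is False" satisfies the rule while keeping the explicit check, though "not value" is the more common spelling for plain Python booleans. A quick comparison:

    is_hidden = False

    if is_hidden == False:  # flagged by flake8 as E712
        pass
    if is_hidden is False:  # form adopted in 0.5.0
        pass
    if not is_hidden:       # idiomatic for plain booleans
        pass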
sempy_labs/_vertipaq.py CHANGED
@@ -14,7 +14,7 @@ from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from typing import List, Optional, Union
 from sempy._utils._log import log
-
+import sempy_labs._icons as icons

 @log
 def vertipaq_analyzer(
@@ -56,11 +56,9 @@ def vertipaq_analyzer(
         "ignore", message="createDataFrame attempted Arrow optimization*"
     )

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)

-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = workspace

     dfT = fabric.list_tables(dataset=dataset, extended=True, workspace=workspace)
@@ -102,9 +100,7 @@ def vertipaq_analyzer(
     dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]

     if len(dfI_filt) == 0:
-        print(
-            f"The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
-        )
+        raise ValueError(f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter.")
     else:
         lakehouseName = dfI_filt["Display Name"].iloc[0]

@@ -233,7 +229,7 @@ def vertipaq_analyzer(

     query = f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),isblank({toObject}))\n)"

-    if isActive == False:  # add userelationship
+    if isActive is False:  # add userelationship
         query = f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),userelationship({fromObject},{toObject}),isblank({toObject}))\n)"

     result = fabric.evaluate_dax(
@@ -359,7 +355,7 @@ def vertipaq_analyzer(
         "Max To Cardinality",
         "Missing Rows",
     ]
-    if read_stats_from_data == False:
+    if read_stats_from_data is False:
         intList.remove("Missing Rows")
     dfR[intList] = dfR[intList].applymap("{:,}".format)

@@ -436,11 +432,8 @@ def vertipaq_analyzer(
     ### Export vertipaq to delta tables in lakehouse
     if export in ["table", "zip"]:
         lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
-            print(
-                f"In order to save the Vertipaq Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-            )
-            return
+        if lakeAttach is False:
+            raise ValueError(f"{icons.red_dot} In order to save the Vertipaq Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")

     if export == "table":
         spark = SparkSession.builder.getOrCreate()
@@ -472,7 +465,7 @@ def vertipaq_analyzer(
         "export_Model": ["Model", export_Model],
     }

-    print(f"Saving Vertipaq Analyzer to delta tables in the lakehouse...\n")
+    print(f"{icons.in_progress} Saving Vertipaq Analyzer to delta tables in the lakehouse...\n")
     now = datetime.datetime.now()
     for key, (obj, df) in dfMap.items():
         df["Timestamp"] = now
@@ -491,7 +484,7 @@ def vertipaq_analyzer(
         spark_df = spark.createDataFrame(df)
         spark_df.write.mode("append").format("delta").saveAsTable(delta_table_name)
         print(
-            f"\u2022 Vertipaq Analyzer results for '{obj}' have been appended to the '{delta_table_name}' delta table."
+            f"{icons.bullet} Vertipaq Analyzer results for '{obj}' have been appended to the '{delta_table_name}' delta table."
         )

     ### Export vertipaq to zip file within the lakehouse
@@ -532,7 +525,7 @@ def vertipaq_analyzer(
     if os.path.exists(filePath):
         os.remove(filePath)
     print(
-        f"The Vertipaq Analyzer info for the '{dataset}' semantic model in the '{workspace}' workspace has been saved to the 'Vertipaq Analyzer/{zipFileName}' in the default lakehouse attached to this notebook."
+        f"{icons.green_dot} The Vertipaq Analyzer info for the '{dataset}' semantic model in the '{workspace}' workspace has been saved to the 'Vertipaq Analyzer/{zipFileName}' in the default lakehouse attached to this notebook."
     )

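Note: status messages across the release are now prefixed with glyphs from sempy_labs._icons (file 11 above, +6 lines) instead of hard-coded characters such as "\u2022". The attribute names below are taken from the usages in this diff; the actual glyph values in 0.5.0 are not visible here, so they are guesses.

    # Hypothetical sketch of the sempy_labs/_icons.py constants used above.
    green_dot = "\U0001F7E2"   # success messages
    red_dot = "\U0001F534"     # errors / raised exceptions
    yellow_dot = "\U0001F7E1"  # warnings such as cancelled refreshes
    in_progress = "\u231B"     # long-running operations ("Creating shortcuts...")
    bullet = "\u2022"          # per-item messages (previously hard-coded)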