semantic-link-labs 0.4.1__py3-none-any.whl → 0.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


Files changed (52)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.4.2.dist-info/RECORD +53 -0
  3. sempy_labs/__init__.py +25 -25
  4. sempy_labs/_ai.py +28 -27
  5. sempy_labs/_clear_cache.py +2 -1
  6. sempy_labs/_dax.py +5 -9
  7. sempy_labs/_generate_semantic_model.py +7 -8
  8. sempy_labs/_helper_functions.py +17 -13
  9. sempy_labs/_icons.py +5 -0
  10. sempy_labs/_list_functions.py +273 -17
  11. sempy_labs/_model_auto_build.py +1 -1
  12. sempy_labs/_model_bpa.py +37 -37
  13. sempy_labs/_model_dependencies.py +11 -12
  14. sempy_labs/_one_lake_integration.py +15 -22
  15. sempy_labs/_query_scale_out.py +1 -1
  16. sempy_labs/_refresh_semantic_model.py +4 -4
  17. sempy_labs/_translations.py +5 -5
  18. sempy_labs/_vertipaq.py +11 -11
  19. sempy_labs/directlake/_directlake_schema_compare.py +11 -9
  20. sempy_labs/directlake/_directlake_schema_sync.py +36 -37
  21. sempy_labs/directlake/_fallback.py +3 -3
  22. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  23. sempy_labs/directlake/_get_shared_expression.py +3 -3
  24. sempy_labs/directlake/_guardrails.py +3 -3
  25. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -25
  26. sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  27. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -11
  28. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -9
  29. sempy_labs/directlake/_warm_cache.py +5 -7
  30. sempy_labs/lakehouse/__init__.py +0 -2
  31. sempy_labs/lakehouse/_get_lakehouse_columns.py +3 -2
  32. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -7
  33. sempy_labs/lakehouse/_lakehouse.py +6 -5
  34. sempy_labs/lakehouse/_shortcuts.py +8 -106
  35. sempy_labs/migration/__init__.py +4 -2
  36. sempy_labs/migration/_create_pqt_file.py +2 -2
  37. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  38. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  39. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -6
  40. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  41. sempy_labs/migration/_migration_validation.py +1 -164
  42. sempy_labs/migration/_refresh_calc_tables.py +3 -5
  43. sempy_labs/report/__init__.py +2 -2
  44. sempy_labs/report/_generate_report.py +14 -15
  45. sempy_labs/report/_report_functions.py +11 -10
  46. sempy_labs/report/_report_rebind.py +6 -7
  47. sempy_labs/tom/__init__.py +6 -0
  48. sempy_labs/{_tom.py → tom/_model.py} +166 -187
  49. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  50. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/LICENSE +0 -0
  51. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/WHEEL +0 -0
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/top_level.txt +0 -0
sempy_labs/_one_lake_integration.py CHANGED
@@ -1,9 +1,10 @@
+import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
-
+import sempy_labs._icons as icons
 
 @log
 def export_model_to_onelake(
@@ -31,7 +32,7 @@ def export_model_to_onelake(
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    if destination_workspace == None:
+    if destination_workspace is None:
         destination_workspace = workspace
         destination_workspace_id = workspace_id
     else:
@@ -42,7 +43,7 @@ def export_model_to_onelake(
 
     if len(dfD_filt) == 0:
         print(
-            f"The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
         )
         return
 
@@ -64,17 +65,11 @@ def export_model_to_onelake(
     try:
        fabric.execute_tmsl(script=tmsl, workspace=workspace)
        print(
-            f"The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
-        )
-    except:
-        print(
-            f"ERROR: The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace."
-        )
-        print(
-            f"Make sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
+            f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
        )
-        return
-
+    except Exception as e:
+        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration") from e
+
    # Create shortcuts if destination lakehouse is specified
    if destination_lakehouse is not None:
        # Destination...
@@ -83,14 +78,14 @@ def export_model_to_onelake(
 
     if len(dfI_filt) == 0:
        print(
-            f"The '{destination_lakehouse}' lakehouse does not exist within the '{destination_workspace}' workspace."
+            f"{icons.red_dot} The '{destination_lakehouse}' lakehouse does not exist within the '{destination_workspace}' workspace."
        )
        # Create lakehouse
        destination_lakehouse_id = fabric.create_lakehouse(
            display_name=destination_lakehouse, workspace=destination_workspace
        )
        print(
-            f"The '{destination_lakehouse}' lakehouse has been created within the '{destination_workspace}' workspace.\n"
+            f"{icons.green_dot} The '{destination_lakehouse}' lakehouse has been created within the '{destination_workspace}' workspace.\n"
        )
     else:
        destination_lakehouse_id = dfI_filt["Id"].iloc[0]
@@ -109,7 +104,7 @@ def export_model_to_onelake(
     dfP_filt = dfP[
        (dfP["Mode"] == "Import")
        & (dfP["Source Type"] != "CalculationGroup")
-        & (dfP["Parent System Managed"] == False)
+        & (dfP["Parent System Managed"] is False)
    ]
    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
    tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
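Reviewer note on the hunk above: unlike the scalar `== None` → `is None` fixes elsewhere in this release, `dfP["Parent System Managed"] is False` is not equivalent to the element-wise `== False` it replaces. `is` tests object identity, so a pandas Series compared with `is False` always yields the scalar False, turning this mask term into a constant. A minimal standalone illustration:

    import pandas as pd

    s = pd.Series([True, False, False])
    print((s == False).tolist())  # [False, True, True] -- element-wise comparison
    print(s is False)             # False -- identity check on the Series object itself
    # A mask term like `& (s is False)` therefore reduces to `& False`,
    # which filters out every row.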
@@ -122,7 +117,7 @@ def export_model_to_onelake(
 
     client = fabric.FabricRestClient()
 
-    print("Creating shortcuts...\n")
+    print(f"{icons.in_progress} Creating shortcuts...\n")
    for tableName in tables:
        tablePath = "Tables/" + tableName
        shortcutName = tableName.replace(" ", "")
@@ -145,11 +140,9 @@ def export_model_to_onelake(
             )
            if response.status_code == 201:
                print(
-                    f"\u2022 The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset}' semantic model within the '{workspace}' workspace.\n"
+                    f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset}' semantic model within the '{workspace}' workspace.\n"
                )
            else:
                print(response.status_code)
-        except:
-            print(
-                f"ERROR: Failed to create a shortcut for the '{tableName}' table."
-            )
+        except Exception as e:
+            raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table.") from e
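The `icons.*` prefixes used throughout these hunks come from the new `sempy_labs/_icons.py` module (file 9 in the list, +5 lines). Its exact contents are not shown in this diff, so the constant values below are assumptions; the error-handling pattern, replacing bare `except:` with a chained `raise ... from e`, is taken directly from the hunks above:

    # Hypothetical sketch of sempy_labs/_icons.py -- the names match the diff,
    # the emoji values are assumed.
    green_dot = "\U0001F7E2"   # success
    red_dot = "\U0001F534"     # failure
    yellow_dot = "\U0001F7E1"  # warning
    in_progress = "\u231B"     # long-running operation
    bullet = "\u2022"          # list item

    def run_export(dataset: str) -> None:
        raise RuntimeError("simulated TMSL failure")  # stand-in for fabric.execute_tmsl

    def export_or_raise(dataset: str) -> None:
        # The 0.4.2 pattern: chain the original exception so the root cause
        # survives, instead of printing an ERROR string and returning.
        try:
            run_export(dataset)
        except Exception as e:
            raise ValueError(f"{red_dot} Export failed for '{dataset}'.") from e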
@@ -2,7 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Optional
 import sempy_labs._icons as icons
 
 
sempy_labs/_refresh_semantic_model.py CHANGED
@@ -2,7 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
@@ -41,7 +41,7 @@ def refresh_semantic_model(
         or if no lakehouse attached, resolves to the workspace of the notebook.
    """
 
-    if workspace == None:
+    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)
 
@@ -53,7 +53,7 @@ def refresh_semantic_model(
     if isinstance(partitions, str):
        partitions = [partitions]
 
-    objects = []
+    objects: List[Any] = []
 
     if tables is not None:
        objects = objects + [{"table": table} for table in tables]
@@ -161,7 +161,7 @@ def cancel_dataset_refresh(
     rr = fabric.list_refresh_requests(dataset=dataset, workspace=workspace)
    rr_filt = rr[rr["Status"] == "Unknown"]
 
-    if request_id == None:
+    if request_id is None:
        if len(rr_filt) == 0:
            print(
                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
sempy_labs/_translations.py CHANGED
@@ -35,7 +35,7 @@ def translate_semantic_model(
     from synapse.ml.services import Translate
    from pyspark.sql.functions import col, flatten
    from pyspark.sql import SparkSession
-    from ._tom import connect_semantic_model
+    from .tom import connect_semantic_model
 
     if isinstance(languages, str):
        languages = [languages]
@@ -212,7 +212,7 @@ def translate_semantic_model(
             print(f"{icons.in_progress} Translating into the '{lang}' language...")
 
             for t in tom.model.Tables:
-                if t.IsHidden == False:
+                if t.IsHidden is False:
                    if clm == "Name":
                        df_filt = df_panda[
                            (df_panda["Object Type"] == "Table")
@@ -240,7 +240,7 @@ def translate_semantic_model(
                             value=tr,
                        )
                for c in t.Columns:
-                    if c.IsHidden == False:
+                    if c.IsHidden is False:
                        if clm == "Name":
                            df_filt = df_panda[
                                (df_panda["Object Type"] == "Column")
@@ -287,7 +287,7 @@ def translate_semantic_model(
                             value=tr,
                        )
                for h in t.Hierarchies:
-                    if h.IsHidden == False:
+                    if h.IsHidden is False:
                        if clm == "Name":
                            df_filt = df_panda[
                                (df_panda["Object Type"] == "Hierarchy")
@@ -331,7 +331,7 @@ def translate_semantic_model(
                             value=tr,
                        )
                for ms in t.Measures:
-                    if ms.IsHidden == False:
+                    if ms.IsHidden is False:
                        if clm == "Name":
                            df_filt = df_panda[
                                (df_panda["Object Type"] == "Measure")
sempy_labs/_vertipaq.py CHANGED
@@ -14,7 +14,7 @@ from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from typing import List, Optional, Union
 from sempy._utils._log import log
-
+import sempy_labs._icons as icons
 
 @log
 def vertipaq_analyzer(
@@ -56,11 +56,11 @@ def vertipaq_analyzer(
         "ignore", message="createDataFrame attempted Arrow optimization*"
    )
 
-    if workspace == None:
+    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)
 
-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
        lakehouse_workspace = workspace
 
     dfT = fabric.list_tables(dataset=dataset, extended=True, workspace=workspace)
@@ -103,7 +103,7 @@ def vertipaq_analyzer(
 
     if len(dfI_filt) == 0:
        print(
-            f"The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+            f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
        )
     else:
        lakehouseName = dfI_filt["Display Name"].iloc[0]
@@ -233,7 +233,7 @@ def vertipaq_analyzer(
 
         query = f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),isblank({toObject}))\n)"
 
-        if isActive == False:  # add userelationship
+        if isActive is False:  # add userelationship
            query = f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),userelationship({fromObject},{toObject}),isblank({toObject}))\n)"
 
         result = fabric.evaluate_dax(
@@ -359,7 +359,7 @@ def vertipaq_analyzer(
         "Max To Cardinality",
        "Missing Rows",
    ]
-    if read_stats_from_data == False:
+    if read_stats_from_data is False:
        intList.remove("Missing Rows")
    dfR[intList] = dfR[intList].applymap("{:,}".format)
 
@@ -436,9 +436,9 @@ def vertipaq_analyzer(
     ### Export vertipaq to delta tables in lakehouse
    if export in ["table", "zip"]:
        lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
+        if lakeAttach is False:
            print(
-                f"In order to save the Vertipaq Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+                f"{icons.red_dot} In order to save the Vertipaq Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
            )
            return
 
@@ -472,7 +472,7 @@ def vertipaq_analyzer(
         "export_Model": ["Model", export_Model],
    }
 
-    print(f"Saving Vertipaq Analyzer to delta tables in the lakehouse...\n")
+    print(f"{icons.in_progress} Saving Vertipaq Analyzer to delta tables in the lakehouse...\n")
    now = datetime.datetime.now()
    for key, (obj, df) in dfMap.items():
        df["Timestamp"] = now
@@ -491,7 +491,7 @@ def vertipaq_analyzer(
         spark_df = spark.createDataFrame(df)
        spark_df.write.mode("append").format("delta").saveAsTable(delta_table_name)
        print(
-            f"\u2022 Vertipaq Analyzer results for '{obj}' have been appended to the '{delta_table_name}' delta table."
+            f"{icons.bullet} Vertipaq Analyzer results for '{obj}' have been appended to the '{delta_table_name}' delta table."
        )
 
     ### Export vertipaq to zip file within the lakehouse
@@ -532,7 +532,7 @@ def vertipaq_analyzer(
     if os.path.exists(filePath):
        os.remove(filePath)
    print(
-        f"The Vertipaq Analyzer info for the '{dataset}' semantic model in the '{workspace}' workspace has been saved to the 'Vertipaq Analyzer/{zipFileName}' in the default lakehouse attached to this notebook."
+        f"{icons.green_dot} The Vertipaq Analyzer info for the '{dataset}' semantic model in the '{workspace}' workspace has been saved to the 'Vertipaq Analyzer/{zipFileName}' in the default lakehouse attached to this notebook."
    )
 
 
sempy_labs/directlake/_directlake_schema_compare.py CHANGED
@@ -10,8 +10,10 @@ from IPython.display import display
 from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
 from sempy_labs._list_functions import list_tables
 from typing import Optional
+import sempy_labs._icons as icons
+from sempy._utils._log import log
 
-
+@log
 def direct_lake_schema_compare(
     dataset: str,
     workspace: Optional[str] = None,
@@ -38,14 +40,14 @@ def direct_lake_schema_compare(
         or if no lakehouse attached, resolves to the workspace of the notebook.
    """
 
-    if workspace == None:
+    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)
 
     if lakehouse_workspace is None:
        lakehouse_workspace = workspace
 
-    if lakehouse == None:
+    if lakehouse is None:
        lakehouse_id = fabric.get_lakehouse_id()
        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
@@ -56,12 +58,12 @@ def direct_lake_schema_compare(
 
     if len(dfI_filt) == 0:
        print(
-            f"The SQL Endpoint in the '{dataset}' semantic model in the '{workspace} workspace does not point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace as specified."
+            f"{icons.red_dot} The SQL Endpoint in the '{dataset}' semantic model in the '{workspace} workspace does not point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace as specified."
        )
        return
 
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        print(f"The '{dataset}' semantic model is not in Direct Lake mode.")
+        print(f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode.")
        return
 
     dfT = list_tables(dataset, workspace)
@@ -92,19 +94,19 @@ def direct_lake_schema_compare(
 
     if len(missingtbls) == 0:
        print(
-            f"All tables exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+            f"{icons.green_dot} All tables exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
        )
     else:
        print(
-            f"The following tables exist in the '{dataset}' semantic model within the '{workspace}' workspace but do not exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+            f"{icons.yellow_dot} The following tables exist in the '{dataset}' semantic model within the '{workspace}' workspace but do not exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
        )
        display(missingtbls)
     if len(missingcols) == 0:
        print(
-            f"All columns exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+            f"{icons.green_dot} All columns exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
        )
     else:
        print(
-            f"The following columns exist in the '{dataset}' semantic model within the '{workspace}' workspace but do not exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+            f"{icons.yellow_dot} The following columns exist in the '{dataset}' semantic model within the '{workspace}' workspace but do not exist in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
        )
        display(missingcols)
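The recurring `== None` → `is None` change in these hunks is the standard E711 fix: `None` is a singleton, and `==` dispatches to `__eq__`, which a class (or a pandas object) may override to return anything, while `is` always tests identity. A quick demonstration:

    class AlwaysEqual:
        def __eq__(self, other):
            return True  # claims equality with everything, including None

    w = AlwaysEqual()
    print(w == None)  # True  -- misleading, __eq__ decides the answer
    print(w is None)  # False -- identity cannot be overridden

    x = None
    print(x is None)  # True  -- the reliable None test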
sempy_labs/directlake/_directlake_schema_sync.py CHANGED
@@ -2,6 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
+from sempy_labs.tom import connect_semantic_model
 from sempy_labs._helper_functions import (
     format_dax_object_name,
    resolve_lakehouse_name,
@@ -9,7 +10,7 @@ from sempy_labs._helper_functions import (
 )
 from typing import Optional
 from sempy._utils._log import log
-
+import sempy_labs._icons as icons
 
 @log
 def direct_lake_schema_sync(
@@ -45,14 +46,13 @@ def direct_lake_schema_sync(
     import Microsoft.AnalysisServices.Tabular as TOM
    import System
 
-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name()
 
     if lakehouse_workspace is None:
        lakehouse_workspace = workspace
 
-    if lakehouse == None:
+    if lakehouse is None:
        lakehouse_id = fabric.get_lakehouse_id()
        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
@@ -63,7 +63,7 @@ def direct_lake_schema_sync(
 
     if len(dfI_filt) == 0:
        print(
-            f"The SQL Endpoint in the '{dataset}' semantic model in the '{workspace} workspace does not point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace as specified."
+            f"{icons.red_dot} The SQL Endpoint in the '{dataset}' semantic model in the '{workspace} workspace does not point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace as specified."
        )
        return
 
@@ -93,36 +93,35 @@ def direct_lake_schema_sync(
         "double": "Double",
    }
 
-    tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
-    m = tom_server.Databases.GetByName(dataset).Model
-    for i, r in lc_filt.iterrows():
-        lakeTName = r["Table Name"]
-        lakeCName = r["Column Name"]
-        fullColName = r["Full Column Name"]
-        dType = r["Data Type"]
-
-        if fullColName not in dfC_filt["Column Object"].values:
-            dfL = dfP_filt[dfP_filt["Query"] == lakeTName]
-            tName = dfL["Table Name"].iloc[0]
-            if add_to_model:
-                col = TOM.DataColumn()
-                col.Name = lakeCName
-                col.SourceColumn = lakeCName
-                dt = mapping.get(dType)
-                try:
-                    col.DataType = System.Enum.Parse(TOM.DataType, dt)
-                except:
+    with connect_semantic_model(
+        dataset=dataset, readonly=False, workspace=workspace
+    ) as tom:
+
+        for i, r in lc_filt.iterrows():
+            lakeTName = r["Table Name"]
+            lakeCName = r["Column Name"]
+            fullColName = r["Full Column Name"]
+            dType = r["Data Type"]
+
+            if fullColName not in dfC_filt["Column Object"].values:
+                dfL = dfP_filt[dfP_filt["Query"] == lakeTName]
+                tName = dfL["Table Name"].iloc[0]
+                if add_to_model:
+                    col = TOM.DataColumn()
+                    col.Name = lakeCName
+                    col.SourceColumn = lakeCName
+                    dt = mapping.get(dType)
+                    try:
+                        col.DataType = System.Enum.Parse(TOM.DataType, dt)
+                    except Exception as e:
+                        raise ValueError(f"{icons.red_dot} Failed to map '{dType}' data type to the semantic model data types.") from e
+
+                    tom.model.Tables[tName].Columns.Add(col)
+                    print(
+                        f"{icons.green_dot} The '{lakeCName}' column has been added to the '{tName}' table as a '{dt}' data type within the '{dataset}' semantic model within the '{workspace}' workspace."
+                    )
+                else:
                     print(
-                        f"ERROR: '{dType}' data type is not mapped properly to the semantic model data types."
+                        f"{icons.yellow_dot} The {fullColName} column exists in the lakehouse but not in the '{tName}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
                    )
-                    return
-
-                m.Tables[tName].Columns.Add(col)
-                print(
-                    f"The '{lakeCName}' column has been added to the '{tName}' table as a '{dt}' data type within the '{dataset}' semantic model within the '{workspace}' workspace."
-                )
-            else:
-                print(
-                    f"The {fullColName} column exists in the lakehouse but not in the '{tName}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
-                )
-    m.SaveChanges()
+
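The rewrite above drops manual TOM plumbing (`fabric.create_tom_server(...)`, `Databases.GetByName(...)`, an explicit `m.SaveChanges()`) in favor of the library's `connect_semantic_model` context manager, which commits model changes when a read/write block exits. Based only on the calls visible in this diff, usage looks like this (dataset and workspace names are placeholders):

    from sempy_labs.tom import connect_semantic_model

    # Read/write connection: pending model changes are saved on exit.
    with connect_semantic_model(
        dataset="MyDataset", readonly=False, workspace="My Workspace"
    ) as tom:
        for t in tom.model.Tables:  # tom.model is the underlying TOM model
            print(t.Name)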
sempy_labs/directlake/_fallback.py CHANGED
@@ -2,7 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import numpy as np
 from typing import List, Optional, Union
-
+import sempy_labs._icons as icons
 
 def check_fallback_reason(dataset: str, workspace: Optional[str] = None):
     """
@@ -23,7 +23,7 @@ def check_fallback_reason(dataset: str, workspace: Optional[str] = None):
         The tables in the semantic model and their fallback reason.
    """
 
-    if workspace == None:
+    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)
 
@@ -32,7 +32,7 @@ def check_fallback_reason(dataset: str, workspace: Optional[str] = None):
 
     if len(dfP_filt) == 0:
        print(
-            f"The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            f"{icons.yellow_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
        )
     else:
        df = fabric.evaluate_dax(
sempy_labs/directlake/_get_directlake_lakehouse.py CHANGED
@@ -7,8 +7,7 @@ from sempy_labs._helper_functions import (
 )
 from typing import Optional, Tuple
 from uuid import UUID
-from sempy_labs._helper_functions import resolve_workspace_name_and_id
-
+import sempy_labs._icons as icons
 
 def get_direct_lake_lakehouse(
     dataset: str,
@@ -46,7 +45,7 @@ def get_direct_lake_lakehouse(
     if lakehouse_workspace is None:
        lakehouse_workspace = workspace
 
-    if lakehouse == None:
+    if lakehouse is None:
        lakehouse_id = fabric.get_lakehouse_id()
        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
@@ -55,7 +54,7 @@ def get_direct_lake_lakehouse(
 
     if len(dfP_filt) == 0:
        raise ValueError(
-            f"ERROR: The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
        )
 
     sqlEndpointId = get_direct_lake_sql_endpoint(dataset, workspace)
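Given the `Tuple` and `UUID` imports kept above, `get_direct_lake_lakehouse` appears to return the lakehouse name together with its id. A hedged usage sketch, assuming the `directlake` package re-exports the function and with placeholder names (the exact return shape is inferred, not shown in this diff):

    from sempy_labs.directlake import get_direct_lake_lakehouse

    # Resolve the lakehouse backing a Direct Lake model; per the hunk above,
    # a non-Direct Lake model now raises ValueError with the red-dot prefix.
    lakehouse_name, lakehouse_id = get_direct_lake_lakehouse(
        dataset="MyDataset", workspace="My Workspace"
    )
    print(lakehouse_name, lakehouse_id)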
sempy_labs/directlake/_get_shared_expression.py CHANGED
@@ -6,7 +6,7 @@ from sempy_labs._helper_functions import (
 )
 from sempy_labs._list_functions import list_lakehouses
 from typing import Optional
-
+import sempy_labs._icons as icons
 
 def get_shared_expression(
     lakehouse: Optional[str] = None, workspace: Optional[str] = None
@@ -31,7 +31,7 @@ def get_shared_expression(
     """
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    if lakehouse == None:
+    if lakehouse is None:
        lakehouse_id = fabric.get_lakehouse_id()
        lakehouse = resolve_lakehouse_name(lakehouse_id)
 
@@ -44,7 +44,7 @@ def get_shared_expression(
 
     if provStatus == "InProgress":
        print(
-            f"The SQL Endpoint for the '{lakehouse}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
+            f"{icons.red_dot} The SQL Endpoint for the '{lakehouse}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
        )
        return
 
sempy_labs/directlake/_guardrails.py CHANGED
@@ -6,7 +6,7 @@ from typing import List, Optional, Union
 
 def get_direct_lake_guardrails():
     """
-    Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query based on Microsoft's online documentation.
+    Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query based on Microsoft's `online documentation <https://learn.microsoft.com/power-bi/enterprise/directlake-overview>`_.
 
     Parameters
    ----------
@@ -44,7 +44,7 @@ def get_sku_size(workspace: Optional[str] = None):
         The SKU size for a workspace.
    """
 
-    if workspace == None:
+    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)
 
@@ -65,7 +65,7 @@ def get_sku_size(workspace: Optional[str] = None):
 def get_directlake_guardrails_for_sku(sku_size: str):
     """
    Shows the guardrails for Direct Lake based on the SKU used by your workspace's capacity.
-    *Use the result of the 'get_sku_size' function as an input for this function's skuSize parameter.*
+    * Use the result of the 'get_sku_size' function as an input for this function's sku_size parameter.*
 
     Parameters
    ----------
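As the corrected docstring above says, `get_directlake_guardrails_for_sku` takes the output of `get_sku_size`. A short usage sketch, assuming both functions are re-exported from `sempy_labs.directlake` and with a placeholder workspace name:

    from sempy_labs.directlake import get_sku_size, get_directlake_guardrails_for_sku

    sku = get_sku_size(workspace="My Workspace")      # SKU size string for the capacity
    guardrails = get_directlake_guardrails_for_sku(sku)
    print(guardrails)  # Direct Lake fallback guardrails for that SKU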
sempy_labs/directlake/_list_directlake_model_calc_tables.py CHANGED
@@ -2,14 +2,15 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._list_functions import list_tables, list_annotations
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
-
+import sempy_labs._icons as icons
 
 @log
 def list_direct_lake_model_calc_tables(dataset: str, workspace: Optional[str] = None):
     """
-    Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery.
+    Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery).
 
     Parameters
    ----------
@@ -26,29 +27,31 @@ def list_direct_lake_model_calc_tables(dataset: str, workspace: Optional[str] =
         A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations.
    """
 
-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name()
 
     df = pd.DataFrame(columns=["Table Name", "Source Expression"])
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
-
-    if len(dfP_filt) == 0:
-        print(f"The '{dataset}' semantic model is not in Direct Lake mode.")
-    else:
-        dfA = list_annotations(dataset, workspace)
-        dfT = list_tables(dataset, workspace)
-        dfA_filt = dfA[
-            (dfA["Object Type"] == "Model") & (dfA["Annotation Name"].isin(dfT["Name"]))
-        ]
-
-        for i, r in dfA_filt.iterrows():
-            tName = r["Annotation Name"]
-            se = r["Annotation Value"]
-
-            new_data = {"Table Name": tName, "Source Expression": se}
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+
+        is_direct_lake = tom.is_direct_lake()
+
+        if not is_direct_lake:
+            print(f"{icons.yellow_dot} The '{dataset}' semantic model is not in Direct Lake mode.")
+        else:
+            dfA = list_annotations(dataset, workspace)
+            dfT = list_tables(dataset, workspace)
+            dfA_filt = dfA[
+                (dfA["Object Type"] == "Model") & (dfA["Annotation Name"].isin(dfT["Name"]))
+            ]
+
+            for i, r in dfA_filt.iterrows():
+                tName = r["Annotation Name"]
+                se = r["Annotation Value"]
+
+                new_data = {"Table Name": tName, "Source Expression": se}
+                df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+        return df