semantic-link-labs 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of semantic-link-labs has been flagged as potentially problematic.

Files changed (54)
  1. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -13
  5. sempy_labs/_ai.py +43 -24
  6. sempy_labs/_clear_cache.py +4 -5
  7. sempy_labs/_connections.py +77 -70
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +55 -44
  10. sempy_labs/_helper_functions.py +13 -6
  11. sempy_labs/_icons.py +14 -0
  12. sempy_labs/_list_functions.py +491 -304
  13. sempy_labs/_model_auto_build.py +4 -3
  14. sempy_labs/_model_bpa.py +131 -1118
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +14 -12
  17. sempy_labs/_one_lake_integration.py +11 -5
  18. sempy_labs/_query_scale_out.py +89 -81
  19. sempy_labs/_refresh_semantic_model.py +16 -10
  20. sempy_labs/_translations.py +213 -287
  21. sempy_labs/_vertipaq.py +53 -37
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  25. sempy_labs/directlake/_fallback.py +5 -3
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -2
  28. sempy_labs/directlake/_guardrails.py +3 -3
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +132 -9
  33. sempy_labs/directlake/_warm_cache.py +6 -3
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  36. sempy_labs/lakehouse/_lakehouse.py +2 -1
  37. sempy_labs/lakehouse/_shortcuts.py +19 -12
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -15
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +2 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +8 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +10 -5
  48. sempy_labs/report/_report_functions.py +67 -29
  49. sempy_labs/report/_report_rebind.py +9 -8
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +555 -152
  52. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0

sempy_labs/directlake/_update_directlake_partition_entity.py

@@ -1,17 +1,18 @@
-import sempy
 import sempy.fabric as fabric
 from sempy_labs.tom import connect_semantic_model
 from sempy_labs._helper_functions import resolve_lakehouse_name
+from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from typing import List, Optional, Union
 import sempy_labs._icons as icons
 
+
 def update_direct_lake_partition_entity(
     dataset: str,
     table_name: Union[str, List[str]],
     entity_name: Union[str, List[str]],
     workspace: Optional[str] = None,
     lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None
+    lakehouse_workspace: Optional[str] = None,
 ):
     """
     Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.
@@ -53,22 +54,143 @@ def update_direct_lake_partition_entity(
         entity_name = [entity_name]
 
     if len(table_name) != len(entity_name):
-        raise ValueError(f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length.")
+        raise ValueError(
+            f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length."
+        )
 
     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:
 
         if not tom.is_direct_lake():
-            raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode.")
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+            )
 
         for tName in table_name:
             i = table_name.index(tName)
             eName = entity_name[i]
-            try:
-                tom.model.Tables[tName].Partitions[0].EntityName = eName
+            part_name = next(
+                (
+                    p.Name
+                    for t in tom.model.Tables
+                    for p in t.Partitions
+                    if t.Name == tName
+                ),
+                None,
+            )
+
+            if part_name is None:
+                raise ValueError(
+                    f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated."
+                )
+            else:
+                tom.model.Tables[tName].Partitions[part_name].EntityName = eName
                 print(
-                    f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+                    f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table "
+                    f"in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
                 )
-            except Exception as e:
-                raise ValueError(f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated.") from e
+
+
+def add_table_to_direct_lake_semantic_model(
+    dataset: str,
+    table_name: str,
+    lakehouse_table_name: str,
+    workspace: Optional[str] = None,
+):
+    """
+    Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    table_name : str
+        Name of the table in the semantic model.
+    lakehouse_table_name : str
+        The name of the Fabric lakehouse table.
+    workspace : str, default=None
+        The name of the Fabric workspace in which the semantic model resides.
+        Defaults to None, which resolves to the workspace of the attached lakehouse
+        or, if no lakehouse is attached, to the workspace of the notebook.
+    """
+
+    import Microsoft.AnalysisServices.Tabular as TOM
+    from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
+    from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+    from sempy_labs.directlake._get_directlake_lakehouse import (
+        get_direct_lake_lakehouse,
+    )
+
+    workspace = fabric.resolve_workspace_name(workspace)
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=False, workspace=workspace
+    ) as tom:
+
+        if tom.is_direct_lake() is False:
+            raise ValueError(
+                "This function is only valid for Direct Lake semantic models."
+            )
+
+        if any(
+            p.Name == lakehouse_table_name
+            for p in tom.all_partitions()
+            if p.SourceType == TOM.PartitionSourceType.Entity
+        ):
+            t_name = next(
+                p.Parent.Name
+                for p in tom.all_partitions()
+                if p.Name == lakehouse_table_name
+                and p.SourceType == TOM.PartitionSourceType.Entity
+            )
+            raise ValueError(
+                f"The '{lakehouse_table_name}' table already exists in the '{dataset}' semantic model within the '{workspace}' workspace as the '{t_name}' table."
+            )
+
+        if any(t.Name == table_name for t in tom.model.Tables):
+            raise ValueError(
+                f"The '{table_name}' table already exists in the '{dataset}' semantic model within the '{workspace}' workspace."
+            )
+
+        lake_name, lake_id = get_direct_lake_lakehouse(
+            dataset=dataset, workspace=workspace
+        )
+
+        dfL = get_lakehouse_tables(lakehouse=lake_name, workspace=workspace)
+        dfL_filt = dfL[dfL["Table Name"] == lakehouse_table_name]
+
+        if len(dfL_filt) == 0:
+            raise ValueError(
+                f"The '{lakehouse_table_name}' table does not exist in the '{lake_name}' lakehouse within the '{workspace}' workspace."
+            )
+
+        dfLC = get_lakehouse_columns(lakehouse=lake_name, workspace=workspace)
+        dfLC_filt = dfLC[dfLC["Table Name"] == lakehouse_table_name]
+
+        tom.add_table(name=table_name)
+        print(
+            f"{icons.green_dot} The '{table_name}' table has been added to the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
+        tom.add_entity_partition(
+            table_name=table_name, entity_name=lakehouse_table_name
+        )
+        print(
+            f"{icons.green_dot} The '{lakehouse_table_name}' partition has been added to the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
+
+        for i, r in dfLC_filt.iterrows():
+            lakeCName = r["Column Name"]
+            dType = r["Data Type"]
+            dt = icons.data_type_mapping.get(dType)
+            tom.add_data_column(
+                table_name=table_name,
+                column_name=lakeCName,
+                source_column=lakeCName,
+                data_type=dt,
+            )
+            print(
+                f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset}' semantic model within the '{workspace}' workspace."
+            )
+
+    refresh_semantic_model(dataset=dataset, tables=table_name, workspace=workspace)
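
The new partition lookup resolves the partition by name rather than assuming index 0, and the added add_table_to_direct_lake_semantic_model builds the table, its entity partition, and its columns from lakehouse metadata before refreshing the model. A minimal usage sketch (dataset, workspace, and table names are illustrative, and the imports assume both functions are exported from sempy_labs.directlake):

    from sempy_labs.directlake import (
        update_direct_lake_partition_entity,
        add_table_to_direct_lake_semantic_model,
    )

    # Remap two model tables to different lakehouse tables (parallel lists).
    update_direct_lake_partition_entity(
        dataset="MyModel",
        table_name=["Sales", "Customers"],
        entity_name=["fact_sales_v2", "dim_customers"],
        workspace="MyWorkspace",
    )

    # New in 0.6.0: add a lakehouse table and all of its columns to the model;
    # the function finishes by calling refresh_semantic_model on the new table.
    add_table_to_direct_lake_semantic_model(
        dataset="MyModel",
        table_name="Products",
        lakehouse_table_name="dim_products",
        workspace="MyWorkspace",
    )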

sempy_labs/directlake/_warm_cache.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from tqdm.auto import tqdm
@@ -45,7 +44,9 @@ def warm_direct_lake_cache_perspective(
 
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode.")
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode."
+        )
 
     dfPersp = fabric.list_perspectives(dataset=dataset, workspace=workspace)
     dfPersp["DAX Object Name"] = format_dax_object_name(
@@ -54,7 +55,9 @@ def warm_direct_lake_cache_perspective(
     dfPersp_filt = dfPersp[dfPersp["Perspective Name"] == perspective]
 
     if len(dfPersp_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The '{perspective} perspective does not exist or contains no objects within the '{dataset}' semantic model in the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} The '{perspective}' perspective does not exist or contains no objects within the '{dataset}' semantic model in the '{workspace}' workspace."
+        )
 
     dfPersp_c = dfPersp_filt[dfPersp_filt["Object Type"] == "Column"]
 

sempy_labs/lakehouse/_get_lakehouse_columns.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from pyspark.sql import SparkSession
@@ -10,6 +9,7 @@ from sempy_labs._helper_functions import (
 from typing import Optional
 from sempy._utils._log import log
 
+
 @log
 def get_lakehouse_columns(
     lakehouse: Optional[str] = None, workspace: Optional[str] = None

sempy_labs/lakehouse/_get_lakehouse_tables.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from pyspark.sql import SparkSession
@@ -18,6 +17,7 @@ from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
 
+
 @log
 def get_lakehouse_tables(
     lakehouse: Optional[str] = None,
@@ -174,8 +174,10 @@ def get_lakehouse_tables(
     if export:
         lakeAttach = lakehouse_attached()
         if lakeAttach is False:
-            raise ValueError(f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
-
+            raise ValueError(
+                f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+            )
+
         spark = SparkSession.builder.getOrCreate()
 
         lakehouse_id = fabric.get_lakehouse_id()

sempy_labs/lakehouse/_lakehouse.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 from tqdm.auto import tqdm
 from pyspark.sql import SparkSession
@@ -7,6 +6,7 @@ from typing import List, Optional, Union
 import sempy_labs._icons as icons
 from sempy._utils._log import log
 
+
 def lakehouse_attached() -> bool:
     """
     Identifies if a lakehouse is attached to the notebook.
@@ -25,6 +25,7 @@ def lakehouse_attached() -> bool:
     else:
         return False
 
+
 @log
 def optimize_lakehouse_tables(
     tables: Optional[Union[str, List[str]]] = None,
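
lakehouse_attached is the guard that export paths in this release use before writing to the Files section. The same check can be made up front in a notebook; a sketch, assuming both helpers are exported from sempy_labs.lakehouse:

    from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached

    # Check up front instead of letting the export path raise ValueError.
    if lakehouse_attached():
        dfL = get_lakehouse_tables(export=True)
    else:
        print("Attach a lakehouse to this notebook before exporting.")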

sempy_labs/lakehouse/_shortcuts.py

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
 )
 from typing import Optional
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_shortcut_onelake(
@@ -74,12 +74,15 @@ def create_shortcut_onelake(
         )
         if response.status_code == 201:
             print(
-                f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace} workspace. It is based on the '{table_name}' table in the '{source_lakehouse}' lakehouse within the '{source_workspace}' workspace."
+                f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse}' lakehouse within"
+                f" the '{destination_workspace}' workspace. It is based on the '{table_name}' table in the '{source_lakehouse}' lakehouse within the '{source_workspace}' workspace."
             )
         else:
             print(response.status_code)
     except Exception as e:
-        raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table.") from e
+        raise ValueError(
+            f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table."
+        ) from e
 
 
 def create_shortcut(
@@ -114,7 +117,9 @@ def create_shortcut(
     sourceValues = list(source_titles.keys())
 
     if source not in sourceValues:
-        raise ValueError(f"{icons.red_dot} The 'source' parameter must be one of these values: {sourceValues}.")
+        raise ValueError(
+            f"{icons.red_dot} The 'source' parameter must be one of these values: {sourceValues}."
+        )
 
     sourceTitle = source_titles[source]
 
@@ -147,12 +152,15 @@ def create_shortcut(
         )
         if response.status_code == 201:
             print(
-                f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse}' lakehouse within the '{workspace} workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
+                f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse}' lakehouse within"
+                f" the '{workspace}' workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
            )
         else:
             print(response.status_code)
     except Exception as e:
-        raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table.") from e
+        raise ValueError(
+            f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table."
+        ) from e
 
 
 def delete_shortcut(
@@ -187,9 +195,8 @@ def delete_shortcut(
         f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/Tables/{shortcut_name}"
     )
 
-    if response.status_code == 200:
-        print(
-            f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace}' workspace has been deleted."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} The '{shortcut_name}' has not been deleted.")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' lakehouse within the '{workspace}' workspace has been deleted."
+    )

sempy_labs/migration/__init__.py

@@ -27,5 +27,5 @@ __all__ = [
     "migrate_model_objects_to_semantic_model",
     "migrate_tables_columns_to_semantic_model",
     "migration_validation",
-    "refresh_calc_tables"
+    "refresh_calc_tables",
 ]

sempy_labs/migration/_create_pqt_file.py

@@ -1,6 +1,7 @@
-import sempy
 import sempy.fabric as fabric
-import json, os, shutil
+import json
+import os
+import shutil
 import xml.etree.ElementTree as ET
 from sempy_labs._list_functions import list_tables
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
@@ -11,10 +12,13 @@ import sempy_labs._icons as icons
 
 @log
 def create_pqt_file(
-    dataset: str, workspace: Optional[str] = None, file_name: Optional[str] = 'PowerQueryTemplate'
+    dataset: str,
+    workspace: Optional[str] = None,
+    file_name: Optional[str] = "PowerQueryTemplate",
 ):
     """
-    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is saved within the Files section of your lakehouse.
+    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
+    saved within the Files section of your lakehouse.
 
     Parameters
     ----------
@@ -31,7 +35,9 @@ def create_pqt_file(
     lakeAttach = lakehouse_attached()
 
     if lakeAttach is False:
-        raise ValueError(f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+        raise ValueError(
+            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+        )
 
     workspace = fabric.resolve_workspace_name(workspace)
 
@@ -188,16 +194,16 @@ def create_pqt_file(
     ns = "http://schemas.openxmlformats.org/package/2006/content-types"
     ET.register_namespace("", ns)
     types = ET.Element("{%s}Types" % ns)
-    default1 = ET.SubElement(
-        types,
-        "{%s}Default" % ns,
-        {"Extension": "json", "ContentType": "application/json"},
-    )
-    default2 = ET.SubElement(
-        types,
-        "{%s}Default" % ns,
-        {"Extension": "pq", "ContentType": "application/x-ms-m"},
-    )
+    # default1 = ET.SubElement(
+    #     types,
+    #     "{%s}Default" % ns,
+    #     {"Extension": "json", "ContentType": "application/json"},
+    # )
+    # default2 = ET.SubElement(
+    #     types,
+    #     "{%s}Default" % ns,
+    #     {"Extension": "pq", "ContentType": "application/x-ms-m"},
+    # )
     xmlDocument = ET.ElementTree(types)
     xmlFileName = "[Content_Types].xml"
     xmlFilePath = os.path.join(subFolderPath, xmlFileName)
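
With the reflowed signature, file_name controls the name of the .pqt file written to the attached lakehouse. A minimal call (names are illustrative; the function raises ValueError when no lakehouse is attached):

    from sempy_labs.migration import create_pqt_file

    # Writes MyTemplate.pqt to the Files section of the attached lakehouse.
    create_pqt_file(
        dataset="MyModel", workspace="MyWorkspace", file_name="MyTemplate"
    )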

sempy_labs/migration/_migrate_calctables_to_lakehouse.py

@@ -1,7 +1,9 @@
 import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import re, datetime, time
+import re
+import datetime
+import time
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
@@ -10,7 +12,7 @@ from sempy_labs._helper_functions import (
 )
 from sempy_labs.tom import connect_semantic_model
 from pyspark.sql import SparkSession
-from typing import List, Optional, Union
+from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 
@@ -25,7 +27,8 @@ def migrate_calc_tables_to_lakehouse(
     lakehouse_workspace: Optional[str] = None,
 ):
     """
-    Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model. The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations.
+    Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model.
+    The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations.
 
     Parameters
     ----------
@@ -67,8 +70,6 @@ def migrate_calc_tables_to_lakehouse(
     else:
         lakehouse_id = resolve_lakehouse_id(lakehouse, lakehouse_workspace)
 
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
-    # dfC['Column Object'] = "'" + dfC['Table Name'] + "'[" + dfC['Column Name'] + "]"
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
     dfP_filt = dfP_filt[
@@ -113,7 +114,8 @@ def migrate_calc_tables_to_lakehouse(
         for t in tom.model.Tables:
             if tom.is_auto_date_table(table_name=t.Name):
                 print(
-                    f"{icons.yellow_dot} The '{t.Name}' table is an auto-datetime table and is not supported in the Direct Lake migration process. Please create a proper Date/Calendar table in your lakehoues and use it in your Direct Lake model."
+                    f"{icons.yellow_dot} The '{t.Name}' table is an auto-datetime table and is not supported in the Direct Lake migration process. "
+                    "Please create a proper Date/Calendar table in your lakehouse and use it in your Direct Lake model."
                 )
             else:
                 for p in t.Partitions:
@@ -182,7 +184,7 @@ def migrate_calc_tables_to_lakehouse(
                             == "CalculatedTableColumn"
                             and c.SourceColumn == col
                         )
-                    except:
+                    except Exception:
                         dataType = next(
                             str(c.DataType)
                             for c in tom.model.Tables[
@@ -240,7 +242,7 @@ def migrate_calc_tables_to_lakehouse(
                         name=t.Name,
                         value=daxQuery,
                     )
-                except Exception as e:
+                except Exception:
                     if (
                         datetime.datetime.now()
                         - start_time2
@@ -250,13 +252,14 @@ def migrate_calc_tables_to_lakehouse(
                     time.sleep(1)
 
                 print(
-                    f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+                    f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' "
+                    f"in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
                 )
-            except:
+            except Exception:
                 print(
                     f"{icons.red_dot} Failed to create calculated table '{t.Name}' as a delta table in the lakehouse."
                 )
-        except Exception as e:
+        except Exception:
             if datetime.datetime.now() - start_time > timeout:
                 break
             time.sleep(1)
@@ -419,11 +422,11 @@ def migrate_field_parameters(
                 print(
                     f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace}' workspace."
                 )
-            except:
+            except Exception:
                 print(
                     f"{icons.red_dot} The '{tName}' table has not been added as a field parameter."
                 )
-        except Exception as e:
+        except Exception:
             if datetime.datetime.now() - start_time > timeout:
                 break
             time.sleep(1)
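
The sweep from bare except: to except Exception: in the retry loops above is more than style: a bare except: also traps KeyboardInterrupt and SystemExit, which can make a stuck migration loop impossible to cancel from a notebook. A standalone illustration:

    import time

    for attempt in range(3):
        try:
            raise RuntimeError("transient failure")  # stand-in for a flaky call
        except Exception:
            # Ordinary errors are retried; Ctrl-C (KeyboardInterrupt) still
            # propagates, which a bare `except:` would swallow.
            time.sleep(1)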

sempy_labs/migration/_migrate_calctables_to_semantic_model.py

@@ -1,8 +1,9 @@
-import sempy
 import sempy.fabric as fabric
-import re, datetime, time
+import re
+import datetime
+import time
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from sempy_labs._helper_functions import resolve_lakehouse_name
+from sempy_labs._helper_functions import resolve_lakehouse_name, format_dax_object_name
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
@@ -66,7 +67,7 @@ def migrate_calc_tables_to_semantic_model(
     dfC_filt = dfC[
         (dfC["Table Name"].isin(dfP_filt["Table Name"]))
     ]  # & (dfC['Type'] == 'CalculatedTableColumn')]
-    # dfA = list_annotations(new_dataset, new_dataset_workspace)
+    # dfA = fabric.list_annotations(new_dataset, new_dataset_workspace)
     # dfA_filt = dfA[(dfA['Object Type'] == 'Model') & ~ (dfA['Annotation Value'].str.contains('NAMEOF'))]
 
     if len(dfP_filt) == 0:
@@ -87,10 +88,7 @@ def migrate_calc_tables_to_semantic_model(
             success = True
             for tName in dfC_filt["Table Name"].unique():
                 if tName.lower() in lc["Table Name"].values:
-
-                    try:
-                        tom.model.Tables[tName]
-                    except:
+                    if not any(t.Name == tName for t in tom.model.Tables):
                         tom.add_table(name=tName)
                         tom.add_entity_partition(
                             table_name=tName,
@@ -112,11 +110,11 @@ def migrate_calc_tables_to_semantic_model(
                             & (dfC["Column Name"] == cName),
                             "Data Type",
                         ].iloc[0]
-                        cType = dfC.loc[
-                            (dfC["Table Name"] == tName)
-                            & (dfC["Column Name"] == cName),
-                            "Type",
-                        ].iloc[0]
+                        # cType = dfC.loc[
+                        #     (dfC["Table Name"] == tName)
+                        #     & (dfC["Column Name"] == cName),
+                        #     "Type",
+                        # ].iloc[0]
 
                         # av = tom.get_annotation_value(object = tom.model, name = tName)
 
@@ -127,9 +125,10 @@ def migrate_calc_tables_to_semantic_model(
 
                         matches = re.findall(pattern, scName)
                         lakeColumn = matches[0].replace(" ", "")
-                        try:
-                            tom.model.Tables[tName].Columns[cName]
-                        except:
+                        if not any(
+                            c.Name == cName and c.Parent.Name == tName
+                            for c in tom.all_columns()
+                        ):
                             tom.add_data_column(
                                 table_name=tName,
                                 column_name=cName,
@@ -137,14 +136,14 @@ def migrate_calc_tables_to_semantic_model(
                                 data_type=cDataType,
                             )
                             print(
-                                f"{icons.green_dot} The '{tName}'[{cName}] column has been added."
+                                f"{icons.green_dot} The {format_dax_object_name(tName, cName)} column has been added."
                             )
 
             print(
                 f"\n{icons.green_dot} All viable calculated tables have been added to the model."
             )
 
-        except Exception as e:
+        except Exception:
             if datetime.datetime.now() - start_time > timeout:
                 break
             time.sleep(1)
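
The try/except existence probes on TOM collections are now explicit membership tests, and the added-column message is built with format_dax_object_name, which renders the fully qualified DAX name ('Table'[Column]). A read-only sketch of the same pattern (model and object names are illustrative):

    from sempy_labs.tom import connect_semantic_model
    from sempy_labs._helper_functions import format_dax_object_name

    with connect_semantic_model(
        dataset="MyModel", readonly=True, workspace="MyWorkspace"
    ) as tom:
        # Membership test instead of a try/except probe on the collection.
        if not any(t.Name == "Sales" for t in tom.model.Tables):
            print(f"{format_dax_object_name('Sales', 'Amount')} is missing.")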