semantic-link-labs 0.4.1-py3-none-any.whl → 0.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.
Files changed (53)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.5.0.dist-info/RECORD +53 -0
  3. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +51 -27
  5. sempy_labs/_ai.py +32 -51
  6. sempy_labs/_clear_cache.py +2 -3
  7. sempy_labs/_connections.py +39 -38
  8. sempy_labs/_dax.py +5 -9
  9. sempy_labs/_generate_semantic_model.py +15 -21
  10. sempy_labs/_helper_functions.py +20 -25
  11. sempy_labs/_icons.py +6 -0
  12. sempy_labs/_list_functions.py +1172 -392
  13. sempy_labs/_model_auto_build.py +3 -5
  14. sempy_labs/_model_bpa.py +20 -24
  15. sempy_labs/_model_dependencies.py +7 -14
  16. sempy_labs/_one_lake_integration.py +14 -24
  17. sempy_labs/_query_scale_out.py +13 -31
  18. sempy_labs/_refresh_semantic_model.py +8 -18
  19. sempy_labs/_translations.py +5 -5
  20. sempy_labs/_vertipaq.py +11 -18
  21. sempy_labs/directlake/_directlake_schema_compare.py +11 -15
  22. sempy_labs/directlake/_directlake_schema_sync.py +35 -40
  23. sempy_labs/directlake/_fallback.py +3 -7
  24. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  25. sempy_labs/directlake/_get_shared_expression.py +5 -11
  26. sempy_labs/directlake/_guardrails.py +5 -7
  27. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -26
  28. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -4
  29. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +11 -16
  30. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -15
  31. sempy_labs/directlake/_warm_cache.py +10 -15
  32. sempy_labs/lakehouse/__init__.py +0 -2
  33. sempy_labs/lakehouse/_get_lakehouse_columns.py +4 -3
  34. sempy_labs/lakehouse/_get_lakehouse_tables.py +12 -11
  35. sempy_labs/lakehouse/_lakehouse.py +6 -7
  36. sempy_labs/lakehouse/_shortcuts.py +10 -111
  37. sempy_labs/migration/__init__.py +4 -2
  38. sempy_labs/migration/_create_pqt_file.py +5 -14
  39. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  40. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  41. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +3 -8
  42. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  43. sempy_labs/migration/_migration_validation.py +5 -164
  44. sempy_labs/migration/_refresh_calc_tables.py +5 -5
  45. sempy_labs/report/__init__.py +2 -2
  46. sempy_labs/report/_generate_report.py +14 -19
  47. sempy_labs/report/_report_functions.py +41 -83
  48. sempy_labs/report/_report_rebind.py +43 -44
  49. sempy_labs/tom/__init__.py +6 -0
  50. sempy_labs/{_tom.py → tom/_model.py} +274 -337
  51. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/LICENSE +0 -0
  53. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/top_level.txt +0 -0
sempy_labs/lakehouse/_get_lakehouse_columns.py
@@ -8,11 +8,12 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
 )
 from typing import Optional
+from sempy._utils._log import log
 
-
+@log
 def get_lakehouse_columns(
     lakehouse: Optional[str] = None, workspace: Optional[str] = None
-):
+) -> pd.DataFrame:
     """
     Shows the tables and columns of a lakehouse and their respective properties.
 
@@ -47,7 +48,7 @@ def get_lakehouse_columns(
 
     workspace = fabric.resolve_workspace_name(workspace)
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
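Three recurring 0.5.0 patterns show up in this first hunk: functions gain the @log telemetry decorator from sempy._utils._log, signatures gain explicit return annotations, and == None comparisons become the idiomatic is None. A minimal usage sketch of the annotated function (assuming it is re-exported from sempy_labs.lakehouse, as the package layout suggests):

    from sempy_labs.lakehouse import get_lakehouse_columns

    # With no arguments, lakehouse and workspace resolve to the notebook's
    # attached defaults; the new annotation documents the DataFrame return.
    df = get_lakehouse_columns()
    print(df.head())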
sempy_labs/lakehouse/_get_lakehouse_tables.py
@@ -1,3 +1,4 @@
+import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from pyspark.sql import SparkSession
@@ -14,15 +15,17 @@ from sempy_labs.directlake._guardrails import (
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from typing import Optional
+import sempy_labs._icons as icons
+from sempy._utils._log import log
 
-
+@log
 def get_lakehouse_tables(
     lakehouse: Optional[str] = None,
     workspace: Optional[str] = None,
     extended: Optional[bool] = False,
     count_rows: Optional[bool] = False,
     export: Optional[bool] = False,
-):
+) -> pd.DataFrame:
     """
     Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.
 
@@ -61,7 +64,7 @@ def get_lakehouse_tables(
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
@@ -80,7 +83,7 @@
         tType = i["type"]
         tFormat = i["format"]
         tLocation = i["location"]
-        if extended == False:
+        if not extended:
             new_data = {
                 "Workspace Name": workspace,
                 "Lakehouse Name": lakehouse,
@@ -170,11 +173,9 @@
 
     if export:
         lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
-            print(
-                f"In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-            )
-            return
+        if lakeAttach is False:
+            raise ValueError(f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+
         spark = SparkSession.builder.getOrCreate()
 
         lakehouse_id = fabric.get_lakehouse_id()
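The release's broader error-handling shift appears here: print-and-return soft failures become raised ValueErrors. Callers that previously had to scrape stdout can now catch the failure explicitly; a sketch, assuming the function is re-exported from sempy_labs.lakehouse:

    from sempy_labs.lakehouse import get_lakehouse_tables

    try:
        df = get_lakehouse_tables(export=True)
    except ValueError as e:
        # Raised in 0.5.0 when no lakehouse is attached to the notebook
        print(f"Export skipped: {e}")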
@@ -234,7 +235,7 @@
             export_df[c] = export_df[c].astype(bool)
 
         print(
-            f"Saving Lakehouse table properties to the '{lakeTName}' table in the lakehouse...\n"
+            f"{icons.in_progress} Saving Lakehouse table properties to the '{lakeTName}' table in the lakehouse...\n"
         )
         now = datetime.datetime.now()
         export_df["Timestamp"] = now
@@ -244,7 +245,7 @@
         spark_df = spark.createDataFrame(export_df)
         spark_df.write.mode("append").format("delta").saveAsTable(lakeTName)
         print(
-            f"\u2022 Lakehouse table properties have been saved to the '{lakeTName}' delta table."
+            f"{icons.bullet} Lakehouse table properties have been saved to the '{lakeTName}' delta table."
         )
 
     return df
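Status glyphs are now centralized in sempy_labs._icons (the changed-files list shows it gaining six lines). This diff only pins down icons.bullet, which replaces the inline "\u2022" literal; the other values below are assumptions for illustration only:

    # sempy_labs/_icons.py -- plausible shape; only `bullet` is confirmed
    # by this diff, the remaining glyph values are assumptions.
    green_dot = "\U0001F7E2"   # assumption: success marker
    red_dot = "\U0001F534"     # assumption: error marker
    in_progress = "\u231B"     # assumption: progress marker
    bullet = "\u2022"          # confirmed: replaces the literal "\u2022" above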
sempy_labs/lakehouse/_lakehouse.py
@@ -4,7 +4,8 @@ from tqdm.auto import tqdm
 from pyspark.sql import SparkSession
 from sempy_labs._helper_functions import resolve_lakehouse_name
 from typing import List, Optional, Union
-
+import sempy_labs._icons as icons
+from sempy._utils._log import log
 
 def lakehouse_attached() -> bool:
     """
@@ -24,7 +25,7 @@ def lakehouse_attached() -> bool:
     else:
         return False
 
-
+@log
 def optimize_lakehouse_tables(
     tables: Optional[Union[str, List[str]]] = None,
     lakehouse: Optional[str] = None,
@@ -49,11 +50,9 @@
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
     from delta import DeltaTable
 
-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
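The three-line workspace fallback collapses into a single call, which implies fabric.resolve_workspace_name itself handles the None case. That fallback is inferred from how 0.5.0 uses the call here, not from sempy's documentation:

    import sempy.fabric as fabric

    # One call now covers the default case; None is assumed to fall back
    # to the notebook's current workspace, matching the removed branch.
    workspace = fabric.resolve_workspace_name(None)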
@@ -80,6 +79,6 @@
         deltaTable = DeltaTable.forPath(spark, tablePath)
         deltaTable.optimize().executeCompaction()
         print(
-            f"The '{tableName}' table has been optimized. ({str(i)}/{str(tableCount)})"
+            f"{icons.green_dot} The '{tableName}' table has been optimized. ({str(i)}/{str(tableCount)})"
         )
         i += 1
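optimize_lakehouse_tables compacts delta tables through the standard delta-spark API, now reporting success with icons.green_dot. The core of what it runs per table, as a standalone sketch (the table path is illustrative, not taken from the diff):

    from pyspark.sql import SparkSession
    from delta import DeltaTable

    spark = SparkSession.builder.getOrCreate()

    # Bin-compaction of one delta table; "Tables/sales" is an illustrative path.
    delta_table = DeltaTable.forPath(spark, "Tables/sales")
    delta_table.optimize().executeCompaction()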
sempy_labs/lakehouse/_shortcuts.py
@@ -1,12 +1,11 @@
 import sempy
 import sempy.fabric as fabric
-import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     resolve_workspace_name_and_id,
 )
-from typing import List, Optional, Union
+from typing import Optional
 import sempy_labs._icons as icons
 
 
@@ -42,7 +41,7 @@ def create_shortcut_onelake(
     sourceWorkspaceId = fabric.resolve_workspace_id(source_workspace)
     sourceLakehouseId = resolve_lakehouse_id(source_lakehouse, source_workspace)
 
-    if destination_workspace == None:
+    if destination_workspace is None:
         destination_workspace = source_workspace
 
     destinationWorkspaceId = fabric.resolve_workspace_id(destination_workspace)
@@ -50,7 +49,7 @@
         destination_lakehouse, destination_workspace
     )
 
-    if shortcut_name == None:
+    if shortcut_name is None:
         shortcut_name = table_name
 
     client = fabric.FabricRestClient()
@@ -80,9 +79,7 @@
         else:
             print(response.status_code)
     except Exception as e:
-        print(
-            f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table: {e}"
-        )
+        raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table.") from e
 
 
 def create_shortcut(
@@ -117,16 +114,13 @@ def create_shortcut(
     sourceValues = list(source_titles.keys())
 
     if source not in sourceValues:
-        print(
-            f"{icons.red_dot} The 'source' parameter must be one of these values: {sourceValues}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The 'source' parameter must be one of these values: {sourceValues}.")
 
     sourceTitle = source_titles[source]
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
     else:
         lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
@@ -157,103 +151,8 @@
             )
         else:
             print(response.status_code)
-    except:
-        print(
-            f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table."
-        )
-
-
-def list_shortcuts(
-    lakehouse: Optional[str] = None, workspace: Optional[str] = None
-) -> pd.DataFrame:
-    """
-    Shows all shortcuts which exist in a Fabric lakehouse.
-
-    Parameters
-    ----------
-    lakehouse : str, default=None
-        The Fabric lakehouse name.
-        Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The name of the Fabric workspace in which lakehouse resides.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
-    """
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    if lakehouse == None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
-
-    df = pd.DataFrame(
-        columns=[
-            "Shortcut Name",
-            "Shortcut Path",
-            "Source",
-            "Source Lakehouse Name",
-            "Source Workspace Name",
-            "Source Path",
-            "Source Connection ID",
-            "Source Location",
-            "Source SubPath",
-        ]
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.get(
-        f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
-    )
-    if response.status_code == 200:
-        for s in response.json()["value"]:
-            shortcutName = s["name"]
-            shortcutPath = s["path"]
-            source = list(s["target"].keys())[0]
-            (
-                sourceLakehouseName,
-                sourceWorkspaceName,
-                sourcePath,
-                connectionId,
-                location,
-                subpath,
-            ) = (None, None, None, None, None, None)
-            if source == "oneLake":
-                sourceLakehouseId = s["target"][source]["itemId"]
-                sourcePath = s["target"][source]["path"]
-                sourceWorkspaceId = s["target"][source]["workspaceId"]
-                sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
-                sourceLakehouseName = resolve_lakehouse_name(
-                    sourceLakehouseId, sourceWorkspaceName
-                )
-            else:
-                connectionId = s["target"][source]["connectionId"]
-                location = s["target"][source]["location"]
-                subpath = s["target"][source]["subpath"]
-
-            new_data = {
-                "Shortcut Name": shortcutName,
-                "Shortcut Path": shortcutPath,
-                "Source": source,
-                "Source Lakehouse Name": sourceLakehouseName,
-                "Source Workspace Name": sourceWorkspaceName,
-                "Source Path": sourcePath,
-                "Source Connection ID": connectionId,
-                "Source Location": location,
-                "Source SubPath": subpath,
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    print(
-        f"This function relies on an API which is not yet official as of May 21, 2024. Once the API becomes official this function will work as expected."
-    )
-    return df
+    except Exception as e:
+        raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table.") from e
 
 
 def delete_shortcut(
@@ -277,7 +176,7 @@ def delete_shortcut(
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
@@ -293,4 +192,4 @@
             f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace}' workspace has been deleted."
         )
     else:
-        print(f"{icons.red_dot} The '{shortcut_name}' has not been deleted.")
+        raise ValueError(f"{icons.red_dot} The '{shortcut_name}' has not been deleted.")
sempy_labs/migration/__init__.py
@@ -14,7 +14,9 @@ from sempy_labs.migration._migrate_tables_columns_to_semantic_model import (
 )
 from sempy_labs.migration._migration_validation import (
     migration_validation,
-    # list_semantic_model_objects
+)
+from sempy_labs.migration._refresh_calc_tables import (
+    refresh_calc_tables,
 )
 
 __all__ = [
@@ -25,5 +27,5 @@ __all__ = [
     "migrate_model_objects_to_semantic_model",
     "migrate_tables_columns_to_semantic_model",
     "migration_validation",
-    # list_semantic_model_objects
+    "refresh_calc_tables"
 ]
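With the commented-out list_semantic_model_objects entry dropped and refresh_calc_tables added to __all__, the migration namespace gains a new public entry point. Its exact signature is not shown in this diff; the parameters below are assumed from the sibling migration functions:

    from sempy_labs.migration import refresh_calc_tables

    # Hypothetical invocation; dataset/workspace parameters are assumptions.
    refresh_calc_tables(dataset="MigratedModel", workspace=None)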
sempy_labs/migration/_create_pqt_file.py
@@ -11,7 +11,7 @@ import sempy_labs._icons as icons
 
 @log
 def create_pqt_file(
-    dataset: str, workspace: Optional[str] = None, file_name: Optional[str] = None
+    dataset: str, workspace: Optional[str] = None, file_name: Optional[str] = 'PowerQueryTemplate'
 ):
     """
     Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is saved within the Files section of your lakehouse.
@@ -24,25 +24,16 @@
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    file_name : str, default=None
+    file_name : str, default='PowerQueryTemplate'
         The name of the Power Query Template file to be generated.
-        Defaults to None which resolves to 'PowerQueryTemplate'.
     """
 
-    if file_name is None:
-        file_name = "PowerQueryTemplate"
-
     lakeAttach = lakehouse_attached()
 
-    if lakeAttach == False:
-        print(
-            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
-        )
-        return
+    if lakeAttach is False:
+        raise ValueError(f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
 
-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
     folderPath = "/lakehouse/default/Files"
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
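Moving the 'PowerQueryTemplate' fallback from a None check in the body into the signature is safe because strings are immutable; a mutable default (list, dict) would still need the sentinel idiom that was removed here. The pattern in isolation:

    from typing import Optional

    # Immutable default baked into the signature, mirroring 0.5.0's change;
    # the docstring default and the code can no longer drift apart.
    def create_pqt_file(
        dataset: str,
        workspace: Optional[str] = None,
        file_name: Optional[str] = "PowerQueryTemplate",
    ):
        ...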
sempy_labs/migration/_migrate_calctables_to_lakehouse.py
@@ -8,7 +8,7 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
     create_abfss_path,
 )
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from pyspark.sql import SparkSession
 from typing import List, Optional, Union
 from sempy._utils._log import log
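All migration modules swap the private sempy_labs._tom import for the new public sempy_labs.tom package (file 50 shows _tom.py moving to tom/_model.py). Downstream code should follow suit; the context-manager usage below matches the library's documented pattern, with an illustrative dataset name:

    from sempy_labs.tom import connect_semantic_model  # new public path in 0.5.0

    with connect_semantic_model(dataset="AdventureWorks", readonly=True) as tom:
        for table in tom.model.Tables:
            print(table.Name)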
@@ -52,16 +52,16 @@ def migrate_calc_tables_to_lakehouse(
 
     workspace = fabric.resolve_workspace_name(workspace)
 
-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
         lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)
     else:
         lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
     else:
@@ -288,16 +288,16 @@ def migrate_field_parameters(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from .HelperFunctions import format_dax_object_name
+    from sempy_labs import format_dax_object_name
 
     sempy.fabric._client._utils._init_analysis_services()
     import Microsoft.AnalysisServices.Tabular as TOM
 
-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)
 
-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
sempy_labs/migration/_migrate_calctables_to_semantic_model.py
@@ -3,7 +3,7 @@ import sempy.fabric as fabric
 import re, datetime, time
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs._helper_functions import resolve_lakehouse_name
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -46,12 +46,12 @@ def migrate_calc_tables_to_semantic_model(
 
     workspace = fabric.resolve_workspace_name(workspace)
 
-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
@@ -3,12 +3,11 @@ import sempy.fabric as fabric
 import re, datetime, time
 from sempy_labs._list_functions import list_tables
 from sempy_labs._helper_functions import create_relationship_name
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 
-
 @log
 def migrate_model_objects_to_semantic_model(
     dataset: str,
@@ -39,13 +38,9 @@ def migrate_model_objects_to_semantic_model(
     import Microsoft.AnalysisServices.Tabular as TOM
     import System
 
-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspaceId = fabric.resolve_workspace_id(workspace)
+    workspace = fabric.resolve_workspace_name(workspace)
 
-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
     dfT = list_tables(dataset, workspace)
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
@@ -6,7 +6,7 @@ from sempy_labs._list_functions import list_tables
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
 from sempy_labs._helper_functions import resolve_lakehouse_name
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -50,13 +50,13 @@ def migrate_tables_columns_to_semantic_model(
 
     workspace = fabric.resolve_workspace_name(workspace)
 
-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
@@ -158,8 +158,8 @@
             f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook or specify the lakehouse/lakehouse_workspace parameters."
         )
         print(
-            f"To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
+            "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
         )
         print(
-            f"\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
+            "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
        )
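The last hunk drops the f prefix from strings that contain no placeholders; the prefix is inert there, and linters flag it (flake8 rule F541). The two forms are identical at runtime:

    # Equivalent literals; the f-prefix does nothing without placeholders.
    assert f"Learn more here" == "Learn more here"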