semantic-link-labs 0.8.10__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (73)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
  3. sempy_labs/__init__.py +6 -2
  4. sempy_labs/_clear_cache.py +39 -37
  5. sempy_labs/_connections.py +13 -13
  6. sempy_labs/_data_pipelines.py +20 -20
  7. sempy_labs/_dataflows.py +27 -28
  8. sempy_labs/_dax.py +41 -47
  9. sempy_labs/_environments.py +26 -23
  10. sempy_labs/_eventhouses.py +16 -15
  11. sempy_labs/_eventstreams.py +16 -15
  12. sempy_labs/_external_data_shares.py +18 -20
  13. sempy_labs/_gateways.py +14 -14
  14. sempy_labs/_generate_semantic_model.py +99 -62
  15. sempy_labs/_git.py +105 -43
  16. sempy_labs/_helper_functions.py +148 -131
  17. sempy_labs/_job_scheduler.py +92 -0
  18. sempy_labs/_kql_databases.py +16 -15
  19. sempy_labs/_kql_querysets.py +16 -15
  20. sempy_labs/_list_functions.py +114 -99
  21. sempy_labs/_managed_private_endpoints.py +19 -17
  22. sempy_labs/_mirrored_databases.py +51 -48
  23. sempy_labs/_mirrored_warehouses.py +5 -4
  24. sempy_labs/_ml_experiments.py +16 -15
  25. sempy_labs/_ml_models.py +15 -14
  26. sempy_labs/_model_bpa.py +3 -3
  27. sempy_labs/_model_dependencies.py +55 -29
  28. sempy_labs/_notebooks.py +27 -25
  29. sempy_labs/_one_lake_integration.py +23 -26
  30. sempy_labs/_query_scale_out.py +67 -64
  31. sempy_labs/_refresh_semantic_model.py +25 -26
  32. sempy_labs/_spark.py +33 -32
  33. sempy_labs/_sql.py +12 -9
  34. sempy_labs/_translations.py +10 -7
  35. sempy_labs/_vertipaq.py +34 -31
  36. sempy_labs/_warehouses.py +22 -21
  37. sempy_labs/_workspace_identity.py +11 -10
  38. sempy_labs/_workspaces.py +40 -33
  39. sempy_labs/admin/_basic_functions.py +10 -12
  40. sempy_labs/admin/_external_data_share.py +3 -3
  41. sempy_labs/admin/_items.py +4 -4
  42. sempy_labs/admin/_scanner.py +3 -1
  43. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  44. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  45. sempy_labs/directlake/_dl_helper.py +25 -26
  46. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  47. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  48. sempy_labs/directlake/_get_shared_expression.py +4 -3
  49. sempy_labs/directlake/_guardrails.py +12 -6
  50. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  51. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  52. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  54. sempy_labs/directlake/_warm_cache.py +87 -65
  55. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  57. sempy_labs/lakehouse/_lakehouse.py +17 -13
  58. sempy_labs/lakehouse/_shortcuts.py +42 -23
  59. sempy_labs/migration/_create_pqt_file.py +16 -11
  60. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  61. sempy_labs/report/_download_report.py +9 -8
  62. sempy_labs/report/_generate_report.py +40 -44
  63. sempy_labs/report/_paginated.py +9 -9
  64. sempy_labs/report/_report_bpa.py +13 -9
  65. sempy_labs/report/_report_functions.py +80 -91
  66. sempy_labs/report/_report_helper.py +8 -4
  67. sempy_labs/report/_report_list_functions.py +24 -13
  68. sempy_labs/report/_report_rebind.py +17 -16
  69. sempy_labs/report/_reportwrapper.py +41 -33
  70. sempy_labs/tom/_model.py +43 -6
  71. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  72. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  73. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_model_dependencies.py CHANGED
@@ -14,17 +14,17 @@ from uuid import UUID
 
 @log
 def get_measure_dependencies(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows all dependencies for all measures in a semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -34,8 +34,6 @@ def get_measure_dependencies(
         Shows all dependencies for all measures in the semantic model.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-
     dep = fabric.evaluate_dax(
         dataset=dataset,
         workspace=workspace,
@@ -144,17 +142,18 @@ def get_measure_dependencies(
 
 @log
 def get_model_calc_dependencies(
-    dataset: str | UUID, workspace: Optional[str] = None
+    dataset: str | UUID,
+    workspace: Optional[str] = None,
 ) -> pd.DataFrame:
     """
     Shows all dependencies for all objects in a semantic model.
 
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -164,11 +163,9 @@ def get_model_calc_dependencies(
         Shows all dependencies for all objects in the semantic model.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     dep = fabric.evaluate_dax(
-        dataset=dataset_id,
-        workspace=workspace_id,
+        dataset=dataset,
+        workspace=workspace,
         dax_string="""
         SELECT
         [TABLE] AS [Table Name],
@@ -193,12 +190,24 @@ def get_model_calc_dependencies(
         dep["Referenced Table"], dep["Referenced Object"]
     )
     dep["Parent Node"] = dep["Object Name"]
+
     # Initialize dependency DataFrame with 'Done' status
     df = dep.copy()
     objs = {"Measure", "Calc Column", "Calculation Item", "Calc Table"}
     df["Done"] = (
         df["Referenced Object Type"].apply(lambda x: x not in objs).astype(bool)
     )
+
+    # Set to track visited dependencies to prevent circular references
+    visited = set(
+        zip(
+            df["Full Object Name"],
+            df["Referenced Full Object Name"],
+            df["Object Type"],
+            df["Referenced Object Type"],
+        )
+    )
+
     # Expand dependencies iteratively
     while not df["Done"].all():
         incomplete_rows = df[df["Done"] == False]
@@ -212,11 +221,24 @@
             # Expand dependencies and update 'Done' status as needed
             new_rows = []
             for _, dependency in dep_filt.iterrows():
+                # Check if the dependency has already been visited
+                dependency_pair = (
+                    row["Full Object Name"],
+                    dependency["Referenced Full Object Name"],
+                    row["Object Type"],
+                    dependency["Referenced Object Type"],
+                )
+                if dependency_pair in visited:
+                    continue  # Skip already visited dependencies
+
+                visited.add(dependency_pair)  # Mark as visited
+
                 is_done = dependency["Referenced Object Type"] not in objs
                 new_row = {
                     "Table Name": row["Table Name"],
                     "Object Name": row["Object Name"],
                     "Object Type": row["Object Type"],
+                    "Expression": row["Expression"],
                     "Referenced Table": dependency["Referenced Table"],
                     "Referenced Object": dependency["Referenced Object"],
                     "Referenced Object Type": dependency["Referenced Object Type"],
@@ -228,7 +250,14 @@
                     "Parent Node": row["Referenced Object"],
                 }
                 new_rows.append(new_row)
-            df = pd.concat([df, pd.DataFrame(new_rows)], ignore_index=True)
+
+            if new_rows:
+                new_rows_df = pd.DataFrame(new_rows)
+                new_rows_df = new_rows_df.dropna(
+                    axis=1, how="all"
+                )  # Drop empty columns
+                df = pd.concat([df, new_rows_df], ignore_index=True)
+
             df.loc[df.index == row.name, "Done"] = True
     # Finalize DataFrame and yield result
     df = df.drop(columns=["Done"])
@@ -238,39 +267,36 @@
 
 @log
 def measure_dependency_tree(
-    dataset: str, measure_name: str, workspace: Optional[str] = None
+    dataset: str | UUID, measure_name: str, workspace: Optional[str | UUID] = None
 ):
     """
     Prints a measure dependency tree of all dependent objects for a measure in a semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     measure_name : str
         Name of the measure.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
+    dfM = fabric.list_measures(dataset=dataset_id, workspace=workspace_id)
     dfM_filt = dfM[dfM["Measure Name"] == measure_name]
 
     if len(dfM_filt) == 0:
         print(
-            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset}' semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset_name}' semantic model in the '{workspace_name}' workspace."
         )
         return
 
-    md = get_measure_dependencies(dataset, workspace)
+    md = get_measure_dependencies(dataset_id, workspace_id)
     df_filt = md[md["Object Name"] == measure_name]
 
     # Create a dictionary to hold references to nodes
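The key behavioral change in sempy_labs/_model_dependencies.py is the visited set added to get_model_calc_dependencies: by recording each (object, referenced object) pair the first time it is expanded, the iterative loop terminates even when objects reference each other. A minimal standalone sketch of that guard pattern (illustrative only, not the library's code; the dependency graph is made up):

    # Made-up dependency graph; "M2" -> "M1" closes a cycle on purpose.
    deps = {
        "M1": ["M2"],
        "M2": ["M3", "M1"],
        "M3": [],
    }

    def expand(measure):
        resolved, visited, stack = set(), set(), [measure]
        while stack:
            current = stack.pop()
            for ref in deps.get(current, []):
                pair = (current, ref)
                if pair in visited:
                    continue  # already expanded once; skipping breaks the cycle
                visited.add(pair)
                resolved.add(ref)
                stack.append(ref)
        return resolved

    print(expand("M1"))  # {'M2', 'M3', 'M1'} - terminates despite the cycle

The release also drops all-empty columns from the newly built rows (the dropna(axis=1, how="all") step) before concatenating, presumably to avoid pandas warnings about concatenating all-NA columns.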
sempy_labs/_notebooks.py CHANGED
@@ -11,17 +11,18 @@ from sempy_labs._helper_functions import (
 )
 from sempy.fabric.exceptions import FabricHTTPException
 import os
+from uuid import UUID
 
 _notebook_prefix = "notebook-content."
 
 
 def _get_notebook_definition_base(
-    notebook_name: str, workspace: Optional[str] = None
+    notebook_name: str, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_id = fabric.resolve_item_id(
-        item_name=notebook_name, type="Notebook", workspace=workspace
+        item_name=notebook_name, type="Notebook", workspace=workspace_id
     )
     client = fabric.FabricRestClient()
     response = client.post(
@@ -33,7 +34,9 @@ def _get_notebook_definition_base(
     return pd.json_normalize(result["definition"]["parts"])
 
 
-def _get_notebook_type(notebook_name: str, workspace: Optional[str] = None) -> str:
+def _get_notebook_type(
+    notebook_name: str, workspace: Optional[str | UUID] = None
+) -> str:
 
     df_items = _get_notebook_definition_base(
         notebook_name=notebook_name, workspace=workspace
@@ -49,7 +52,7 @@ def _get_notebook_type(notebook_name: str, workspace: Optional[str] = None) -> s
 
 
 def get_notebook_definition(
-    notebook_name: str, workspace: Optional[str] = None, decode: bool = True
+    notebook_name: str, workspace: Optional[str | UUID] = None, decode: bool = True
 ) -> str:
     """
     Obtains the notebook definition.
@@ -60,8 +63,8 @@ def get_notebook_definition(
     ----------
     notebook_name : str
         The name of the notebook.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     decode : bool, default=True
@@ -92,7 +95,7 @@ def import_notebook_from_web(
     notebook_name: str,
     url: str,
     description: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     overwrite: bool = False,
 ):
     """
@@ -110,16 +113,15 @@
     description : str, default=None
         The description of the notebook.
         Defaults to None which does not place a description.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
        If set to True, overwrites the existing notebook in the workspace if it exists.
     """
 
-    if workspace is None:
-        workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     # Fix links to go to the raw github file
     starting_text = "https://github.com/"
@@ -139,7 +141,7 @@
         create_notebook(
             name=notebook_name,
             notebook_content=response.content,
-            workspace=workspace,
+            workspace=workspace_id,
             description=description,
         )
     elif len(dfI_filt) > 0 and overwrite:
@@ -149,7 +151,7 @@
         # )
     else:
         raise ValueError(
-            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace and 'overwrite' is set to False."
+            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace_name}' workspace and 'overwrite' is set to False."
         )
 
 
@@ -158,7 +160,7 @@ def create_notebook(
     notebook_content: str,
     type: str = "py",
     description: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a new notebook with a definition within a workspace.
@@ -174,13 +176,13 @@
     description : str, default=None
         The description of the notebook.
         Defaults to None which does not place a description.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     client = fabric.FabricRestClient()
     notebook_payload = base64.b64encode(notebook_content)
 
@@ -205,12 +207,12 @@
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' notebook was created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{name}' notebook was created within the '{workspace_name}' workspace."
     )
 
 
 def update_notebook_definition(
-    name: str, notebook_content: str, workspace: Optional[str] = None
+    name: str, notebook_content: str, workspace: Optional[str | UUID] = None
 ):
     """
     Updates an existing notebook with a new definition.
@@ -221,17 +223,17 @@
         The name of the notebook to be updated.
     notebook_content : str
         The Jupyter notebook content (not in Base64 format).
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     client = fabric.FabricRestClient()
     notebook_payload = base64.b64encode(notebook_content)
     notebook_id = fabric.resolve_item_id(
-        item_name=name, type="Notebook", workspace=workspace
+        item_name=name, type="Notebook", workspace=workspace_id
     )
 
     type = _get_notebook_type(notebook_name=name, workspace=workspace_id)
@@ -256,5 +258,5 @@
     lro(client, response, return_status_code=True)
 
     print(
-        f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace_name}' workspace."
     )
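As the signature changes above show, the notebook helpers now take workspace as either a name or a UUID and resolve both through resolve_workspace_name_and_id. A hedged usage sketch (placeholder names and IDs; assumes a Fabric notebook session and that get_notebook_definition is re-exported at the package root as in prior releases):

    from uuid import UUID
    import sempy_labs as labs

    # Both calls should resolve to the same workspace.
    by_name = labs.get_notebook_definition("My Notebook", workspace="Sales Workspace")
    by_id = labs.get_notebook_definition(
        "My Notebook", workspace=UUID("00000000-0000-0000-0000-000000000000")
    )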
sempy_labs/_one_lake_integration.py CHANGED
@@ -2,14 +2,18 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
-from sempy_labs._helper_functions import resolve_workspace_name_and_id
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 @log
 def export_model_to_onelake(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     destination_lakehouse: Optional[str] = None,
     destination_workspace: Optional[str] = None,
 ):
@@ -18,10 +22,10 @@ def export_model_to_onelake(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     destination_lakehouse : str, default=None
@@ -30,22 +34,15 @@
         The name of the Fabric workspace in which the lakehouse resides.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     if destination_workspace is None:
-        destination_workspace = workspace
+        destination_workspace = workspace_name
         destination_workspace_id = workspace_id
     else:
         destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)
 
-    dfD = fabric.list_datasets(workspace=workspace)
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-
-    if len(dfD_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
-
     tmsl = f"""
     {{
     'export': {{
@@ -53,7 +50,7 @@
     'type': 'full',
     'objects': [
         {{
-            'database': '{dataset}'
+            'database': '{dataset_name}'
         }}
     ]
     }}
@@ -62,13 +59,13 @@
 
     # Export model's tables as delta tables
     try:
-        fabric.execute_tmsl(script=tmsl, workspace=workspace)
+        fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
         print(
-            f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
+            f"{icons.green_dot} The '{dataset_name}' semantic model's tables have been exported as delta tables to the '{workspace_name}' workspace.\n"
         )
     except Exception as e:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
+            f"{icons.red_dot} The '{dataset_name}' semantic model's tables have not been exported as delta tables to the '{workspace_name}' workspace.\nMake sure you enable OneLake integration for the '{dataset_name}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
         ) from e
 
     # Create shortcuts if destination lakehouse is specified
@@ -92,14 +89,14 @@
         destination_lakehouse_id = dfI_filt["Id"].iloc[0]
 
         # Source...
-        dfI_Source = fabric.list_items(workspace=workspace, type="SemanticModel")
+        dfI_Source = fabric.list_items(workspace=workspace_id, type="SemanticModel")
         dfI_filtSource = dfI_Source[(dfI_Source["Display Name"] == dataset)]
         sourceLakehouseId = dfI_filtSource["Id"].iloc[0]
 
         # Valid tables
         dfP = fabric.list_partitions(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             additional_xmla_properties=["Parent.SystemManaged"],
         )
         dfP_filt = dfP[
@@ -107,7 +104,7 @@
             & (dfP["Source Type"] != "CalculationGroup")
             & (dfP["Parent System Managed"] == False)
         ]
-        dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+        dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
         tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
         oneMode = tmc[tmc["Mode"] == 1]
         tableAll = dfP_filt[
@@ -141,7 +138,7 @@
             )
             if response.status_code == 201:
                 print(
-                    f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset}' semantic model within the '{workspace}' workspace.\n"
+                    f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace.\n"
                 )
             else:
                 print(response.status_code)
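With these changes, export_model_to_onelake resolves the dataset name and ID up front via resolve_dataset_name_and_id instead of scanning fabric.list_datasets, so dataset may now be passed as a UUID as well as a name. A hedged usage sketch (placeholder names and IDs; requires a Fabric session and OneLake integration enabled on the model):

    from uuid import UUID
    from sempy_labs import export_model_to_onelake

    export_model_to_onelake(
        dataset=UUID("11111111-1111-1111-1111-111111111111"),  # a model name also works
        workspace="Sales Workspace",
        destination_lakehouse="ExportLakehouse",
        destination_workspace="Engineering Workspace",
    )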