semantic-link-labs 0.9.6__py3-none-any.whl → 0.9.7__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Files changed (35)
  1. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.7.dist-info}/METADATA +7 -5
  2. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.7.dist-info}/RECORD +35 -32
  3. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.7.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +4 -0
  5. sempy_labs/_ai.py +3 -1
  6. sempy_labs/_capacities.py +0 -1
  7. sempy_labs/_dax_query_view.py +2 -0
  8. sempy_labs/_delta_analyzer_history.py +298 -0
  9. sempy_labs/_helper_functions.py +171 -15
  10. sempy_labs/_icons.py +6 -6
  11. sempy_labs/_list_functions.py +3 -1
  12. sempy_labs/_model_bpa_bulk.py +10 -11
  13. sempy_labs/_model_bpa_rules.py +1 -1
  14. sempy_labs/admin/_basic_functions.py +28 -2
  15. sempy_labs/admin/_reports.py +1 -1
  16. sempy_labs/admin/_scanner.py +0 -2
  17. sempy_labs/admin/_tenant.py +8 -3
  18. sempy_labs/directlake/_generate_shared_expression.py +9 -1
  19. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +82 -36
  20. sempy_labs/directlake/_update_directlake_partition_entity.py +3 -0
  21. sempy_labs/graph/_groups.py +6 -0
  22. sempy_labs/graph/_teams.py +2 -0
  23. sempy_labs/graph/_users.py +4 -0
  24. sempy_labs/lakehouse/__init__.py +12 -3
  25. sempy_labs/lakehouse/_blobs.py +231 -0
  26. sempy_labs/lakehouse/_shortcuts.py +22 -3
  27. sempy_labs/migration/_direct_lake_to_import.py +47 -10
  28. sempy_labs/report/__init__.py +4 -0
  29. sempy_labs/report/_report_functions.py +3 -3
  30. sempy_labs/report/_report_helper.py +17 -5
  31. sempy_labs/report/_reportwrapper.py +17 -8
  32. sempy_labs/report/_save_report.py +147 -0
  33. sempy_labs/tom/_model.py +154 -23
  34. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.7.dist-info/licenses}/LICENSE +0 -0
  35. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.7.dist-info}/top_level.txt +0 -0
sempy_labs/_helper_functions.py CHANGED
@@ -183,7 +183,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
         The name of the Power BI report.
     """
 
-    return resolve_item_name(item_id=report_id, type="Report", workspace=workspace)
+    return resolve_item_name(item_id=report_id, workspace=workspace)
 
 
 def delete_item(
@@ -469,9 +469,7 @@ def resolve_dataset_name(
         The name of the semantic model.
     """
 
-    return resolve_item_name(
-        item_id=dataset_id, type="SemanticModel", workspace=workspace
-    )
+    return resolve_item_name(item_id=dataset_id, workspace=workspace)
 
 
 def resolve_lakehouse_name(
@@ -503,9 +501,7 @@ def resolve_lakehouse_name(
             f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
         )
 
-    return resolve_item_name(
-        item_id=lakehouse_id, type="Lakehouse", workspace=workspace
-    )
+    return resolve_item_name(item_id=lakehouse_id, workspace=workspace)
 
 
 def resolve_lakehouse_id(
@@ -900,6 +896,113 @@ def resolve_workspace_name_and_id(
     return workspace_name, workspace_id
 
 
+def resolve_item_id(
+    item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
+) -> UUID:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = None
+
+    if _is_valid_uuid(item):
+        # Check (optional)
+        item_id = item
+        try:
+            _base_api(
+                request=f"/v1/workspaces/{workspace_id}/items/{item_id}",
+                client="fabric_sp",
+            )
+        except FabricHTTPException:
+            raise ValueError(
+                f"{icons.red_dot} The '{item_id}' item was not found in the '{workspace_name}' workspace."
+            )
+    else:
+        if type is None:
+            raise ValueError(
+                f"{icons.red_dot} The 'type' parameter is required if specifying an item name."
+            )
+        responses = _base_api(
+            request=f"/v1/workspaces/{workspace_id}/items?type={type}",
+            client="fabric_sp",
+            uses_pagination=True,
+        )
+        for r in responses:
+            for v in r.get("value", []):
+                display_name = v.get("displayName")
+                if display_name == item:
+                    item_id = v.get("id")
+                    break
+
+    if item_id is None:
+        raise ValueError(
+            f"{icons.red_dot} There's no item '{item}' of type '{type}' in the '{workspace_name}' workspace."
+        )
+
+    return item_id
+
+
+def resolve_item_name_and_id(
+    item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
+) -> Tuple[str, UUID]:
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)
+    item_name = (
+        _base_api(
+            request=f"/v1/workspaces/{workspace_id}/items/{item_id}", client="fabric_sp"
+        )
+        .json()
+        .get("displayName")
+    )
+
+    return item_name, item_id
+
+
+def resolve_item_name(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:
+
+    workspace_id = resolve_workspace_id(workspace)
+    try:
+        item_name = (
+            _base_api(
+                request=f"/v1/workspaces/{workspace_id}/items/{item_id}",
+                client="fabric_sp",
+            )
+            .json()
+            .get("displayName")
+        )
+    except FabricHTTPException:
+        raise ValueError(
+            f"{icons.red_dot} The '{item_id}' item was not found in the '{workspace_id}' workspace."
+        )
+
+    return item_name
+
+
+def resolve_lakehouse_name_and_id(
+    lakehouse: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
+) -> Tuple[str, UUID]:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    type = "Lakehouse"
+
+    if lakehouse is None:
+        lakehouse_id = fabric.get_lakehouse_id()
+        lakehouse_name = fabric.resolve_item_name(
+            item_id=lakehouse_id, type=type, workspace=workspace_id
+        )
+    elif _is_valid_uuid(lakehouse):
+        lakehouse_id = lakehouse
+        lakehouse_name = fabric.resolve_item_name(
+            item_id=lakehouse_id, type=type, workspace=workspace_id
+        )
+    else:
+        lakehouse_name = lakehouse
+        lakehouse_id = fabric.resolve_item_id(
+            item_name=lakehouse, type=type, workspace=workspace_id
+        )
+
+    return lakehouse_name, lakehouse_id
+
+
 def _extract_json(dataframe: pd.DataFrame) -> dict:
 
     payload = dataframe["payload"].iloc[0]
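
The new resolvers accept either a display name or a UUID. A minimal usage sketch (the item, type, and workspace values below are placeholders, and the private import path is taken from this diff):

from sempy_labs._helper_functions import (
    resolve_item_id,
    resolve_item_name_and_id,
    resolve_lakehouse_name_and_id,
)

# Resolving by name requires 'type'; resolving by UUID only validates that the item exists.
item_id = resolve_item_id(item="Sales Model", type="SemanticModel", workspace="My Workspace")
item_name, item_id = resolve_item_name_and_id(item=item_id, workspace="My Workspace")

# With no arguments, the lakehouse helper falls back to the lakehouse attached to the notebook.
lakehouse_name, lakehouse_id = resolve_lakehouse_name_and_id()
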
@@ -1308,10 +1411,8 @@ class FabricTokenCredential(TokenCredential):
 
         import notebookutils
 
-        token = notebookutils.credentials.getToken(scopes)
-        access_token = AccessToken(token, 0)
-
-        return access_token
+        token = notebookutils.credentials.getToken("storage")
+        return AccessToken(token, 0)
 
 
 def _get_adls_client(account_name):
@@ -1320,11 +1421,21 @@ def _get_adls_client(account_name):
 
     account_url = f"https://{account_name}.dfs.core.windows.net"
 
-    service_client = DataLakeServiceClient(
-        account_url, credential=FabricTokenCredential()
+    return DataLakeServiceClient(account_url, credential=FabricTokenCredential())
+
+
+def _get_blob_client(workspace_id: UUID, item_id: UUID):
+
+    from azure.storage.blob import BlobServiceClient
+
+    endpoint = _get_fabric_context_setting(name="trident.onelake.endpoint").replace(
+        ".dfs.", ".blob."
     )
+    url = f"https://{endpoint}/{workspace_id}/{item_id}"
+
+    # account_url = f"https://{account_name}.blob.core.windows.net"
 
-    return service_client
+    return BlobServiceClient(url, credential=FabricTokenCredential())
 
 
 def resolve_warehouse_id(
@@ -1889,7 +2000,9 @@ def _run_spark_sql_query(query):
     return spark.sql(query)
 
 
-def _mount(lakehouse, workspace) -> str:
+def _mount(
+    lakehouse: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
+) -> str:
     """
     Mounts a lakehouse to a notebook if it is not already mounted. Returns the local path to the lakehouse.
     """
@@ -1901,6 +2014,16 @@ def _mount(lakehouse, workspace) -> str:
         lakehouse=lakehouse, workspace=workspace
     )
 
+    # Hide display mounts
+    current_setting = ""
+    try:
+        current_setting = notebookutils.conf.get(
+            "spark.notebookutils.displaymountpoint.enabled"
+        )
+        notebookutils.conf.set("spark.notebookutils.displaymountpoint.enabled", "false")
+    except Exception:
+        pass
+
     lake_path = create_abfss_path(lakehouse_id, workspace_id)
     mounts = notebookutils.fs.mounts()
     mount_point = f"/{workspace_name.replace(' ', '')}{lakehouse_name.replace(' ', '')}"
@@ -1912,6 +2035,16 @@ def _mount(lakehouse, workspace) -> str:
         )
 
     mounts = notebookutils.fs.mounts()
+
+    # Set display mounts to original setting
+    try:
+        if current_setting != "false":
+            notebookutils.conf.set(
+                "spark.notebookutils.displaymountpoint.enabled", "true"
+            )
+    except Exception:
+        pass
+
     local_path = next(
         i.get("localPath") for i in mounts if i.get("source") == lake_path
    )
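
A minimal sketch of calling the updated helper (the lakehouse and workspace names are placeholders; `_mount` is a private helper, so the import path below is assumed from this diff):

from sempy_labs._helper_functions import _mount

# Both arguments are now optional; omitting them resolves to the attached lakehouse
# and the workspace of the running notebook.
local_path = _mount(lakehouse="MyLakehouse", workspace="My Workspace")
print(local_path)
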
@@ -2015,3 +2148,26 @@ def _get_or_create_warehouse(
         )
 
     return (warehouse, warehouse_id)
+
+
+def _xml_to_dict(element):
+    data = {element.tag: {} if element.attrib else None}
+    children = list(element)
+    if children:
+        temp_dict = {}
+        for child in children:
+            child_dict = _xml_to_dict(child)
+            for key, value in child_dict.items():
+                if key in temp_dict:
+                    if isinstance(temp_dict[key], list):
+                        temp_dict[key].append(value)
+                    else:
+                        temp_dict[key] = [temp_dict[key], value]
+                else:
+                    temp_dict[key] = value
+        data[element.tag] = temp_dict
+    else:
+        data[element.tag] = (
+            element.text.strip() if element.text and element.text.strip() else None
+        )
+    return data
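
A quick sketch of what the new recursive helper returns (the sample XML is made up for illustration, and the private import path is taken from this diff):

import xml.etree.ElementTree as ET

from sempy_labs._helper_functions import _xml_to_dict

root = ET.fromstring("<root><item>a</item><item>b</item><empty/></root>")
print(_xml_to_dict(root))
# {'root': {'item': ['a', 'b'], 'empty': None}}  -- repeated tags collapse into a list
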
sempy_labs/_icons.py CHANGED
@@ -1,6 +1,6 @@
-green_dot = "\U0001F7E2"
-yellow_dot = "\U0001F7E1"
-red_dot = "\U0001F534"
+green_dot = "\U0001f7e2"
+yellow_dot = "\U0001f7e1"
+red_dot = "\U0001f534"
 in_progress = "⌛"
 checked = "\u2611"
 unchecked = "\u2610"
@@ -8,11 +8,11 @@ start_bold = "\033[1m"
 end_bold = "\033[0m"
 bullet = "\u2022"
 warning = "⚠️"
-error = "\u274C"
+error = "\u274c"
 info = "ℹ️"
 measure_icon = "\u2211"
-table_icon = "\u229E"
-column_icon = "\u229F"
+table_icon = "\u229e"
+column_icon = "\u229f"
 model_bpa_name = "ModelBPA"
 report_bpa_name = "ReportBPA"
 severity_mapping = {warning: "Warning", error: "Error", info: "Info"}
sempy_labs/_list_functions.py CHANGED
@@ -1476,7 +1476,9 @@ def list_server_properties(workspace: Optional[str | UUID] = None) -> pd.DataFra
         A pandas dataframe showing a list of the server properties.
     """
 
-    tom_server = fabric.create_tom_server(readonly=True, workspace=workspace)
+    tom_server = fabric.create_tom_server(
+        dataset=None, readonly=True, workspace=workspace
+    )
 
     rows = [
         {
sempy_labs/_model_bpa_bulk.py CHANGED
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
 import pandas as pd
 import datetime
 from sempy_labs._helper_functions import (
-    resolve_lakehouse_name,
     save_as_delta_table,
     resolve_workspace_capacity,
     retry,
@@ -26,7 +25,7 @@ def run_model_bpa_bulk(
     rules: Optional[pd.DataFrame] = None,
     extended: bool = False,
     language: Optional[str] = None,
-    workspace: Optional[str | List[str]] = None,
+    workspace: Optional[str | UUID | List[str | UUID]] = None,
     skip_models: Optional[str | List[str]] = ["ModelBPA", "Fabric Capacity Metrics"],
     skip_models_in_workspace: Optional[dict] = None,
 ):
@@ -44,8 +43,8 @@ def run_model_bpa_bulk(
     language : str, default=None
         The language (code) in which the rules will appear. For example, specifying 'it-IT' will show the Rule Name, Category and Description in Italian.
         Defaults to None which resolves to English.
-    workspace : str | List[str], default=None
-        The workspace or list of workspaces to scan.
+    workspace : str | uuid.UUID | List[str | uuid.UUID], default=None
+        The workspace or list of workspaces to scan. Supports both the workspace name and the workspace id.
         Defaults to None which scans all accessible workspaces.
     skip_models : str | List[str], default=['ModelBPA', 'Fabric Capacity Metrics']
         The semantic models to always skip when running this analysis.
@@ -71,7 +70,7 @@ def run_model_bpa_bulk(
     output_table = "modelbparesults"
     lakeT = get_lakehouse_tables()
     lakeT_filt = lakeT[lakeT["Table Name"] == output_table]
-    if len(lakeT_filt) == 0:
+    if lakeT_filt.empty:
         runId = 1
     else:
         max_run_id = _get_column_aggregate(table_name=output_table)
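
Several call sites in this release swap `len(df) == 0` checks for the equivalent, more idiomatic `DataFrame.empty`; a quick illustration:

import pandas as pd

df = pd.DataFrame({"Table Name": []})
assert df.empty == (len(df) == 0)  # True for an empty frame, False once rows exist
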
@@ -84,14 +83,14 @@ def run_model_bpa_bulk(
     if workspace is None:
         dfW_filt = dfW.copy()
     else:
-        dfW_filt = dfW[dfW["Name"].isin(workspace)]
+        dfW_filt = dfW[(dfW["Name"].isin(workspace)) | (dfW["Id"].isin(workspace))]
 
-    if len(dfW_filt) == 0:
+    if dfW_filt.empty:
         raise ValueError(
             f"{icons.red_dot} There are no valid workspaces to assess. This is likely due to not having proper permissions to the workspace(s) entered in the 'workspace' parameter."
         )
 
-    for i, r in dfW_filt.iterrows():
+    for _, r in dfW_filt.iterrows():
         wksp = r["Name"]
         wksp_id = r["Id"]
         capacity_id, capacity_name = resolve_workspace_capacity(workspace=wksp)
@@ -106,7 +105,7 @@ def run_model_bpa_bulk(
             dfD = dfD[~dfD["Dataset Name"].isin(skip_models_wkspc)]
 
         # Exclude default semantic models
-        if len(dfD) > 0:
+        if not dfD.empty:
             dfI = fabric.list_items(workspace=wksp)
             filtered_df = dfI.groupby("Display Name").filter(
                 lambda x: set(["Warehouse", "SemanticModel"]).issubset(set(x["Type"]))
@@ -116,7 +115,7 @@ def run_model_bpa_bulk(
             skip_models.extend(default_semantic_models)
             dfD_filt = dfD[~dfD["Dataset Name"].isin(skip_models)]
 
-            if len(dfD_filt) > 0:
+            if not dfD_filt.empty:
                 for _, r2 in dfD_filt.iterrows():
                     dataset_id = r2["Dataset Id"]
                     dataset_name = r2["Dataset Name"]
@@ -159,7 +158,7 @@ def run_model_bpa_bulk(
                     )
                     print(e)
 
-        if len(df) == 0:
+        if df.empty:
             print(
                 f"{icons.yellow_dot} No BPA results to save for the '{wksp}' workspace."
             )
sempy_labs/_model_bpa_rules.py CHANGED
@@ -556,7 +556,7 @@ def model_bpa_rules(
             "Warning",
             "Use the DIVIDE function for division",
             lambda obj, tom: re.search(
-                r"\]\s*\/(?!\/)(?!\*)\" or \"\)\s*\/(?!\/)(?!\*)",
+                r"\]\s*\/(?!\/)(?!\*)|\)\s*\/(?!\/)(?!\*)",
                 obj.Expression,
                 flags=re.IGNORECASE,
             ),
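
The corrected rule replaces the stray `\" or \"` fragment with a proper regex alternation. A quick check of the fixed pattern against made-up DAX snippets:

import re

pattern = r"\]\s*\/(?!\/)(?!\*)|\)\s*\/(?!\/)(?!\*)"

assert re.search(pattern, "[Sales] / [Quantity]")            # division after a column reference
assert re.search(pattern, "SUM(Sales[Amount]) / 2")          # division after a closing parenthesis
assert not re.search(pattern, "DIVIDE([Sales], [Quantity])")  # already uses DIVIDE
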
sempy_labs/admin/_basic_functions.py CHANGED
@@ -350,14 +350,40 @@ def list_workspace_access_details(
     return df
 
 
+def _resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
+    from sempy_labs._helper_functions import _get_fabric_context_setting
+    from sempy.fabric.exceptions import FabricHTTPException
+
+    if workspace_id is None:
+        workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+
+    try:
+        workspace_name = (
+            _base_api(
+                request=f"/v1/admin/workspaces/{workspace_id}", client="fabric_sp"
+            )
+            .json()
+            .get("name")
+        )
+    except FabricHTTPException:
+        raise ValueError(
+            f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+        )
+    return workspace_name
+
+
 def _resolve_workspace_name_and_id(
     workspace: str | UUID,
 ) -> Tuple[str, UUID]:
 
-    from sempy_labs._helper_functions import resolve_workspace_name_and_id
+    from sempy_labs._helper_functions import _get_fabric_context_setting
 
     if workspace is None:
-        (workspace_name, workspace_id) = resolve_workspace_name_and_id()
+        workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+        workspace_name = _resolve_workspace_name(workspace_id)
+    elif _is_valid_uuid(workspace):
+        workspace_id = workspace
+        workspace_name = _resolve_workspace_name(workspace_id)
     else:
         dfW = list_workspaces(workspace=workspace)
         if not dfW.empty:
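
A small sketch of the admin-side resolver's new behavior (the UUID is a placeholder; both helpers are private, so the import path is assumed from this diff):

from sempy_labs.admin._basic_functions import _resolve_workspace_name_and_id

# A workspace UUID is now resolved directly via the admin API instead of list_workspaces;
# passing None falls back to the workspace of the running notebook.
name, workspace_id = _resolve_workspace_name_and_id("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee")
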
sempy_labs/admin/_reports.py CHANGED
@@ -113,7 +113,7 @@ def list_report_users(report: str | UUID) -> pd.DataFrame:
     """
     Shows a list of users that have access to the specified report.
 
-    This is a wrapper function for the following API: `Admin - Reports GetDatasetUsersAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/datasets-get-report-users-as-admin>`_.
+    This is a wrapper function for the following API: `Admin - Reports GetDatasetUsersAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/reports-get-report-users-as-admin>`_.
 
     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
 
sempy_labs/admin/_scanner.py CHANGED
@@ -115,6 +115,4 @@ def scan_workspaces(
         client="fabric_sp",
     )
 
-    print(f"{icons.green_dot} Status: {scan_status}")
-
     return response.json()
sempy_labs/admin/_tenant.py CHANGED
@@ -126,11 +126,10 @@ def list_capacity_tenant_settings_overrides(
         if capacity_id is None:
             # If capacity_id is None, we access 'Overrides' -> 'tenantSettings'
             for override in r.get("overrides", []):
+                capacity_id = override.get("id")
                 tenant_settings = override.get("tenantSettings", [])
                 for setting in tenant_settings:
-                    data.append(
-                        create_new_data(setting)
-                    )  # No capacity_id needed here
+                    data.append(create_new_data(setting, capacity_id))
         else:
             # If capacity_id is provided, we access 'value' directly for tenantSettings
             for setting in r.get("value", []):
@@ -391,6 +390,7 @@ def list_workspaces_tenant_settings_overrides() -> pd.DataFrame:
     """
 
     columns = {
+        "Workspace Id": "string",
         "Setting Name": "string",
         "Title": "string",
         "Enabled": "bool",
@@ -409,8 +409,10 @@ def list_workspaces_tenant_settings_overrides() -> pd.DataFrame:
 
     for r in responses:
         for v in r.get("value", []):
+            workspace_id = v.get("id")
             for setting in v.get("tenantSettings", []):
                 new_data = {
+                    "Workspace Id": workspace_id,
                     "Setting Name": setting.get("settingName"),
                     "Title": setting.get("title"),
                     "Enabled": setting.get("enabled"),
@@ -447,6 +449,7 @@ def list_domain_tenant_settings_overrides() -> pd.DataFrame:
     """
 
     columns = {
+        "Domain Id": "string",
         "Setting Name": "string",
         "Title": "string",
         "Enabled": "bool",
@@ -466,8 +469,10 @@ def list_domain_tenant_settings_overrides() -> pd.DataFrame:
 
     for r in responses:
         for v in r.get("value", []):
+            domain_id = v.get("id")
             for setting in v.get("tenantSettings", []):
                 new_data = {
+                    "Domain Id": domain_id,
                     "Setting Name": setting.get("settingName"),
                     "Title": setting.get("title"),
                     "Enabled": setting.get("enabled"),
sempy_labs/directlake/_generate_shared_expression.py CHANGED
@@ -13,6 +13,7 @@ def generate_shared_expression(
     item_name: Optional[str] = None,
     item_type: str = "Lakehouse",
     workspace: Optional[str | UUID] = None,
+    use_sql_endpoint: bool = True,
 ) -> str:
     """
     Dynamically generates the M expression used by a Direct Lake model for a given lakehouse/warehouse.
@@ -28,6 +29,9 @@ def generate_shared_expression(
         The Fabric workspace name or ID used by the item.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    use_sql_endpoint : bool, default=True
+        Whether to use the SQL Endpoint for the lakehouse/warehouse.
+        If False, the expression will be generated without using the SQL Endpoint.
 
     Returns
     -------
@@ -78,4 +82,8 @@ def generate_shared_expression(
     end_expr = "\nin\n\tdatabase"
     mid_expr = f'Sql.Database("{sqlEPCS}", "{sqlepid}")'
 
-    return f"{start_expr}{mid_expr}{end_expr}"
+    # Build DL/OL expression
+    if not use_sql_endpoint and item_type == "Lakehouse":
+        return f'AzureDataLakeStorage{{"server":"onelake.dfs.fabric.microsoft.com","path":"/{workspace_id}/{item_id}/"}}'
+    else:
+        return f"{start_expr}{mid_expr}{end_expr}"
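
A hedged usage sketch of the new parameter (item and workspace names are placeholders, and the `sempy_labs.directlake` import path is assumed):

from sempy_labs.directlake import generate_shared_expression

# With use_sql_endpoint=False (new in 0.9.7), a Lakehouse yields a OneLake (DL/OL) expression
# instead of one that goes through the SQL analytics endpoint.
expression = generate_shared_expression(
    item_name="MyLakehouse",
    item_type="Lakehouse",
    workspace="My Workspace",
    use_sql_endpoint=False,
)
print(expression)
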