semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_items.py

@@ -13,6 +13,7 @@ from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _build_url,
 )
+import sempy_labs._authentication as auth


 def _resolve_item_id(
@@ -65,7 +66,7 @@ def _resolve_item_name_and_id(

 def list_items(
     capacity: Optional[str | UUID] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     state: Optional[str] = None,
     type: Optional[str] = None,
     item: Optional[str | UUID] = None,
@@ -76,11 +77,13 @@ def list_items(

     This is a wrapper function for the following API: `Items - List Items <https://learn.microsoft.com/rest/api/fabric/admin/items/list-items>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
         The capacity name or id.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -120,7 +123,7 @@ def list_items(
         ]
     )

-    client = fabric.FabricRestClient()
+    client = fabric.FabricRestClient(token_provider=auth.token_provider.get())

     params = {}

@@ -189,13 +192,15 @@ def list_item_access_details(

     This is a wrapper function for the following API: `Items - List Item Access Details <https://learn.microsoft.com/rest/api/fabric/admin/items/list-item-access-details>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item : str
         Name or id of the Fabric item.
     type : str, default=None
         Type of Fabric item.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name or id.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -217,7 +222,7 @@ def list_item_access_details(
             f"{icons.red_dot} The parameter 'item' and 'type' are mandatory."
         )

-    client = fabric.FabricRestClient()
+    client = fabric.FabricRestClient(token_provider=auth.token_provider.get())

     workspace_name, workspace_id = _resolve_workspace_name_and_id(workspace)
     item_name, item_id = _resolve_item_name_and_id(
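
The recurring change in this file is that admin calls now build their REST client with fabric.FabricRestClient(token_provider=auth.token_provider.get()), so they can run under a service principal. Below is a minimal sketch of how that hook might be populated; ServicePrincipalTokenProvider and its from_azure_key_vault constructor are assumptions based on the Service Principal notebook linked in the docstrings, and the key-vault values are placeholders.

# Hypothetical sketch: wiring a service principal into the new token_provider hook.
# ServicePrincipalTokenProvider and its constructor are assumed 0.9.0 exports;
# the ContextVar .set()/.get() round trip is implied by auth.token_provider.get()
# in the diff above.
import sempy_labs._authentication as auth
from sempy_labs import ServicePrincipalTokenProvider  # assumed export

provider = ServicePrincipalTokenProvider.from_azure_key_vault(
    key_vault_uri="https://mykeyvault.vault.azure.net/",  # placeholder values
    key_vault_tenant_id="tenantId",
    key_vault_client_id="clientId",
    key_vault_client_secret="clientSecret",
)
auth.token_provider.set(provider)  # ContextVar.set; read back via .get()

from sempy_labs.admin import list_items
df = list_items(type="Report")  # now calls the admin API as the service principal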
sempy_labs/admin/_scanner.py

@@ -5,8 +5,11 @@ from sempy.fabric.exceptions import FabricHTTPException
 import numpy as np
 import time
 from sempy_labs.admin._basic_functions import list_workspaces
+from sempy._utils._log import log
+import sempy_labs._authentication as auth


+@log
 def scan_workspaces(
     data_source_details: bool = False,
     dataset_schema: bool = False,
@@ -23,6 +26,8 @@ def scan_workspaces(
     `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
     `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     data_source_details : bool, default=False
@@ -30,12 +35,12 @@
     dataset_schema: bool = False
         Whether to return dataset schema (tables, columns and measures). If you set this parameter to true, you must fully enable metadata scanning in order for data to be returned. For more information, see Enable tenant settings for metadata scanning.
     dataset_expressions : bool, default=False
-        Whether to return data source details
+        Whether to return data source details.
     lineage : bool, default=False
-        Whether to return lineage info (upstream dataflows, tiles, data source IDs)
+        Whether to return lineage info (upstream dataflows, tiles, data source IDs).
     artifact_users : bool, default=False
-        Whether to return user details for a Power BI item (such as a report or a dashboard)
-    workspace : str | List[str] | UUID | List[UUID], default=None
+        Whether to return user details for a Power BI item (such as a report or a dashboard).
+    workspace : str | List[str] | uuid.UUID | List[uuid.UUID], default=None
         The required workspace name(s) or id(s) to be scanned

     Returns
@@ -49,7 +54,7 @@
         "misconfiguredDatasourceInstances": [],
     }

-    client = fabric.FabricRestClient()
+    client = fabric.FabricRestClient(token_provider=auth.token_provider.get())

     if workspace is None:
         workspace = fabric.resolve_workspace_name()
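
The scanner now routes through the same token provider and gains the @log decorator. A hedged usage sketch based on the signature above; the import path assumes scan_workspaces is exported from sempy_labs.admin, and the workspace names are placeholders.

# Hedged usage sketch of the workspace scanner.
from sempy_labs.admin import scan_workspaces  # assumed import path

scan = scan_workspaces(
    dataset_schema=True,             # requires metadata scanning tenant settings
    lineage=True,
    workspace=["Sales", "Finance"],  # placeholder workspace names
)
# The result is a dict; the diff above shows keys such as
# "misconfiguredDatasourceInstances".
print(scan.get("misconfiguredDatasourceInstances"))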
sempy_labs/directlake/_directlake_schema_compare.py

@@ -2,6 +2,8 @@ import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
     format_dax_object_name,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from IPython.display import display
 from sempy_labs.lakehouse import get_lakehouse_columns
@@ -9,12 +11,13 @@ from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
+from uuid import UUID


 @log
 def direct_lake_schema_compare(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     **kwargs,
 ):
     """
@@ -22,10 +25,10 @@ def direct_lake_schema_compare(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -41,27 +44,33 @@
         )
         del kwargs["lakehouse_workspace"]

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
     )
-    lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)

     if artifact_type == "Warehouse":
         raise ValueError(
             f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
         )

-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)

-    if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
+    if not any(r["Mode"] == "DirectLake" for _, r in dfP.iterrows()):
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
+            f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode."
         )

-    dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    if artifact_type is None:
+        raise ValueError(
+            f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workpace as the semantic model."
+        )
+
+    lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
+    dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)

     dfT.rename(columns={"Type": "Table Type"}, inplace=True)
@@ -92,7 +101,7 @@
         )
     else:
         print(
-            f"{icons.yellow_dot} The following tables exist in the '{dataset}' semantic model within the '{workspace}' workspace"
+            f"{icons.yellow_dot} The following tables exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace"
             f" but do not exist in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
         )
         display(missingtbls)
@@ -102,7 +111,7 @@
         )
     else:
         print(
-            f"{icons.yellow_dot} The following columns exist in the '{dataset}' semantic model within the '{workspace}' workspace "
+            f"{icons.yellow_dot} The following columns exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace "
            f"but do not exist in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
         )
         display(missingcols)
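
With the widened signature, dataset and workspace can be passed as names or as UUIDs. A short sketch; the UUIDs below are placeholders, not real items.

# Both forms are accepted after this change.
from uuid import UUID
from sempy_labs.directlake import direct_lake_schema_compare

direct_lake_schema_compare(dataset="Sales Model", workspace="Sales")
direct_lake_schema_compare(
    dataset=UUID("11111111-2222-3333-4444-555555555555"),    # placeholder IDs
    workspace=UUID("66666666-7777-8888-9999-000000000000"),
)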
sempy_labs/directlake/_directlake_schema_sync.py

@@ -3,16 +3,21 @@ import sempy.fabric as fabric
 from sempy_labs.lakehouse import get_lakehouse_columns
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from sempy_labs.tom import connect_semantic_model
-from sempy_labs._helper_functions import _convert_data_type
+from sempy_labs._helper_functions import (
+    _convert_data_type,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def direct_lake_schema_sync(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     add_to_model: bool = False,
     **kwargs,
 ):
@@ -21,10 +26,10 @@ def direct_lake_schema_sync(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     add_to_model : bool, default=False
@@ -45,22 +50,29 @@
         )
         del kwargs["lakehouse_workspace"]

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
     )

     if artifact_type == "Warehouse":
         raise ValueError(
             f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
         )
+
+    if artifact_type is None:
+        raise ValueError(
+            f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workpace as the semantic model."
+        )
+
     lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)

     lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)

     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
     ) as tom:

         for i, r in lc.iterrows():
@@ -86,7 +98,7 @@
                 for c in tom.all_columns()
             ):
                 print(
-                    f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset}' semantic model within the '{workspace}' workspace."
+                    f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                 )
                 if add_to_model:
                     dt = _convert_data_type(dType)
@@ -97,5 +109,5 @@
                         data_type=dt,
                     )
                     print(
-                        f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset}' semantic model within the '{workspace}' workspace."
+                        f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                     )
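
The same pattern applies to the sync function; per the body above, add_to_model=True switches it from report-only output to actually adding the missing columns. A sketch with placeholder names:

from sempy_labs.directlake import direct_lake_schema_sync

# Report-only: prints lakehouse columns missing from the model.
direct_lake_schema_sync(dataset="Sales Model", workspace="Sales")

# Also adds the missing columns to the semantic model.
direct_lake_schema_sync(dataset="Sales Model", workspace="Sales", add_to_model=True)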
sempy_labs/directlake/_dl_helper.py

@@ -7,7 +7,6 @@
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     retry,
-    resolve_dataset_id,
     resolve_lakehouse_name,
     _convert_data_type,
     resolve_dataset_name_and_id,
@@ -16,17 +15,17 @@ from sempy_labs._helper_functions import (


 def check_fallback_reason(
-    dataset: str | UUID, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery.

     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -81,9 +80,9 @@
 def generate_direct_lake_semantic_model(
     dataset: str,
     lakehouse_tables: Union[str, List[str]],
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
     schema: str = "dbo",
     overwrite: bool = False,
     refresh: bool = True,
@@ -97,15 +96,15 @@
         Name of the semantic model to be created.
     lakehouse_tables : str | List[str]
         The table(s) within the Fabric lakehouse to add to the semantic model. All columns from these tables will be added to the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model will reside.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model will reside.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None
         The lakehouse which stores the delta tables which will feed the Direct Lake semantic model.
         Defaults to None which resolves to the attached lakehouse.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace in which the lakehouse resides.
+    lakehouse_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     schema : str, default="dbo"
@@ -127,7 +126,7 @@
     if isinstance(lakehouse_tables, str):
         lakehouse_tables = [lakehouse_tables]

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     if lakehouse_workspace is None:
         lakehouse_workspace = workspace
     if lakehouse is None:
@@ -144,23 +143,23 @@
     for t in lakehouse_tables:
         if t not in dfLT["Table Name"].values:
             raise ValueError(
-                f"{icons.red_dot} The '{t}' table does not exist as a delta table in the '{lakehouse}' within the '{workspace}' workspace."
+                f"{icons.red_dot} The '{t}' table does not exist as a delta table in the '{lakehouse}' within the '{workspace_name}' workspace."
             )

     dfLC = get_lakehouse_columns(lakehouse=lakehouse, workspace=lakehouse_workspace)
     expr = generate_shared_expression(
         item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
     )
-    dfD = fabric.list_datasets(workspace=workspace)
+    dfD = fabric.list_datasets(workspace=workspace_id)
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]

     if len(dfD_filt) > 0 and not overwrite:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
+            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace_name}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
         )

     create_blank_semantic_model(
-        dataset=dataset, workspace=workspace, overwrite=overwrite
+        dataset=dataset, workspace=workspace_id, overwrite=overwrite
     )

     @retry(
@@ -169,7 +168,7 @@
     )
     def dyn_connect():
         with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=workspace
+            dataset=dataset, readonly=True, workspace=workspace_id
         ) as tom:

             tom.model
@@ -178,7 +177,7 @@

     expression_name = "DatabaseQuery"
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=False
+        dataset=dataset, workspace=workspace_id, readonly=False
     ) as tom:
         if not any(e.Name == expression_name for e in tom.model.Expressions):
             tom.add_expression(name=expression_name, expression=expr)
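
A hedged example of the generator whose signature was widened above; the import path assumes generate_direct_lake_semantic_model is exported from sempy_labs.directlake, and the names are placeholders.

from sempy_labs.directlake import generate_direct_lake_semantic_model

generate_direct_lake_semantic_model(
    dataset="Sales Model",
    lakehouse_tables=["DimDate", "FactSales"],  # delta tables to add
    workspace="Sales",                          # may now also be a workspace UUID
    lakehouse="SalesLakehouse",
    schema="dbo",
    overwrite=False,
    refresh=True,
)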
@@ -199,21 +198,21 @@
     )

     if refresh:
-        refresh_semantic_model(dataset=dataset, workspace=workspace)
+        refresh_semantic_model(dataset=dataset, workspace=workspace_id)


 def get_direct_lake_source(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, str, UUID, UUID]:
     """
-    Obtains the source information for a direct lake semantic model.
+    Obtains the source information for a direct lake semantic model (if the source is located in the same workspace as the semantic model).

     Parameters
     ----------
-    dataset : str
-        The name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -225,34 +224,54 @@
         If the semantic model is not a Direct Lake semantic model, it will return None, None, None.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
-    client = fabric.PowerBIRestClient()
-    request_body = {
-        "artifacts": [
-            {
-                "objectId": dataset_id,
-                "type": "dataset",
-            }
-        ]
-    }
-    response = client.post(
-        "metadata/relations/upstream?apiVersion=3", json=request_body
-    )
-
-    artifacts = response.json().get("artifacts", [])
-    sql_id, sql_object_name, sql_workspace_id, artifact_type = None, None, None, None
-
-    for artifact in artifacts:
-        object_type = artifact.get("typeName")
-        display_name = artifact.get("displayName")
-        if object_type in ["Datawarehouse", "Lakewarehouse"]:
-            artifact_type = (
-                "Warehouse" if object_type == "Datawarehouse" else "Lakehouse"
-            )
-            sql_id = artifact.get("objectId")
-            sql_workspace_id = artifact.get("workspace", {}).get("objectId")
-            sql_object_name = display_name
-            break
-
-    return artifact_type, sql_object_name, sql_id, sql_workspace_id
+    from sempy_labs._helper_functions import get_direct_lake_sql_endpoint
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    sql_endpoint_id = get_direct_lake_sql_endpoint(dataset=dataset, workspace=workspace)
+    dfI = fabric.list_items(workspace=workspace)
+    dfI_filt = dfI[(dfI["Id"] == sql_endpoint_id) & (dfI["Type"] == "SQLEndpoint")]
+
+    artifact_type, artifact_name, artifact_id = None, None, None
+
+    if not dfI_filt.empty:
+        artifact_name = dfI_filt["Display Name"].iloc[0]
+        artifact_id = dfI[
+            (dfI["Display Name"] == artifact_name)
+            & (dfI["Type"].isin(["Lakehouse", "Warehouse"]))
+        ]["Id"].iloc[0]
+        artifact_type = dfI[
+            (dfI["Display Name"] == artifact_name)
+            & (dfI["Type"].isin(["Lakehouse", "Warehouse"]))
+        ]["Type"].iloc[0]
+
+    return artifact_type, artifact_name, artifact_id, workspace_id
+
+    # client = fabric.PowerBIRestClient()
+    # request_body = {
+    #     "artifacts": [
+    #         {
+    #             "objectId": dataset_id,
+    #             "type": "dataset",
+    #         }
+    #     ]
+    # }
+    # response = client.post(
+    #     "metadata/relations/upstream?apiVersion=3", json=request_body
+    # )
+
+    # artifacts = response.json().get("artifacts", [])
+    # sql_id, sql_object_name, sql_workspace_id, artifact_type = None, None, None, None
+
+    # for artifact in artifacts:
+    #     object_type = artifact.get("typeName")
+    #     display_name = artifact.get("displayName")
+    #     if object_type in ["Datawarehouse", "Lakewarehouse"]:
+    #         artifact_type = (
+    #             "Warehouse" if object_type == "Datawarehouse" else "Lakehouse"
+    #         )
+    #         sql_id = artifact.get("objectId")
+    #         sql_workspace_id = artifact.get("workspace", {}).get("objectId")
+    #         sql_object_name = display_name
+    #         break
+
+    # return artifact_type, sql_object_name, sql_id, sql_workspace_id
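
Per the rewritten body, get_direct_lake_source now resolves the source by matching the model's SQL endpoint against fabric.list_items instead of the undocumented upstream-relations endpoint, and it returns (None, None, None, workspace_id) when no same-workspace Lakehouse or Warehouse is found. A consumption sketch, assuming the function is exported from sempy_labs.directlake and with a placeholder model name:

from sempy_labs.directlake import get_direct_lake_source

artifact_type, artifact_name, artifact_id, workspace_id = get_direct_lake_source(
    dataset="Sales Model"  # workspace defaults to the notebook's workspace
)
if artifact_type is None:
    print("Source not resolvable within the model's workspace.")
elif artifact_type == "Lakehouse":
    print(f"Backed by lakehouse '{artifact_name}' ({artifact_id}).")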
sempy_labs/directlake/_generate_shared_expression.py

@@ -3,16 +3,18 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     resolve_warehouse_id,
+    resolve_workspace_name_and_id,
 )
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


 def generate_shared_expression(
     item_name: Optional[str] = None,
     item_type: str = "Lakehouse",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> str:
     """
     Dynamically generates the M expression used by a Direct Lake model for a given lakehouse/warehouse.
@@ -24,8 +26,8 @@
         Defaults to None which resolves to the lakehouse attached to the notebook.
     item_type : str, default="Lakehouse"
         The Fabric item name. Valid options: 'Lakehouse', 'Warehouse'.
-    workspace : str, default=None
-        The Fabric workspace used by the item.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the item.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.

@@ -35,8 +37,7 @@
         Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_types = ["Lakehouse", "Warehouse"]
     item_type = item_type.capitalize()
     if item_type not in item_types:
@@ -46,11 +47,11 @@

     if item_name is None:
         item_id = fabric.get_lakehouse_id()
-        item_name = resolve_lakehouse_name(item_id, workspace)
+        item_name = resolve_lakehouse_name(item_id, workspace_id)
     elif item_name is not None and item_type == "Lakehouse":
-        item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace)
+        item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace_id)
     elif item_type == "Warehouse":
-        item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace)
+        item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace_id)

     client = fabric.FabricRestClient()
     item_type_rest = f"{item_type.lower()}s"
@@ -72,7 +73,7 @@

     if provStatus == "InProgress":
         raise ValueError(
-            f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
+            f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace_name}' workspace has not yet been provisioned. Please wait until it has been provisioned."
        )

     start_expr = "let\n\tdatabase = "
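
Usage of generate_shared_expression is unchanged apart from the workspace parameter now also accepting a UUID. A short sketch with placeholder names:

from sempy_labs.directlake import generate_shared_expression

expr = generate_shared_expression(
    item_name="SalesLakehouse",  # placeholder lakehouse
    item_type="Lakehouse",
    workspace="Sales",           # may now also be a workspace UUID
)
print(expr)  # the M expression begins with "let\n\tdatabase = " per the body above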