semantic-link-labs 0.8.6__py3-none-any.whl → 0.8.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (34) hide show
  1. {semantic_link_labs-0.8.6.dist-info → semantic_link_labs-0.8.7.dist-info}/METADATA +14 -6
  2. {semantic_link_labs-0.8.6.dist-info → semantic_link_labs-0.8.7.dist-info}/RECORD +34 -28
  3. {semantic_link_labs-0.8.6.dist-info → semantic_link_labs-0.8.7.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +37 -6
  5. sempy_labs/_authentication.py +108 -0
  6. sempy_labs/_connections.py +355 -176
  7. sempy_labs/_dataflows.py +0 -1
  8. sempy_labs/_gateways.py +439 -0
  9. sempy_labs/_generate_semantic_model.py +51 -30
  10. sempy_labs/_git.py +13 -5
  11. sempy_labs/_helper_functions.py +14 -3
  12. sempy_labs/_list_functions.py +1 -1
  13. sempy_labs/_model_auto_build.py +4 -2
  14. sempy_labs/_model_bpa.py +2 -15
  15. sempy_labs/_model_bpa_bulk.py +4 -2
  16. sempy_labs/_refresh_semantic_model.py +6 -0
  17. sempy_labs/admin/__init__.py +19 -9
  18. sempy_labs/admin/_basic_functions.py +475 -548
  19. sempy_labs/admin/_external_data_share.py +97 -0
  20. sempy_labs/admin/_git.py +69 -0
  21. sempy_labs/admin/_items.py +264 -0
  22. sempy_labs/admin/_scanner.py +104 -0
  23. sempy_labs/directlake/_dl_helper.py +6 -2
  24. sempy_labs/directlake/_get_shared_expression.py +5 -35
  25. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -2
  26. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -2
  27. sempy_labs/report/_generate_report.py +10 -4
  28. sempy_labs/report/_report_bpa.py +1 -0
  29. sempy_labs/report/_report_helper.py +58 -0
  30. sempy_labs/report/_report_list_functions.py +2 -0
  31. sempy_labs/report/_reportwrapper.py +358 -175
  32. sempy_labs/tom/_model.py +1 -0
  33. {semantic_link_labs-0.8.6.dist-info → semantic_link_labs-0.8.7.dist-info}/LICENSE +0 -0
  34. {semantic_link_labs-0.8.6.dist-info → semantic_link_labs-0.8.7.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,97 @@
1
+ import sempy.fabric as fabric
2
+ from uuid import UUID
3
+ import sempy_labs._icons as icons
4
+ from sempy.fabric.exceptions import FabricHTTPException
5
+ import pandas as pd
6
+ from sempy_labs.admin._basic_functions import _resolve_workspace_name_and_id
7
+
8
+
9
def list_external_data_shares() -> pd.DataFrame:
    """
    Lists external data shares in the tenant. This function is for admins.

    This is a wrapper function for the following API: `External Data Shares - List External Data Shares <https://learn.microsoft.com/rest/api/fabric/admin/external-data-shares/list-external-data-shares>`_.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of external data shares in the tenant.
    """
    # NOTE: the 'Creater' spelling is kept as-is for backward compatibility with
    # callers that reference these column names.
    df = pd.DataFrame(
        columns=[
            "External Data Share Id",
            "Paths",
            "Creater Principal Id",
            "Creater Principal Name",
            "Creater Principal Type",
            "Creater Principal UPN",
            "Recipient UPN",
            "Status",
            "Expiration Time UTC",
            "Workspace Id",
            "Item Id",
            "Invitation URL",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/items/externalDataShares")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Collect all rows first and concatenate once (row-wise pd.concat in a loop
    # is quadratic).
    rows = []
    for share in response.json().get("value", []):
        cp = share.get("creatorPrincipal", {})
        rows.append(
            {
                "External Data Share Id": share.get("id"),
                # Each cell holds the full list of paths for the share.
                "Paths": share.get("paths", []),
                "Creater Principal Id": cp.get("id"),
                "Creater Principal Name": cp.get("displayName"),
                "Creater Principal Type": cp.get("type"),
                "Creater Principal UPN": cp.get("userDetails", {}).get(
                    "userPrincipalName"
                ),
                "Recipient UPN": share.get("recipient", {}).get("userPrincipalName"),
                "Status": share.get("status"),
                "Expiration Time UTC": share.get("expirationTimeUtc"),
                "Workspace Id": share.get("workspaceId"),
                "Item Id": share.get("itemId"),
                "Invitation URL": share.get("invitationUrl"),
            }
        )

    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    # Fix: pd.to_datetime() cannot take a DataFrame slice of arbitrary columns
    # (it would try to assemble year/month/day component columns and raise);
    # convert each datetime column as a Series instead.
    for col in ["Expiration Time UTC"]:
        df[col] = pd.to_datetime(df[col])

    return df
66
+
67
+
68
def revoke_external_data_share(
    external_data_share_id: UUID, item_id: UUID, workspace: str | UUID
):
    """
    Revokes the specified external data share. Note: This action cannot be undone.

    This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/admin/external-data-shares/revoke-external-data-share>`_.

    Parameters
    ----------
    external_data_share_id : UUID
        The external data share ID.
    item_id : UUID
        The ID of the item the external data share belongs to.
    workspace : str | UUID
        The Fabric workspace name or id.

    Raises
    ------
    FabricHTTPException
        If the revoke request does not return HTTP 200.
    """
    (workspace, workspace_id) = _resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(
        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_id}' item within the '{workspace}' workspace has been revoked."
    )
@@ -0,0 +1,69 @@
1
+ import sempy.fabric as fabric
2
+ from sempy.fabric.exceptions import FabricHTTPException
3
+ from sempy_labs._helper_functions import (
4
+ pagination,
5
+ )
6
+ import pandas as pd
7
+ from sempy_labs.admin._basic_functions import list_workspaces
8
+
9
+
10
def list_git_connections() -> pd.DataFrame:
    """
    Shows a list of Git connections.

    This is a wrapper function for the following API: `Workspaces - List Git Connections <https://learn.microsoft.com/rest/api/fabric/admin/workspaces/list-git-connections>`_.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of Git connections.
    """

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/workspaces/discoverGitConnections")

    df = pd.DataFrame(
        columns=[
            "Workspace Id",
            "Organization Name",
            "Owner Name",
            "Project Name",
            "Git Provider Type",
            "Repository Name",
            "Branch Name",
            "Directory Name",
        ]
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Walk every page returned by the API and append one row per connection.
    for page in pagination(client, response):
        for connection in page.get("value", []):
            provider = connection.get("gitProviderDetails", {})
            row = {
                "Workspace Id": connection.get("workspaceId"),
                "Organization Name": provider.get("organizationName"),
                "Owner Name": provider.get("ownerName"),
                "Project Name": provider.get("projectName"),
                "Git Provider Type": provider.get("gitProviderType"),
                "Repository Name": provider.get("repositoryName"),
                "Branch Name": provider.get("branchName"),
                "Directory Name": provider.get("directoryName"),
            }
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    # Enrich with the workspace display name, placed right after the id column.
    dfW = list_workspaces()
    df = df.merge(
        dfW[["Id", "Name"]], left_on="Workspace Id", right_on="Id", how="left"
    )
    df = df.rename(columns={"Name": "Workspace Name"})
    df.insert(1, "Workspace Name", df.pop("Workspace Name"))

    df = df.drop(columns=["Id"])

    return df
@@ -0,0 +1,264 @@
1
+ import sempy.fabric as fabric
2
+ import pandas as pd
3
+ from typing import Optional, Tuple
4
+ from uuid import UUID
5
+ import sempy_labs._icons as icons
6
+ from sempy.fabric.exceptions import FabricHTTPException
7
+ from sempy_labs.admin._basic_functions import (
8
+ _resolve_capacity_name_and_id,
9
+ _resolve_workspace_name_and_id,
10
+ )
11
+ from sempy_labs._helper_functions import (
12
+ pagination,
13
+ _is_valid_uuid,
14
+ _build_url,
15
+ )
16
+
17
+
18
def _resolve_item_id(
    item_name: str,
    type: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
) -> UUID:
    """
    Resolve an item name (optionally constrained by type and workspace) to its id.

    Raises ValueError when no matching item is found.
    """
    matches = list_items(workspace=workspace, type=type)
    matches = matches[matches["Item Name"] == item_name]

    if matches.empty:
        raise ValueError(
            f"The '{item_name}' {type} does not exist within the '{workspace}' workspace or is not of type '{type}'."
        )

    # If several items share the name, the first match wins (same as before).
    return matches["Item Id"].iloc[0]
33
+
34
+
35
def _resolve_item_name_and_id(
    item: str,
    type: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
    **kwargs,
) -> Tuple[str, UUID]:
    """
    Resolve an item (given by name or id) to its (name, id) pair.

    Parameters
    ----------
    item : str
        The item name or id.
    type : str, default=None
        The item type, used to disambiguate items sharing a name.
    workspace : str | UUID, default=None
        The Fabric workspace name or id.

    Returns
    -------
    Tuple[str, UUID]
        The item name and item id.
    """
    if "item_name" in kwargs:
        print(
            "The 'item_name' parameter has been deprecated. Please replace this parameter with 'item' from the function going forward."
        )
        # Fix: the deprecated value must be read from kwargs; the previous
        # `item = item_name` referenced an undefined local name and raised
        # NameError whenever the deprecated parameter was used.
        item = kwargs["item_name"]
        del kwargs["item_name"]

    dfI = list_items(workspace=workspace, type=type, item=item)

    if len(dfI) > 1:
        raise ValueError(
            f"There are more than 1 item with the name '{item}'. Please specify the 'type' and/or 'workspace' to be more precise."
        )

    try:
        item_name = dfI["Item Name"].iloc[0]
        item_id = dfI["Item Id"].iloc[0]
    except Exception:
        # An empty result means nothing matched the name/id + filters.
        raise ValueError(
            f"The '{item}' {type} does not exist within the '{workspace}' workspace or is not of type '{type}'."
        )

    return item_name, item_id
64
+
65
+
66
def list_items(
    capacity: Optional[str | UUID] = None,
    workspace: Optional[str] = None,
    state: Optional[str] = None,
    type: Optional[str] = None,
    item: Optional[str | UUID] = None,
    **kwargs,
) -> pd.DataFrame:
    """
    Shows a list of active Fabric and Power BI items.

    This is a wrapper function for the following API: `Items - List Items <https://learn.microsoft.com/rest/api/fabric/admin/items/list-items>`_.

    Parameters
    ----------
    capacity : str | UUID, default=None
        The capacity name or id.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    state : str, default=None
        The item state.
    type : str, default=None
        The item type.
    item : str | UUID, default=None
        Item id or name to filter the list.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of active Fabric and Power BI items.
    """
    if "capacity_name" in kwargs:
        print(
            "The 'capacity_name' parameter has been deprecated. Please replace this parameter with 'capacity' from the function going forward."
        )
        capacity = kwargs["capacity_name"]
        del kwargs["capacity_name"]

    df = pd.DataFrame(
        columns=[
            "Item Id",
            "Item Name",
            "Type",
            "Description",
            "State",
            "Last Updated Date",
            "Creator Principal Id",
            "Creator Principal Display Name",
            "Creator Principal Type",
            "Creator User Principal Name",
            "Workspace Id",
            "Capacity Id",
        ]
    )

    client = fabric.FabricRestClient()

    # Only non-None filters become query-string parameters.
    params = {}
    if capacity is not None:
        params["capacityId"] = _resolve_capacity_name_and_id(capacity)[1]
    if workspace is not None:
        params["workspaceId"] = _resolve_workspace_name_and_id(workspace)[1]
    if state is not None:
        params["state"] = state
    if type is not None:
        params["type"] = type

    response = client.get(_build_url("/v1/admin/items", params))

    if response.status_code != 200:
        raise FabricHTTPException(response)

    for page in pagination(client, response):
        for entity in page.get("itemEntities", []):
            creator = entity.get("creatorPrincipal", {})
            row = {
                "Item Id": entity.get("id"),
                "Type": entity.get("type"),
                "Item Name": entity.get("name"),
                "Description": entity.get("description"),
                "State": entity.get("state"),
                "Last Updated Date": entity.get("lastUpdatedDate"),
                "Creator Principal Id": creator.get("id"),
                "Creator Principal Display Name": creator.get("displayName"),
                "Creator Principal Type": creator.get("type"),
                "Creator User Principal Name": creator.get("userDetails", {}).get(
                    "userPrincipalName"
                ),
                "Workspace Id": entity.get("workspaceId"),
                "Capacity Id": entity.get("capacityId"),
            }
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    # Client-side filter: the API has no item parameter, so narrow by id or name.
    if item is not None:
        column = "Item Id" if _is_valid_uuid(item) else "Item Name"
        df = df[df[column] == item]

    return df
179
+
180
+
181
def list_item_access_details(
    item: str | UUID = None,
    type: str = None,
    workspace: Optional[str | UUID] = None,
    **kwargs,
) -> pd.DataFrame:
    """
    Returns a list of users (including groups and service principals) and lists their workspace roles.

    This is a wrapper function for the following API: `Items - List Item Access Details <https://learn.microsoft.com/rest/api/fabric/admin/items/list-item-access-details>`_.

    Parameters
    ----------
    item : str
        Name or id of the Fabric item.
    type : str, default=None
        Type of Fabric item.
    workspace : str, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of users (including groups and service principals) and lists their workspace roles.
    """
    if "item_name" in kwargs:
        print(
            "The 'item_name' parameter has been deprecated. Please replace this parameter with 'item' from the function going forward."
        )
        item = kwargs["item_name"]
        del kwargs["item_name"]

    # Both the item and its type are required to resolve the REST endpoint.
    if item is None or type is None:
        raise ValueError(
            f"{icons.red_dot} The parameter 'item' and 'type' are mandatory."
        )

    client = fabric.FabricRestClient()

    workspace_name, workspace_id = _resolve_workspace_name_and_id(workspace)
    item_name, item_id = _resolve_item_name_and_id(
        item=item, type=type, workspace=workspace_name
    )

    df = pd.DataFrame(
        columns=[
            "User Id",
            "User Name",
            "User Type",
            "User Principal Name",
            "Item Name",
            "Item Type",
            "Item Id",
            "Permissions",
            "Additional Permissions",
        ]
    )

    response = client.get(f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/users")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    for detail in response.json().get("accessDetails", []):
        principal = detail.get("principal", {})
        access = detail.get("itemAccessDetails", {})
        row = {
            "User Id": principal.get("id"),
            "User Name": principal.get("displayName"),
            "User Type": principal.get("type"),
            "User Principal Name": principal.get("userDetails", {}).get(
                "userPrincipalName"
            ),
            "Item Type": access.get("type"),
            "Permissions": access.get("permissions"),
            "Additional Permissions": access.get("additionalPermissions"),
            "Item Name": item_name,
            "Item Id": item_id,
        }
        df = pd.concat([df, pd.DataFrame([row])], ignore_index=True)

    return df
@@ -0,0 +1,104 @@
1
+ import sempy.fabric as fabric
2
+ from typing import Optional, List
3
+ from uuid import UUID
4
+ from sempy.fabric.exceptions import FabricHTTPException
5
+ import numpy as np
6
+ import time
7
+ from sempy_labs.admin._basic_functions import list_workspaces
8
+
9
+
10
def scan_workspaces(
    data_source_details: bool = False,
    dataset_schema: bool = False,
    dataset_expressions: bool = False,
    lineage: bool = False,
    artifact_users: bool = False,
    workspace: Optional[str | List[str] | UUID | List[UUID]] = None,
) -> dict:
    """
    Get the inventory and details of the tenant.

    This is a wrapper function for the following APIs:
    `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-post-workspace-info>`_.
    `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
    `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.

    Parameters
    ----------
    data_source_details : bool, default=False
        Whether to return data source details.
    dataset_schema : bool, default=False
        Whether to return dataset schema (tables, columns and measures). If you set this parameter to true, you must fully enable metadata scanning in order for data to be returned. For more information, see Enable tenant settings for metadata scanning.
    dataset_expressions : bool, default=False
        Whether to return dataset expressions (DAX and Mashup queries). If you set this parameter to true, you must fully enable metadata scanning in order for data to be returned. For more information, see Enable tenant settings for metadata scanning.
    lineage : bool, default=False
        Whether to return lineage info (upstream dataflows, tiles, data source IDs).
    artifact_users : bool, default=False
        Whether to return user details for a Power BI item (such as a report or a dashboard).
    workspace : str | List[str] | UUID | List[UUID], default=None
        The required workspace name(s) or id(s) to be scanned.

    Returns
    -------
    dictionary
        A json object with the scan result.
    """
    scan_result = {
        "workspaces": [],
        "datasourceInstances": [],
        "misconfiguredDatasourceInstances": [],
    }

    client = fabric.FabricRestClient()

    if workspace is None:
        workspace = fabric.resolve_workspace_name()

    # Fix: also wrap a bare UUID into a list; previously only a bare str was
    # wrapped, so passing a single UUID made the isin() filters below match
    # nothing and the scan silently covered zero workspaces.
    if isinstance(workspace, (str, UUID)):
        workspace = [workspace]

    # Resolve the given names/ids to workspace ids.
    # NOTE(review): UUID objects are compared against the string ids in dfW —
    # callers appear to pass ids as strings; confirm if UUID instances work.
    dfW = list_workspaces()
    workspace_list = dfW[dfW["Name"].isin(workspace)]["Id"].tolist()
    workspace_list = workspace_list + dfW[dfW["Id"].isin(workspace)]["Id"].tolist()

    workspaces = np.array(workspace_list)
    batch_size = 99  # the getInfo API accepts at most 100 workspaces per call
    for i in range(0, len(workspaces), batch_size):
        batch = workspaces[i : i + batch_size].tolist()
        request_body = {"workspaces": batch}

        response_clause = f"/v1.0/myorg/admin/workspaces/getInfo?lineage={lineage}&datasourceDetails={data_source_details}&datasetSchema={dataset_schema}&datasetExpressions={dataset_expressions}&getArtifactUsers={artifact_users}"
        response = client.post(response_clause, json=request_body)

        # The scan is asynchronous: a 202 returns a scan id to poll.
        if response.status_code != 202:
            raise FabricHTTPException(response)
        scan_id = response.json()["id"]
        scan_status = response.json().get("status")
        while scan_status not in ["Succeeded", "Failed"]:
            time.sleep(1)
            response = client.get(f"/v1.0/myorg/admin/workspaces/scanStatus/{scan_id}")
            scan_status = response.json().get("status")
        if scan_status == "Failed":
            raise FabricHTTPException(response)
        response = client.get(f"/v1.0/myorg/admin/workspaces/scanResult/{scan_id}")
        if response.status_code != 200:
            raise FabricHTTPException(response)

        responseJson = response.json()

        # Merge this batch's result sections into the accumulated result.
        for key in (
            "workspaces",
            "datasourceInstances",
            "misconfiguredDatasourceInstances",
        ):
            if key in responseJson:
                scan_result[key].extend(responseJson[key])

    return scan_result
@@ -112,7 +112,9 @@ def generate_direct_lake_semantic_model(
112
112
  """
113
113
 
114
114
  from sempy_labs.lakehouse import get_lakehouse_tables, get_lakehouse_columns
115
- from sempy_labs.directlake import get_shared_expression
115
+ from sempy_labs.directlake._generate_shared_expression import (
116
+ generate_shared_expression,
117
+ )
116
118
  from sempy_labs.tom import connect_semantic_model
117
119
  from sempy_labs._generate_semantic_model import create_blank_semantic_model
118
120
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
@@ -141,7 +143,9 @@ def generate_direct_lake_semantic_model(
141
143
  )
142
144
 
143
145
  dfLC = get_lakehouse_columns(lakehouse=lakehouse, workspace=lakehouse_workspace)
144
- expr = get_shared_expression(lakehouse=lakehouse, workspace=lakehouse_workspace)
146
+ expr = generate_shared_expression(
147
+ item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
148
+ )
145
149
  dfD = fabric.list_datasets(workspace=workspace)
146
150
  dfD_filt = dfD[dfD["Dataset Name"] == dataset]
147
151
 
@@ -1,8 +1,4 @@
1
- import sempy.fabric as fabric
2
- from sempy_labs._helper_functions import resolve_lakehouse_name
3
- from sempy_labs._list_functions import list_lakehouses
4
1
  from typing import Optional
5
- import sempy_labs._icons as icons
6
2
 
7
3
 
8
4
  def get_shared_expression(
@@ -26,36 +22,10 @@ def get_shared_expression(
26
22
  str
27
23
  Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
28
24
  """
29
-
30
- workspace = fabric.resolve_workspace_name(workspace)
31
- if lakehouse is None:
32
- lakehouse_id = fabric.get_lakehouse_id()
33
- lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
34
-
35
- dfL = list_lakehouses(workspace=workspace)
36
- lakeDetail = dfL[dfL["Lakehouse Name"] == lakehouse]
37
-
38
- sqlEPCS = lakeDetail["SQL Endpoint Connection String"].iloc[0]
39
- sqlepid = lakeDetail["SQL Endpoint ID"].iloc[0]
40
- provStatus = lakeDetail["SQL Endpoint Provisioning Status"].iloc[0]
41
-
42
- parts = sqlEPCS.split(".", 1)
43
- if parts:
44
- parts[0] = parts[0].upper()
45
-
46
- sqlEPCS = ".".join(parts)
47
-
48
- if provStatus == "InProgress":
49
- raise ValueError(
50
- f"{icons.red_dot} The SQL Endpoint for the '{lakehouse}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
51
- )
52
-
53
- sh = (
54
- 'let\n\tdatabase = Sql.Database("'
55
- + sqlEPCS
56
- + '", "'
57
- + sqlepid
58
- + '")\nin\n\tdatabase'
25
+ from sempy_labs.directlake._generate_shared_expression import (
26
+ generate_shared_expression,
59
27
  )
60
28
 
61
- return sh
29
+ return generate_shared_expression(
30
+ item_name=lakehouse, item_type="Lakehouse", workspace=workspace
31
+ )
@@ -1,5 +1,4 @@
1
1
  import sempy.fabric as fabric
2
- from sempy_labs.directlake._get_shared_expression import get_shared_expression
3
2
  from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
4
3
  from sempy_labs._helper_functions import (
5
4
  resolve_lakehouse_name,
@@ -56,7 +55,9 @@ def update_direct_lake_model_lakehouse_connection(
56
55
 
57
56
  icons.sll_tags.append("UpdateDLConnection")
58
57
 
59
- shEx = get_shared_expression(lakehouse, lakehouse_workspace)
58
+ shEx = generate_shared_expression(
59
+ item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
60
+ )
60
61
 
61
62
  with connect_semantic_model(
62
63
  dataset=dataset, readonly=False, workspace=workspace
@@ -1,6 +1,6 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs.directlake._get_shared_expression import get_shared_expression
3
+ from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
4
4
  from sempy_labs._helper_functions import resolve_lakehouse_name, retry
5
5
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
6
6
  from sempy_labs.tom import connect_semantic_model
@@ -71,7 +71,9 @@ def migrate_tables_columns_to_semantic_model(
71
71
  "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
72
72
  "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
73
73
  )
74
- shEx = get_shared_expression(lakehouse, lakehouse_workspace)
74
+ shEx = generate_shared_expression(
75
+ item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
76
+ )
75
77
 
76
78
  dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
77
79
  dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
@@ -179,7 +179,9 @@ def update_report_from_reportjson(
179
179
  )
180
180
 
181
181
 
182
- def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.DataFrame:
182
+ def get_report_definition(
183
+ report: str, workspace: Optional[str] = None, return_dataframe: bool = True
184
+ ) -> pd.DataFrame | dict:
183
185
  """
184
186
  Gets the collection of definition files of a report.
185
187
 
@@ -193,10 +195,12 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.Da
193
195
  The Fabric workspace name in which the report resides.
194
196
  Defaults to None which resolves to the workspace of the attached lakehouse
195
197
  or if no lakehouse attached, resolves to the workspace of the notebook.
198
+ return_dataframe : bool, default=True
199
+ If True, returns a dataframe. If False, returns a json dictionary.
196
200
 
197
201
  Returns
198
202
  -------
199
- pandas.DataFrame
203
+ pandas.DataFrame | dict
200
204
  The collection of report definition files within a pandas dataframe.
201
205
  """
202
206
 
@@ -209,9 +213,11 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.Da
209
213
  )
210
214
 
211
215
  result = lro(client, response).json()
212
- rdef = pd.json_normalize(result["definition"]["parts"])
213
216
 
214
- return rdef
217
+ if return_dataframe:
218
+ return pd.json_normalize(result["definition"]["parts"])
219
+ else:
220
+ return result
215
221
 
216
222
 
217
223
  @log