semantic-link-labs 0.9.6__py3-none-any.whl → 0.9.8__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only.


Files changed (35)
  1. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.8.dist-info}/METADATA +8 -5
  2. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.8.dist-info}/RECORD +35 -32
  3. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.8.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +4 -0
  5. sempy_labs/_ai.py +3 -1
  6. sempy_labs/_capacities.py +0 -1
  7. sempy_labs/_dax_query_view.py +2 -0
  8. sempy_labs/_delta_analyzer_history.py +298 -0
  9. sempy_labs/_helper_functions.py +65 -16
  10. sempy_labs/_icons.py +6 -6
  11. sempy_labs/_list_functions.py +3 -1
  12. sempy_labs/_model_bpa_bulk.py +10 -11
  13. sempy_labs/_model_bpa_rules.py +1 -1
  14. sempy_labs/admin/_basic_functions.py +28 -2
  15. sempy_labs/admin/_reports.py +1 -1
  16. sempy_labs/admin/_scanner.py +0 -2
  17. sempy_labs/admin/_tenant.py +8 -3
  18. sempy_labs/directlake/_generate_shared_expression.py +9 -1
  19. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +82 -36
  20. sempy_labs/directlake/_update_directlake_partition_entity.py +3 -0
  21. sempy_labs/graph/_groups.py +6 -0
  22. sempy_labs/graph/_teams.py +2 -0
  23. sempy_labs/graph/_users.py +4 -0
  24. sempy_labs/lakehouse/__init__.py +12 -3
  25. sempy_labs/lakehouse/_blobs.py +231 -0
  26. sempy_labs/lakehouse/_shortcuts.py +22 -3
  27. sempy_labs/migration/_direct_lake_to_import.py +47 -10
  28. sempy_labs/report/__init__.py +4 -0
  29. sempy_labs/report/_report_functions.py +3 -3
  30. sempy_labs/report/_report_helper.py +17 -5
  31. sempy_labs/report/_reportwrapper.py +17 -8
  32. sempy_labs/report/_save_report.py +147 -0
  33. sempy_labs/tom/_model.py +156 -23
  34. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.8.dist-info/licenses}/LICENSE +0 -0
  35. {semantic_link_labs-0.9.6.dist-info → semantic_link_labs-0.9.8.dist-info}/top_level.txt +0 -0
sempy_labs/graph/_users.py

@@ -7,6 +7,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
 )
+from sempy._utils._log import log


 def resolve_user_id(user: str | UUID) -> UUID:
@@ -33,6 +34,7 @@ def resolve_user_id(user: str | UUID) -> UUID:
     return result.get("id")


+@log
 def get_user(user: str | UUID) -> pd.DataFrame:
     """
     Shows properties of a given user.
@@ -70,6 +72,7 @@ def get_user(user: str | UUID) -> pd.DataFrame:
     return pd.DataFrame([new_data])


+@log
 def list_users() -> pd.DataFrame:
     """
     Shows a list of users and their properties.
@@ -120,6 +123,7 @@ def list_users() -> pd.DataFrame:
     return df


+@log
 def send_mail(
     user: UUID | str,
     subject: str,
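The only change in _users.py is instrumentation: sempy's @log decorator now wraps get_user, list_users, and send_mail, so call signatures are unchanged. A hedged sketch of calling the decorated helpers, assuming they remain re-exported from sempy_labs.graph as in prior releases (the user principal name is a placeholder):

# Run in a Fabric notebook with Microsoft Graph permissions; @log records
# telemetry but does not change inputs or outputs.
from sempy_labs.graph import list_users, get_user

df_users = list_users()                  # one row per user, as before
df_user = get_user("alice@contoso.com")  # accepts a UPN or a UUID
print(df_user.T)
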
sempy_labs/lakehouse/__init__.py

@@ -1,12 +1,15 @@
-from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
-from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from sempy_labs.lakehouse._get_lakehouse_columns import (
+    get_lakehouse_columns,
+)
+from sempy_labs.lakehouse._get_lakehouse_tables import (
+    get_lakehouse_tables,
+)
 from sempy_labs.lakehouse._lakehouse import (
     lakehouse_attached,
     optimize_lakehouse_tables,
     vacuum_lakehouse_tables,
     run_table_maintenance,
 )
-
 from sempy_labs.lakehouse._shortcuts import (
     # create_shortcut,
     create_shortcut_onelake,
@@ -14,6 +17,10 @@ from sempy_labs.lakehouse._shortcuts import (
     reset_shortcut_cache,
     list_shortcuts,
 )
+from sempy_labs.lakehouse._blobs import (
+    recover_lakehouse_object,
+    list_blobs,
+)

 __all__ = [
     "get_lakehouse_columns",
@@ -27,4 +34,6 @@ __all__ = [
     "reset_shortcut_cache",
     "run_table_maintenance",
     "list_shortcuts",
+    "recover_lakehouse_object",
+    "list_blobs",
 ]
sempy_labs/lakehouse/_blobs.py (new file)

@@ -0,0 +1,231 @@
+from sempy_labs._helper_functions import (
+    resolve_workspace_id,
+    resolve_lakehouse_id,
+    _xml_to_dict,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+)
+from sempy._utils._log import log
+from uuid import UUID
+from typing import Optional, List
+import sempy_labs._icons as icons
+import xml.etree.ElementTree as ET
+import pandas as pd
+
+
+def _request_blob_api(
+    request: str,
+    method: str = "get",
+    payload: Optional[dict] = None,
+    status_codes: int | List[int] = 200,
+):
+
+    import requests
+    import notebookutils
+    from sempy.fabric.exceptions import FabricHTTPException
+
+    if isinstance(status_codes, int):
+        status_codes = [status_codes]
+
+    token = notebookutils.credentials.getToken("storage")
+
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Content-Type": "application/json",
+        "x-ms-version": "2025-05-05",
+    }
+
+    response = requests.request(
+        method.upper(),
+        f"https://onelake.blob.fabric.microsoft.com/{request}",
+        headers=headers,
+        json=payload,
+    )
+
+    if response.status_code not in status_codes:
+        raise FabricHTTPException(response)
+
+    return response
+
+
+@log
+def list_blobs(
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+    container: Optional[str] = None,
+) -> pd.DataFrame:
+    """
+    Returns a list of blobs for a given lakehouse.
+
+    This function leverages the following API: `List Blobs <https://learn.microsoft.com/rest/api/storageservices/list-blobs?tabs=microsoft-entra-id>`_.
+
+    Parameters
+    ----------
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    container : str, default=None
+        The container name to list blobs from. If None, lists all blobs in the lakehouse.
+        Valid values are "Tables" or "Files". If not specified, the function will list all blobs in the lakehouse.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of blobs in the lakehouse.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
+
+    if container is None:
+        path_prefix = f"{workspace_id}/{lakehouse_id}"
+    else:
+        if container not in ["Tables", "Files"]:
+            raise ValueError(
+                f"{icons.red_dot} Invalid container '{container}' within the file_path parameter. Expected 'Tables' or 'Files'."
+            )
+        path_prefix = f"{workspace_id}/{lakehouse_id}/{container}"
+
+    response = _request_blob_api(
+        request=f"{path_prefix}?restype=container&comp=list&include=deleted"
+    )
+    root = ET.fromstring(response.content)
+    response_json = _xml_to_dict(root)
+
+    columns = {
+        "Blob Name": "str",
+        "Is Deleted": "bool",
+        "Deletion Id": "str",
+        "Creation Time": "datetime",
+        "Expiry Time": "datetime",
+        "Etag": "str",
+        "Resource Type": "str",
+        "Content Length": "int",
+        "Content Type": "str",
+        "Content Encoding": "str",
+        "Content Language": "str",
+        "Content CRC64": "str",
+        "Content MD5": "str",
+        "Cache Control": "str",
+        "Content Disposition": "str",
+        "Blob Type": "str",
+        "Access Tier": "str",
+        "Access Tier Inferred": "str",
+        "Server Encrypted": "bool",
+        "Deleted Time": "str",
+        "Remaining Retention Days": "str",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    for blob in (
+        response_json.get("EnumerationResults", {}).get("Blobs", {}).get("Blob", {})
+    ):
+        p = blob.get("Properties", {})
+        new_data = {
+            "Blob Name": blob.get("Name"),
+            "Is Deleted": blob.get("Deleted", False),
+            "Deletion Id": blob.get("DeletionId"),
+            "Creation Time": p.get("Creation-Time"),
+            "Expiry Time": p.get("Expiry-Time"),
+            "Etag": p.get("Etag"),
+            "Resource Type": p.get("ResourceType"),
+            "Content Length": p.get("Content-Length"),
+            "Content Type": p.get("Content-Type"),
+            "Content Encoding": p.get("Content-Encoding"),
+            "Content Language": p.get("Content-Language"),
+            "Content CRC64": p.get("Content-CRC64"),
+            "Content MD5": p.get("Content-MD5"),
+            "Cache Control": p.get("Cache-Control"),
+            "Content Disposition": p.get("Content-Disposition"),
+            "Blob Type": p.get("BlobType"),
+            "Access Tier": p.get("AccessTier"),
+            "Access Tier Inferred": p.get("AccessTierInferred"),
+            "Server Encrypted": p.get("ServerEncrypted"),
+            "Deleted Time": p.get("DeletedTime"),
+            "Remaining Retention Days": p.get("RemainingRetentionDays"),
+        }
+
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+@log
+def recover_lakehouse_object(
+    file_path: str,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Recovers an object (i.e. table, file, folder) in a lakehouse from a deleted state. Only `soft-deleted objects <https://learn.microsoft.com/fabric/onelake/onelake-disaster-recovery#soft-delete-for-onelake-files>`_ can be recovered (deleted for less than 7 days).
+
+    Parameters
+    ----------
+    file_path : str
+        The file path of the object to restore. For example: "Tables/my_delta_table".
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
+
+    blob_path_prefix = f"{lakehouse_id}/{file_path}"
+
+    container = file_path.split("/")[0]
+    if container not in ["Tables", "Files"]:
+        raise ValueError(
+            f"{icons.red_dot} Invalid container '{container}' within the file_path parameter. Expected 'Tables' or 'Files'."
+        )
+
+    df = list_blobs(lakehouse=lakehouse, workspace=workspace, container=container)
+
+    for _, r in df.iterrows():
+        blob_name = r.get("Blob Name")
+        is_deleted = r.get("Is Deleted")
+        if blob_name.startswith(blob_path_prefix) and is_deleted:
+            print(f"{icons.in_progress} Restoring the '{blob_name}' blob...")
+            _request_blob_api(
+                request=f"{workspace_id}/{lakehouse_id}/{file_path}?comp=undelete",
+                method="put",
+            )
+            print(f"{icons.green_dot} The '{blob_name}' blob has been restored.")
+
+
+def _get_user_delegation_key():
+
+    # https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key
+
+    from datetime import datetime, timedelta, timezone
+
+    utc_now = datetime.now(timezone.utc)
+    start_time = utc_now + timedelta(minutes=2)
+    expiry_time = start_time + timedelta(minutes=45)
+    start_str = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+    expiry_str = expiry_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+
+    payload = f"""<?xml version="1.0" encoding="utf-8"?>
+    <KeyInfo>
+        <Start>{start_str}</Start>
+        <Expiry>{expiry_str}</Expiry>
+    </KeyInfo>"""
+
+    response = _request_blob_api(
+        request="restype=service&comp=userdelegationkey",
+        method="post",
+        payload=payload,
+    )
+
+    return response.content
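The new _blobs module gives the lakehouse package soft-delete tooling: _request_blob_api signs calls against the OneLake blob endpoint with a notebook-issued storage token, list_blobs surfaces the List Blobs API (soft-deleted entries included via include=deleted), and recover_lakehouse_object undeletes matching blobs with comp=undelete. A hedged usage sketch, run from a Fabric notebook (lakehouse and table names are placeholders):

from sempy_labs.lakehouse import list_blobs, recover_lakehouse_object

# All blobs in the attached lakehouse, soft-deleted ones included
df = list_blobs()

# Restrict to the Tables container of a named lakehouse and keep only
# soft-deleted entries (recoverable for up to 7 days)
df = list_blobs(lakehouse="MyLakehouse", container="Tables")
deleted = df[df["Is Deleted"]]

# Undelete one object; progress is printed via icons.in_progress/green_dot
recover_lakehouse_object(
    file_path="Tables/my_delta_table",
    lakehouse="MyLakehouse",
)
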
sempy_labs/lakehouse/_shortcuts.py

@@ -24,12 +24,15 @@ def create_shortcut_onelake(
     shortcut_name: Optional[str] = None,
     source_path: str = "Tables",
     destination_path: str = "Tables",
+    shortcut_conflict_policy: Optional[str] = None,
 ):
     """
     Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.

     This is a wrapper function for the following API: `OneLake Shortcuts - Create Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/create-shortcut>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     table_name : str
@@ -51,6 +54,8 @@ def create_shortcut_onelake(
         A string representing the full path to the table/file in the source lakehouse, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
     destination_path: str, default="Tables"
         A string representing the full path where the shortcut is created, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+    shortcut_conflict_policy : str, default=None
+        When provided, it defines the action to take when a shortcut with the same name and path already exists. The default action is 'Abort'. Additional ShortcutConflictPolicy types may be added over time.
     """

     if not (source_path.startswith("Files") or source_path.startswith("Tables")):
@@ -103,7 +108,8 @@
     # Check if the shortcut already exists
     try:
         response = _base_api(
-            request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts/{destination_path}/{actual_shortcut_name}"
+            request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts/{destination_path}/{actual_shortcut_name}",
+            client="fabric_sp",
         )
         response_json = response.json()
         del response_json["target"]["type"]
@@ -119,11 +125,21 @@
     except FabricHTTPException:
         pass

+    url = f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts"
+
+    if shortcut_conflict_policy:
+        if shortcut_conflict_policy not in ["Abort", "GenerateUniqueName"]:
+            raise ValueError(
+                f"{icons.red_dot} The 'shortcut_conflict_policy' parameter must be either 'Abort' or 'GenerateUniqueName'."
+            )
+        url += f"?shortcutConflictPolicy={shortcut_conflict_policy}"
+
     _base_api(
-        request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
+        request=url,
         payload=payload,
         status_codes=201,
         method="post",
+        client="fabric_sp",
     )

     print(
@@ -211,6 +227,8 @@ def delete_shortcut(

     This is a wrapper function for the following API: `OneLake Shortcuts - Delete Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/delete-shortcut>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     shortcut_name : str
@@ -234,6 +252,7 @@
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}",
         method="delete",
+        client="fabric_sp",
     )

     print(
@@ -288,7 +307,7 @@ def list_shortcuts(
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     path: str, default=None
-        The path within lakehouse where to look for shortcuts. If provied, must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+        The path within lakehouse where to look for shortcuts. If provided, must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
         Defaults to None which will retun all shortcuts on the given lakehouse

     Returns
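Two themes in this hunk: the shortcut endpoints now pass client="fabric_sp" to _base_api so they work under service-principal authentication, and create_shortcut_onelake gains a shortcut_conflict_policy parameter that is validated locally and forwarded as a shortcutConflictPolicy query-string value. A hedged sketch of the new parameter; arguments not visible in this diff are elided, and the table name is a placeholder:

from sempy_labs.lakehouse import create_shortcut_onelake

create_shortcut_onelake(
    table_name="DimDate",
    # ... source/destination lakehouse and workspace arguments elided;
    # this diff only shows the tail of the signature ...
    destination_path="Tables",
    shortcut_conflict_policy="GenerateUniqueName",  # or "Abort"; anything else raises ValueError
)
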
sempy_labs/migration/_direct_lake_to_import.py

@@ -1,11 +1,16 @@
 import sempy
 from uuid import UUID
 import sempy_labs._icons as icons
+from typing import Optional


-def migrate_direct_lake_to_import(dataset: str | UUID, workspace: str | UUID):
+def migrate_direct_lake_to_import(
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    mode: str = "import",
+):
     """
-    Migrates a semantic model from Direct Lake mode to import mode. After running this function, you must go to the semantic model settings and update the cloud connection. Not doing so will result in an inablity to refresh/use the semantic model.
+    Migrates a semantic model or specific table(s) from a Direct Lake mode to import or DirectQuery mode. After running this function, you must go to the semantic model settings and update the cloud connection. Not doing so will result in an inablity to refresh/use the semantic model.

     Parameters
     ----------
@@ -15,12 +20,29 @@ def migrate_direct_lake_to_import(dataset: str | UUID, workspace: str | UUID):
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    mode : str, default="import"
+        The mode to migrate to. Can be either "import" or "directquery".
     """

     sempy.fabric._client._utils._init_analysis_services()
     import Microsoft.AnalysisServices.Tabular as TOM
     from sempy_labs.tom import connect_semantic_model

+    modes = {
+        "import": "Import",
+        "directquery": "DirectQuery",
+        "dq": "DirectQuery",
+    }
+
+    # Resolve mode
+    mode = mode.lower()
+    actual_mode = modes.get(mode)
+    if actual_mode is None:
+        raise ValueError(f"Invalid mode '{mode}'. Must be one of {list(modes.keys())}.")
+
+    # if isinstance(tables, str):
+    #     tables = [tables]
+
     with connect_semantic_model(
         dataset=dataset, workspace=workspace, readonly=False
     ) as tom:
@@ -31,7 +53,14 @@ def migrate_direct_lake_to_import(dataset: str | UUID, workspace: str | UUID):
             )
             return

-        for t in tom.model.Tables:
+        # if tables is None:
+        table_list = [t for t in tom.model.Tables]
+        # else:
+        #     table_list = [t for t in tom.model.Tables if t.Name in tables]
+        #     if not table_list:
+        #         raise ValueError(f"{icons.red_dot} No tables found to migrate.")
+
+        for t in table_list:
             table_name = t.Name
             if t.Partitions.Count == 1 and all(
                 p.Mode == TOM.ModeType.DirectLake for p in t.Partitions
@@ -51,16 +80,24 @@ def migrate_direct_lake_to_import(dataset: str | UUID, workspace: str | UUID):
                     table_name=table_name,
                     partition_name=partition_name,
                     expression=expression,
-                    mode="Import",
+                    mode=actual_mode,
                 )
                 # Remove Direct Lake partition
                 tom.remove_object(object=p)
+                # if tables is not None:
+                #     print(
+                #         f"{icons.green_dot} The '{table_name}' table has been migrated to '{actual_mode}' mode."
+                #     )

         tom.model.Model.DefaultMode = TOM.ModeType.Import
+        # if tables is None:
+        print(
+            f"{icons.green_dot} All tables which were in Direct Lake mode have been migrated to '{actual_mode}' mode."
+        )

-    # Check
-    # for t in tom.model.Tables:
-    #     if t.Partitions.Count == 1 and all(p.Mode == TOM.ModeType.Import for p in t.Partitions) and t.CalculationGroup is None:
-    #         p = next(p for p in t.Partitions)
-    #         print(p.Name)
-    #         print(p.Source.Expression)
+    # Check
+    # for t in tom.model.Tables:
+    #     if t.Partitions.Count == 1 and all(p.Mode == TOM.ModeType.Import for p in t.Partitions) and t.CalculationGroup is None:
+    #         p = next(p for p in t.Partitions)
+    #         print(p.Name)
+    #         print(p.Source.Expression)
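The migration function now accepts an optional workspace and a mode switch: "import" keeps the old behavior, while "directquery" (alias "dq") converts the Direct Lake partitions to DirectQuery instead; matching is case-insensitive. The commented-out tables plumbing hints at per-table migration to come but is inert in this release, and note that the model's DefaultMode is still set to Import regardless of the chosen mode. A sketch, assuming the function is re-exported from sempy_labs.migration and using a placeholder dataset name:

from sempy_labs.migration import migrate_direct_lake_to_import

# Previous behavior, now the default: migrate every Direct Lake table to import
migrate_direct_lake_to_import(dataset="Sales Model")

# New: target DirectQuery instead ("directquery" and "dq" both resolve
# to TOM's DirectQuery partition mode)
migrate_direct_lake_to_import(dataset="Sales Model", mode="dq")
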
sempy_labs/report/__init__.py

@@ -1,3 +1,6 @@
+from sempy_labs.report._save_report import (
+    save_report_as_pbip,
+)
 from sempy_labs.report._reportwrapper import (
     ReportWrapper,
 )
@@ -46,4 +49,5 @@ __all__ = [
     "run_report_bpa",
     "get_report_datasources",
     "download_report",
+    "save_report_as_pbip",
 ]
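This re-export surfaces save_report_as_pbip from the new _save_report module. The module body (+147 lines) is not part of this diff, so only the import path is certain; a safe way to inspect the signature locally:

from sempy_labs.report import save_report_as_pbip

help(save_report_as_pbip)  # the implementation is not shown in this diff
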
sempy_labs/report/_report_functions.py

@@ -116,9 +116,9 @@ def report_dependency_tree(workspace: Optional[str | UUID] = None):
     dfR.rename(columns={"Name": "Report Name"}, inplace=True)
     dfR = dfR[["Report Name", "Dataset Name"]]

-    report_icon = "\U0001F4F6"
-    dataset_icon = "\U0001F9CA"
-    workspace_icon = "\U0001F465"
+    report_icon = "\U0001f4f6"
+    dataset_icon = "\U0001f9ca"
+    workspace_icon = "\U0001f465"

     node_dict = {}
     rootNode = Node(workspace_name)
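This change is purely cosmetic, most likely a formatter (such as Black) lowercasing the hex digits in \U escapes; the escapes are case-insensitive and denote the same code points:

assert "\U0001F4F6" == "\U0001f4f6"  # same string, code point U+1F4F6
print("\U0001f4f6", "\U0001f9ca", "\U0001f465")  # prints the three icons unchanged
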
sempy_labs/report/_report_helper.py

@@ -236,15 +236,27 @@ def find_entity_property_pairs(data, result=None, keys_path=None):
         keys_path = []

     if isinstance(data, dict):
+        expression = data.get("Expression", {})
+        source_ref = (
+            expression.get("SourceRef", {}) if isinstance(expression, dict) else {}
+        )
+
         if (
-            "Entity" in data.get("Expression", {}).get("SourceRef", {})
+            isinstance(source_ref, dict)
+            and "Entity" in source_ref
             and "Property" in data
         ):
-            entity = data.get("Expression", {}).get("SourceRef", {}).get("Entity", {})
-            property_value = data.get("Property")
-            object_type = keys_path[-1].replace("HierarchyLevel", "Hierarchy")
+            entity = source_ref.get("Entity", "")
+            property_value = data.get("Property", "")
+
+            object_type = (
+                keys_path[-1].replace("HierarchyLevel", "Hierarchy")
+                if keys_path
+                else "Unknown"
+            )
             result[property_value] = (entity, object_type)
-            keys_path.pop()
+            if keys_path:
+                keys_path.pop()

         # Recursively search the rest of the dictionary
         for key, value in data.items():
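The rewrite hardens find_entity_property_pairs against report JSON where Expression is not a dict (the old chained .get() calls would raise AttributeError) and against an empty keys_path (the old unconditional pop() could raise IndexError). A minimal illustration with an invented payload:

data = {"Expression": "a literal, not a dict", "Property": "Sales"}

expression = data.get("Expression", {})
# Old code: expression.get("SourceRef", {}) -> AttributeError on str
source_ref = expression.get("SourceRef", {}) if isinstance(expression, dict) else {}
assert source_ref == {}  # new code degrades gracefully instead of crashing
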
sempy_labs/report/_reportwrapper.py (same defensive pattern applied inside ReportWrapper)

@@ -966,17 +966,25 @@ class ReportWrapper:
             keys_path = []

         if isinstance(data, dict):
+            expression = data.get("Expression", {})
+            source_ref = (
+                expression.get("SourceRef", {})
+                if isinstance(expression, dict)
+                else {}
+            )
             if (
-                "Entity" in data.get("Expression", {}).get("SourceRef", {})
+                isinstance(source_ref, dict)
+                and "Entity" in source_ref
                 and "Property" in data
             ):
-                entity = (
-                    data.get("Expression", {})
-                    .get("SourceRef", {})
-                    .get("Entity", {})
+                entity = source_ref.get("Entity", "")
+                property_value = data.get("Property", "")
+
+                object_type = (
+                    keys_path[-1].replace("HierarchyLevel", "Hierarchy")
+                    if keys_path
+                    else "Unknown"
                 )
-                property_value = data.get("Property", {})
-                object_type = keys_path[-1].replace("HierarchyLevel", "Hierarchy")
                 is_agg = keys_path[-3] == "Aggregation"
                 is_viz_calc = keys_path[-3] == "NativeVisualCalculation"
                 is_sparkline = keys_path[-3] == "SparklineData"
@@ -987,7 +995,8 @@
                     is_viz_calc,
                     is_sparkline,
                 )
-                keys_path.pop()
+                if keys_path:
+                    keys_path.pop()

             # Recursively search the rest of the dictionary
             for key, value in data.items():