semantic-link-labs 0.10.1__py3-none-any.whl → 0.11.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (94)
  1. {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/METADATA +8 -6
  2. {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/RECORD +94 -92
  3. sempy_labs/__init__.py +4 -0
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_capacities.py +2 -0
  6. sempy_labs/_connections.py +11 -0
  7. sempy_labs/_dashboards.py +9 -4
  8. sempy_labs/_data_pipelines.py +5 -0
  9. sempy_labs/_dataflows.py +284 -17
  10. sempy_labs/_daxformatter.py +2 -0
  11. sempy_labs/_delta_analyzer_history.py +4 -1
  12. sempy_labs/_deployment_pipelines.py +4 -0
  13. sempy_labs/_documentation.py +3 -0
  14. sempy_labs/_environments.py +10 -1
  15. sempy_labs/_eventhouses.py +12 -5
  16. sempy_labs/_eventstreams.py +11 -3
  17. sempy_labs/_external_data_shares.py +8 -2
  18. sempy_labs/_gateways.py +26 -5
  19. sempy_labs/_git.py +11 -0
  20. sempy_labs/_graphQL.py +10 -3
  21. sempy_labs/_helper_functions.py +62 -10
  22. sempy_labs/_job_scheduler.py +54 -7
  23. sempy_labs/_kql_databases.py +11 -2
  24. sempy_labs/_kql_querysets.py +11 -3
  25. sempy_labs/_list_functions.py +17 -2
  26. sempy_labs/_managed_private_endpoints.py +11 -2
  27. sempy_labs/_mirrored_databases.py +17 -3
  28. sempy_labs/_mirrored_warehouses.py +9 -3
  29. sempy_labs/_ml_experiments.py +11 -3
  30. sempy_labs/_ml_models.py +11 -3
  31. sempy_labs/_model_bpa_rules.py +2 -0
  32. sempy_labs/_mounted_data_factories.py +12 -8
  33. sempy_labs/_notebooks.py +3 -0
  34. sempy_labs/_refresh_semantic_model.py +1 -0
  35. sempy_labs/_semantic_models.py +6 -0
  36. sempy_labs/_spark.py +7 -0
  37. sempy_labs/_sql_endpoints.py +54 -31
  38. sempy_labs/_sqldatabase.py +13 -4
  39. sempy_labs/_tags.py +5 -1
  40. sempy_labs/_user_delegation_key.py +2 -0
  41. sempy_labs/_variable_libraries.py +3 -1
  42. sempy_labs/_warehouses.py +13 -3
  43. sempy_labs/_workloads.py +3 -0
  44. sempy_labs/_workspace_identity.py +3 -0
  45. sempy_labs/_workspaces.py +14 -1
  46. sempy_labs/admin/__init__.py +2 -0
  47. sempy_labs/admin/_activities.py +6 -5
  48. sempy_labs/admin/_apps.py +31 -31
  49. sempy_labs/admin/_artifacts.py +8 -3
  50. sempy_labs/admin/_basic_functions.py +5 -0
  51. sempy_labs/admin/_capacities.py +33 -20
  52. sempy_labs/admin/_datasets.py +51 -51
  53. sempy_labs/admin/_domains.py +17 -1
  54. sempy_labs/admin/_external_data_share.py +8 -2
  55. sempy_labs/admin/_git.py +14 -9
  56. sempy_labs/admin/_items.py +15 -2
  57. sempy_labs/admin/_reports.py +64 -65
  58. sempy_labs/admin/_shared.py +7 -1
  59. sempy_labs/admin/_tags.py +5 -0
  60. sempy_labs/admin/_tenant.py +5 -2
  61. sempy_labs/admin/_users.py +9 -3
  62. sempy_labs/admin/_workspaces.py +88 -0
  63. sempy_labs/directlake/_dl_helper.py +2 -0
  64. sempy_labs/directlake/_generate_shared_expression.py +2 -0
  65. sempy_labs/directlake/_get_directlake_lakehouse.py +2 -4
  66. sempy_labs/directlake/_get_shared_expression.py +2 -0
  67. sempy_labs/directlake/_guardrails.py +2 -0
  68. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +2 -0
  69. sempy_labs/directlake/_warm_cache.py +1 -0
  70. sempy_labs/graph/_groups.py +22 -7
  71. sempy_labs/graph/_teams.py +7 -2
  72. sempy_labs/graph/_users.py +1 -0
  73. sempy_labs/lakehouse/_blobs.py +1 -0
  74. sempy_labs/lakehouse/_get_lakehouse_tables.py +88 -27
  75. sempy_labs/lakehouse/_helper.py +2 -0
  76. sempy_labs/lakehouse/_lakehouse.py +38 -5
  77. sempy_labs/lakehouse/_livy_sessions.py +2 -1
  78. sempy_labs/lakehouse/_shortcuts.py +7 -1
  79. sempy_labs/migration/_direct_lake_to_import.py +2 -0
  80. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +4 -0
  81. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +2 -0
  82. sempy_labs/report/_download_report.py +2 -1
  83. sempy_labs/report/_generate_report.py +2 -0
  84. sempy_labs/report/_paginated.py +2 -0
  85. sempy_labs/report/_report_bpa.py +110 -122
  86. sempy_labs/report/_report_bpa_rules.py +2 -0
  87. sempy_labs/report/_report_functions.py +7 -0
  88. sempy_labs/report/_reportwrapper.py +64 -31
  89. sempy_labs/theme/__init__.py +12 -0
  90. sempy_labs/theme/_org_themes.py +117 -0
  91. sempy_labs/tom/_model.py +494 -16
  92. {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/WHEEL +0 -0
  93. {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/licenses/LICENSE +0 -0
  94. {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_git.py CHANGED
@@ -4,8 +4,10 @@ from sempy_labs._helper_functions import (
 )
 import pandas as pd
 from sempy_labs.admin._basic_functions import list_workspaces
+from sempy._utils._log import log


+@log
 def list_git_connections() -> pd.DataFrame:
     """
     Shows a list of Git connections.
@@ -38,6 +40,7 @@ def list_git_connections() -> pd.DataFrame:
         uses_pagination=True,
     )

+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             git = v.get("gitProviderDetails", {})
@@ -52,16 +55,18 @@
                 "Directory Name": git.get("directoryName"),
             }

-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            dfs.append(pd.DataFrame(new_data, index=[0]))

-    dfW = list_workspaces()
-    df = pd.merge(
-        df, dfW[["Id", "Name"]], left_on="Workspace Id", right_on="Id", how="left"
-    )
-    new_col_name = "Workspace Name"
-    df = df.rename(columns={"Name": new_col_name})
-    df.insert(1, new_col_name, df.pop(new_col_name))
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        dfW = list_workspaces()
+        df = pd.merge(
+            df, dfW[["Id", "Name"]], left_on="Workspace Id", right_on="Id", how="left"
+        )
+        new_col_name = "Workspace Name"
+        df = df.rename(columns={"Name": new_col_name})
+        df.insert(1, new_col_name, df.pop(new_col_name))

-    df = df.drop(columns=["Id"])
+        df = df.drop(columns=["Id"])

     return df
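Two changes recur across nearly every module in this diff: functions gain the `@log` decorator imported from `sempy._utils._log`, and per-row `pd.concat` calls inside loops are replaced by accumulating single-row frames in a `dfs` list that is concatenated once, guarded by `if dfs:` so an empty response no longer breaks the merge step. A minimal sketch of the accumulation pattern, with a stubbed record source standing in for the paginated REST responses (the stub and function names below are illustrative, not the library's API):

import pandas as pd

def _fake_records():
    # Stand-in for the paginated REST responses the real functions iterate over.
    return [{"Name": "A", "Id": "1"}, {"Name": "B", "Id": "2"}]

def list_things() -> pd.DataFrame:
    df = pd.DataFrame()
    dfs = []
    for record in _fake_records():
        # One single-row frame per record, appended to a list...
        dfs.append(pd.DataFrame(record, index=[0]))

    if dfs:
        # ...then a single concat at the end, instead of growing df with
        # pd.concat([df, new_row]) on every iteration.
        df = pd.concat(dfs, ignore_index=True)

    return df

print(list_things())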
sempy_labs/admin/_items.py CHANGED
@@ -14,8 +14,10 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
 )
+from sempy._utils._log import log


+@log
 def _resolve_item_id(
     item: str,
     type: Optional[str] = None,
@@ -39,6 +41,7 @@ def _resolve_item_id(
     return item_id


+@log
 def _resolve_item_name_and_id(
     item: str,
     type: Optional[str] = None,
@@ -70,6 +73,7 @@ def _resolve_item_name_and_id(
     return item_name, item_id


+@log
 def list_items(
     capacity: Optional[str | UUID] = None,
     workspace: Optional[str | UUID] = None,
@@ -146,6 +150,7 @@ def list_items(

     responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

+    dfs = []
     for r in responses:
         for v in r.get("itemEntities", []):
             new_data = {
@@ -166,7 +171,10 @@
                 "Workspace Id": v.get("workspaceId"),
                 "Capacity Id": v.get("capacityId"),
             }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)

     if item is not None:
         if _is_valid_uuid(item):
@@ -177,6 +185,7 @@
     return df


+@log
 def list_item_access_details(
     item: str | UUID = None,
     type: str = None,
@@ -241,6 +250,7 @@
         client="fabric_sp",
     )

+    dfs = []
     for v in response.json().get("accessDetails", []):
         new_data = {
             "User Id": v.get("principal", {}).get("id"),
@@ -257,6 +267,9 @@
             "Item Name": item_name,
             "Item Id": item_id,
         }
-        df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)
+        dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)

     return df
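The `@log` decorator applied throughout these admin modules comes from `sempy._utils._log` in the semantic-link (`sempy`) dependency; its internals are not shown in this diff, but it wraps each call for telemetry. A rough behavioral sketch using a hand-rolled decorator (this is an assumption about what such a wrapper does, not the real implementation):

import functools
import logging

def log(func):
    # Hypothetical stand-in for sempy._utils._log.log: wrap the call and
    # record entry/exit; the real decorator emits Fabric telemetry instead.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = logging.getLogger(func.__module__)
        logger.info("calling %s", func.__qualname__)
        try:
            return func(*args, **kwargs)
        finally:
            logger.info("finished %s", func.__qualname__)
    return wrapper

@log
def resolve_id(name: str) -> str:
    # Placeholder body; the decorated functions in this diff resolve real Fabric items.
    return name.lower()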
sempy_labs/admin/_reports.py CHANGED
@@ -8,8 +8,10 @@ from sempy_labs._helper_functions import (
 )
 from uuid import UUID
 import sempy_labs._icons as icons
+from sempy._utils._log import log


+@log
 def list_reports(
     top: Optional[int] = None,
     skip: Optional[int] = None,
@@ -67,37 +69,36 @@ def list_reports(

     url.rstrip("$").rstrip("?")
     response = _base_api(request=url, client="fabric_sp")
-    rows = []

+    dfs = []
     for v in response.json().get("value", []):
-        rows.append(
-            {
-                "Report Id": v.get("id"),
-                "Report Name": v.get("name"),
-                "Type": v.get("reportType"),
-                "Web URL": v.get("webUrl"),
-                "Embed URL": v.get("embedUrl"),
-                "Dataset Id": v.get("datasetId"),
-                "Created Date": v.get("createdDateTime"),
-                "Modified Date": v.get("modifiedDateTime"),
-                "Created By": v.get("createdBy"),
-                "Modified By": v.get("modifiedBy"),
-                "Sensitivity Label Id": v.get("sensitivityLabel", {}).get("labelId"),
-                "Users": v.get("users"),
-                "Subscriptions": v.get("subscriptions"),
-                "Workspace Id": v.get("workspaceId"),
-                "Report Flags": v.get("reportFlags"),
-            }
-        )
-
-    if rows:
-        df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+        new_data = {
+            "Report Id": v.get("id"),
+            "Report Name": v.get("name"),
+            "Type": v.get("reportType"),
+            "Web URL": v.get("webUrl"),
+            "Embed URL": v.get("embedUrl"),
+            "Dataset Id": v.get("datasetId"),
+            "Created Date": v.get("createdDateTime"),
+            "Modified Date": v.get("modifiedDateTime"),
+            "Created By": v.get("createdBy"),
+            "Modified By": v.get("modifiedBy"),
+            "Sensitivity Label Id": v.get("sensitivityLabel", {}).get("labelId"),
+            "Users": v.get("users"),
+            "Subscriptions": v.get("subscriptions"),
+            "Workspace Id": v.get("workspaceId"),
+            "Report Flags": v.get("reportFlags"),
+        }
+        dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


+@log
 def _resolve_report_id(report: str | UUID) -> str:
     if _is_valid_uuid(report):
         return report
@@ -109,6 +110,7 @@ def _resolve_report_id(report: str | UUID) -> str:
     return df_filt["Report Id"].iloc[0]


+@log
 def list_report_users(report: str | UUID) -> pd.DataFrame:
     """
     Shows a list of users that have access to the specified report.
@@ -144,27 +146,26 @@ def list_report_users(report: str | UUID) -> pd.DataFrame:
     url = f"/v1.0/myorg/admin/reports/{report_id}/users"
     response = _base_api(request=url, client="fabric_sp")

-    rows = []
+    dfs = []
     for v in response.json().get("value", []):
-        rows.append(
-            {
-                "User Name": v.get("displayName"),
-                "Email Address": v.get("emailAddress"),
-                "Report User Access Right": v.get("reportUserAccessRight"),
-                "Identifier": v.get("identifier"),
-                "Graph Id": v.get("graphId"),
-                "Principal Type": v.get("principalType"),
-            }
-        )
-
-    if rows:
-        df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+        new_data = {
+            "User Name": v.get("displayName"),
+            "Email Address": v.get("emailAddress"),
+            "Report User Access Right": v.get("reportUserAccessRight"),
+            "Identifier": v.get("identifier"),
+            "Graph Id": v.get("graphId"),
+            "Principal Type": v.get("principalType"),
+        }
+        dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


+@log
 def list_report_subscriptions(report: str | UUID) -> pd.DataFrame:
     """
     Shows a list of report subscriptions along with subscriber details. This is a preview API call.
@@ -210,30 +211,28 @@
         client="fabric_sp",
     )

-    rows = []
+    dfs = []
     for v in response.json().get("value", []):
-        rows.append(
-            {
-                "Subscription Id": v.get("id"),
-                "Title": v.get("title"),
-                "Artifact Id": v.get("artifactId"),
-                "Artifact Name": v.get("artifactDisplayName"),
-                "Sub Artifact Name": v.get("subArtifactDisplayName"),
-                "Artifact Type": v.get("artifactType"),
-                "Is Enabled": v.get("isEnabled"),
-                "Frequency": v.get("frequency"),
-                "Start Date": v.get("startDate"),
-                "End Date": v.get("endDate"),
-                "Link To Content": v.get("linkToContent"),
-                "Preview Image": v.get("previewImage"),
-                "Attachment Format": v.get("attachmentFormat"),
-                "Users": str(v.get("users")),
-            }
-        )
-
-    if rows:
-        df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+        new_data = {
+            "Subscription Id": v.get("id"),
+            "Title": v.get("title"),
+            "Artifact Id": v.get("artifactId"),
+            "Artifact Name": v.get("artifactDisplayName"),
+            "Sub Artifact Name": v.get("subArtifactDisplayName"),
+            "Artifact Type": v.get("artifactType"),
+            "Is Enabled": v.get("isEnabled"),
+            "Frequency": v.get("frequency"),
+            "Start Date": v.get("startDate"),
+            "End Date": v.get("endDate"),
+            "Link To Content": v.get("linkToContent"),
+            "Preview Image": v.get("previewImage"),
+            "Attachment Format": v.get("attachmentFormat"),
+            "Users": str(v.get("users")),
+        }
+        dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/admin/_shared.py CHANGED
@@ -3,8 +3,10 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
 )
+from sempy._utils._log import log


+@log
 def list_widely_shared_artifacts(
     api_name: str = "LinksSharedToWholeOrganization",
 ) -> pd.DataFrame:
@@ -55,6 +57,7 @@ def list_widely_shared_artifacts(
         uses_pagination=True,
     )

+    dfs = []
     for r in responses:
         for v in r.get("ArtifactAccessEntities", []):
             sharer = v.get("sharer", {})
@@ -71,6 +74,9 @@
                 "Sharer Principal Type": sharer.get("principalType"),
             }

-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)

     return df
sempy_labs/admin/_tags.py CHANGED
@@ -6,8 +6,10 @@ from uuid import UUID
 from sempy_labs._tags import list_tags
 import sempy_labs._icons as icons
 from typing import List
+from sempy._utils._log import log


+@log
 def resolve_tag_id(tag: str | UUID):

     if _is_valid_uuid(tag):
@@ -22,6 +24,7 @@ def resolve_tag_id(tag: str | UUID):
     return tag_id


+@log
 def create_tags(tags: str | List[str]):
     """
     Creates a new tag or tags.
@@ -77,6 +80,7 @@ def create_tags(tags: str | List[str]):
     print(f"{icons.green_dot} The '{available_tags}' tag(s) have been created.")


+@log
 def delete_tag(tag: str | UUID):
     """
     Deletes a tag.
@@ -98,6 +102,7 @@ def delete_tag(tag: str | UUID):
     print(f"{icons.green_dot} The '{tag}' tag has been deleted.")


+@log
 def update_tag(name: str, tag: str | UUID):
     """
     Updates the name of a tag.
sempy_labs/admin/_tenant.py CHANGED
@@ -38,6 +38,7 @@ def list_tenant_settings() -> pd.DataFrame:

     response = _base_api(request="/v1/admin/tenantsettings", client="fabric_sp")

+    dfs = []
     for i in response.json().get("value", []):
         new_data = {
             "Setting Name": i.get("settingName"),
@@ -47,9 +48,11 @@
             "Tenant Setting Group": i.get("tenantSettingGroup"),
             "Enabled Security Groups": [i.get("enabledSecurityGroups", [])],
         }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+        dfs.append(pd.DataFrame(new_data, index=[0]))

-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

sempy_labs/admin/_users.py CHANGED
@@ -5,8 +5,10 @@ from sempy_labs._helper_functions import (
 )
 from uuid import UUID
 import pandas as pd
+from sempy._utils._log import log


+@log
 def list_access_entities(
     user_email_address: str,
 ) -> pd.DataFrame:
@@ -43,6 +45,7 @@
         uses_pagination=True,
     )

+    dfs = []
     for r in responses:
         for v in r.get("accessEntities", []):
             new_data = {
@@ -54,11 +57,15 @@
                     "additionalPermissions"
                 ),
             }
-            df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)

     return df


+@log
 def list_user_subscriptions(user: str | UUID) -> pd.DataFrame:
     """
     Shows a list of subscriptions for the specified user. This is a preview API call.
@@ -127,7 +134,6 @@

     if rows:
         df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/admin/_workspaces.py CHANGED
@@ -2,15 +2,21 @@ from sempy_labs._helper_functions import (
     _base_api,
     _build_url,
     _encode_user,
+    _update_dataframe_datatypes,
+    _create_dataframe,
 )
+
 from uuid import UUID
 from typing import Optional
 from sempy_labs.admin._basic_functions import (
     _resolve_workspace_name_and_id,
 )
 import sempy_labs._icons as icons
+import pandas as pd
+from sempy._utils._log import log


+@log
 def add_user_to_workspace(
     user: str | UUID,
     role: str = "Member",
@@ -68,6 +74,7 @@ def add_user_to_workspace(
     )


+@log
 def delete_user_from_workspace(
     user: str | UUID,
     workspace: Optional[str | UUID] = None,
@@ -116,6 +123,7 @@
     )


+@log
 def restore_deleted_workspace(workspace_id: UUID, name: str, email_address: str):
     """
     Restores a deleted workspace.
@@ -146,3 +154,83 @@ def restore_deleted_workspace(workspace_id: UUID, name: str, email_address: str)
     print(
         f"{icons.green_dot} The '{workspace_id}' workspace has been restored as '{name}'."
     )
+
+
+@log
+def list_orphaned_workspaces(top: int = 100) -> pd.DataFrame:
+    """
+    Shows a list of orphaned workspaces (those with no users or no admins).
+
+    This is a wrapper function for the following API:
+    `Admin - Groups ListGroupsAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/groups-get-groups-as-admin>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    top : int, default=100
+        The maximum number of results to return.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of orphaned workspaces.
+    """
+
+    # column structure with proper data types
+    columns = {
+        "Workspace Name": "string",
+        "Workspace Id": "string",
+        "Type": "string",
+        "State": "string",
+        "Is Read Only": "bool",
+        "Is On Dedicated Capacity": "bool",
+        "Capacity Migration Status": "string",
+        "Has Workspace Level Settings": "bool",
+        "Users": "list",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    url = (
+        "/v1.0/myorg/admin/groups?"
+        "$expand=users&"
+        "$filter=(not users/any()) or "
+        "(not users/any(u: u/groupUserAccessRight eq Microsoft.PowerBI.ServiceContracts.Api.GroupUserAccessRight'Admin'))&"
+        f"$top={top}"
+    )
+
+    response = _base_api(request=url, client="fabric_sp")
+    values = response.json().get("value", [])
+    df_raw = pd.json_normalize(values)
+
+    # friendly names and reorder
+    if not df_raw.empty:
+        df_raw = df_raw.rename(
+            columns={
+                "name": "Workspace Name",
+                "id": "Workspace Id",
+                "type": "Type",
+                "state": "State",
+                "isReadOnly": "Is Read Only",
+                "isOnDedicatedCapacity": "Is On Dedicated Capacity",
+                "capacityMigrationStatus": "Capacity Migration Status",
+                "hasWorkspaceLevelSettings ": "Has Workspace Level Settings",  # Note the space in original
+                "users": "Users",
+            }
+        )
+
+        df = df_raw[list(columns.keys())].copy()
+
+        # Convert empty lists to a more readable format for Users column
+        if "Users" in df.columns:
+            df["Users"] = df["Users"].apply(
+                lambda x: x if (x is not None and len(x) > 0) else []
+            )
+    else:
+        df = _create_dataframe(columns=columns)
+
+    # proper data types
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
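The new `list_orphaned_workspaces` function above calls the Power BI admin Groups API with `$expand=users` and an OData `$filter` that keeps only workspaces with no users, or with no user in the Admin role. A usage sketch, assuming the function is exported from `sempy_labs.admin` (the `sempy_labs/admin/__init__.py` change listed above suggests this) and that the session has tenant-admin or service-principal permissions:

import sempy_labs.admin as admin

# Pull up to 500 orphaned workspaces and flag those with no users at all.
df = admin.list_orphaned_workspaces(top=500)
no_users = df[df["Users"].apply(len) == 0]
print(f"{len(no_users)} of {len(df)} orphaned workspaces have no users at all.")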
sempy_labs/directlake/_dl_helper.py CHANGED
@@ -13,6 +13,7 @@ from sempy_labs._helper_functions import (
 )


+@log
 def check_fallback_reason(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
@@ -195,6 +196,7 @@ def generate_direct_lake_semantic_model(
     refresh_semantic_model(dataset=dataset, workspace=workspace_id)


+@log
 def get_direct_lake_source(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, str, UUID, UUID]:
sempy_labs/directlake/_generate_shared_expression.py CHANGED
@@ -8,8 +8,10 @@ from sempy_labs._helper_functions import (
 from typing import Optional
 import sempy_labs._icons as icons
 from uuid import UUID
+from sempy._utils._log import log


+@log
 def generate_shared_expression(
     item_name: Optional[str] = None,
     item_type: str = "Lakehouse",
sempy_labs/directlake/_get_directlake_lakehouse.py CHANGED
@@ -1,16 +1,14 @@
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
-    resolve_lakehouse_name,
-    get_direct_lake_sql_endpoint,
-    resolve_workspace_name_and_id,
-    resolve_dataset_name_and_id,
 )
 from typing import Optional, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
+from sempy._utils._log import log


+@log
 def get_direct_lake_lakehouse(
     dataset: str | UUID,
     workspace: Optional[str | UUID] = None,
sempy_labs/directlake/_get_shared_expression.py CHANGED
@@ -1,7 +1,9 @@
 from typing import Optional
 from uuid import UUID
+from sempy._utils._log import log


+@log
 def get_shared_expression(
     lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> str:
sempy_labs/directlake/_guardrails.py CHANGED
@@ -6,8 +6,10 @@ from uuid import UUID
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
 )
+from sempy._utils._log import log


+@log
 def get_direct_lake_guardrails() -> pd.DataFrame:
     """
     Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py CHANGED
@@ -13,6 +13,7 @@ from uuid import UUID
 import re


+@log
 def _extract_expression_list(expression):
     """
     Finds the pattern for DL/SQL & DL/OL expressions in the semantic model.
@@ -37,6 +38,7 @@ def _extract_expression_list(expression):
     return result


+@log
 def _get_direct_lake_expressions(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> dict:
sempy_labs/directlake/_warm_cache.py CHANGED
@@ -182,6 +182,7 @@ def warm_direct_lake_cache_isresident(
     )


+@log
 def _put_columns_into_memory(dataset, workspace, col_df, return_dataframe: bool = True):

     row_limit = 1000000