semantic-link-labs 0.12.3__py3-none-any.whl → 0.12.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (47)
  1. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/METADATA +5 -3
  2. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/RECORD +45 -37
  3. sempy_labs/__init__.py +20 -16
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +1 -1
  6. sempy_labs/_capacities.py +1 -1
  7. sempy_labs/_dataflows.py +98 -10
  8. sempy_labs/_git.py +1 -1
  9. sempy_labs/_helper_functions.py +32 -5
  10. sempy_labs/_list_functions.py +55 -5
  11. sempy_labs/_managed_private_endpoints.py +63 -1
  12. sempy_labs/_model_bpa.py +6 -0
  13. sempy_labs/_notebooks.py +4 -2
  14. sempy_labs/_onelake.py +131 -0
  15. sempy_labs/_sql_audit_settings.py +208 -0
  16. sempy_labs/_sql_endpoints.py +18 -3
  17. sempy_labs/_utils.py +2 -0
  18. sempy_labs/admin/__init__.py +6 -0
  19. sempy_labs/admin/_basic_functions.py +17 -13
  20. sempy_labs/admin/_items.py +3 -3
  21. sempy_labs/admin/_labels.py +211 -0
  22. sempy_labs/admin/_workspaces.py +2 -2
  23. sempy_labs/deployment_pipeline/__init__.py +21 -0
  24. sempy_labs/deployment_pipeline/_items.py +486 -0
  25. sempy_labs/directlake/_update_directlake_partition_entity.py +73 -41
  26. sempy_labs/directlake/_warm_cache.py +3 -1
  27. sempy_labs/eventstream/__init__.py +37 -0
  28. sempy_labs/eventstream/_items.py +263 -0
  29. sempy_labs/eventstream/_topology.py +652 -0
  30. sempy_labs/graph/__init__.py +10 -0
  31. sempy_labs/graph/_groups.py +123 -53
  32. sempy_labs/graph/_sensitivity_labels.py +39 -0
  33. sempy_labs/graph/_teams.py +19 -18
  34. sempy_labs/graph/_user_licenses.py +96 -0
  35. sempy_labs/graph/_users.py +69 -18
  36. sempy_labs/lakehouse/_get_lakehouse_tables.py +33 -1
  37. sempy_labs/lakehouse/_lakehouse.py +6 -2
  38. sempy_labs/lakehouse/_partitioning.py +165 -0
  39. sempy_labs/report/_export_report.py +0 -22
  40. sempy_labs/report/_report_rebind.py +29 -43
  41. sempy_labs/report/_reportwrapper.py +80 -35
  42. sempy_labs/tom/_model.py +81 -4
  43. sempy_labs/_deployment_pipelines.py +0 -209
  44. sempy_labs/_eventstreams.py +0 -123
  45. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/WHEEL +0 -0
  46. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/licenses/LICENSE +0 -0
  47. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/top_level.txt +0 -0
sempy_labs/_list_functions.py CHANGED
@@ -756,6 +756,53 @@ def update_item(
     )
 
 
+@log
+def list_user_defined_functions(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of the user-defined functions within a semantic model.
+
+    Parameters
+    ----------
+    dataset: str | uuid.UUID
+        Name or UUID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the user-defined functions within a semantic model.
+    """
+
+    from sempy_labs.tom import connect_semantic_model
+
+    columns = {
+        "Function Name": "string",
+        "Expression": "string",
+        "Lineage Tag": "string",
+    }
+    df = _create_dataframe(columns=columns)
+    rows = []
+    with connect_semantic_model(dataset=dataset, workspace=workspace) as tom:
+        for f in tom.model.Functions:
+            rows.append(
+                {
+                    "Function Name": f.Name,
+                    "Expression": f.Expression,
+                    "Lineage Tag": f.LineageTag,
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows)
+
+    return df
+
+
 @log
 def list_relationships(
     dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
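
A minimal usage sketch for the new function; the model and workspace names are placeholders, and it is assumed the function is re-exported from the package root like its neighbors in `_list_functions.py`:

    import sempy_labs as labs

    # "Sales Model" and "Analytics" are hypothetical names.
    df_funcs = labs.list_user_defined_functions(
        dataset="Sales Model", workspace="Analytics"
    )
    print(df_funcs[["Function Name", "Expression", "Lineage Tag"]])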
@@ -1484,15 +1531,14 @@ def list_semantic_model_errors(
 
     from sempy_labs.tom import connect_semantic_model
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
-        dataset, workspace=workspace_id
+    df = pd.DataFrame(
+        columns=["Object Type", "Table Name", "Object Name", "Error Message"]
     )
 
     error_rows = []
 
     with connect_semantic_model(
-        dataset=dataset_id, workspace=workspace_id, readonly=True
+        dataset=dataset, workspace=workspace, readonly=True
     ) as tom:
         # Define mappings of TOM objects to object types and attributes
         error_checks = [
@@ -1546,6 +1592,7 @@ def list_semantic_model_errors(
                     else ""
                 ),
             ),
+            ("Function", tom.all_functions, lambda o: o.ErrorMessage),
         ]
 
         # Iterate over all error checks
@@ -1562,7 +1609,10 @@ def list_semantic_model_errors(
                 }
             )
 
-    return pd.DataFrame(error_rows)
+    if error_rows:
+        df = pd.DataFrame(error_rows)
+
+    return df
 
 
 @log
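
The net effect of this refactor: a model with no errors now returns an empty frame that still carries the four expected columns, so callers can filter without guarding against a column-less DataFrame. A hedged sketch (model name is a placeholder):

    import sempy_labs as labs

    errors = labs.list_semantic_model_errors(dataset="Sales Model")
    # Safe even when the model is clean, because the columns always exist:
    print(errors[errors["Object Type"] == "Function"])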
sempy_labs/_managed_private_endpoints.py CHANGED
@@ -2,6 +2,7 @@ import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
+    resolve_item_id,
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _base_api,
@@ -24,7 +25,7 @@ def create_managed_private_endpoint(
     """
     Creates a managed private endpoint.
 
-    This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`.
+    This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`_.
 
     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
 
@@ -190,3 +191,64 @@ def delete_managed_private_endpoint(
         workspace_name=workspace_name,
         action="deleted",
     )
+
+
+@log
+def list_managed_private_endpoint_fqdns(
+    managed_private_endpoint: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of fully qualified domain names (FQDNs) associated with the specified managed private endpoint.
+
+    This is a wrapper function for the following API: `Managed Private Endpoints - List FQDNs <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-fqd-ns>`.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    managed_private_endpoint : str | uuid.UUID
+        The managed private endpoint name or ID.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of fully qualified domain names (FQDNs) associated with the specified managed private endpoint.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    if _is_valid_uuid(managed_private_endpoint):
+        item_id = managed_private_endpoint
+    else:
+        df = list_managed_private_endpoints(workspace=workspace_id)
+        df_filt = df[df["Managed Private Endpoint Name"] == managed_private_endpoint]
+        if df_filt.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the workspace."
+            )
+        item_id = df_filt["Managed Private Endpoint Id"].iloc[0]
+
+    columns = {"FQDN": "str"}
+    df = _create_dataframe(columns=columns)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}/targetFQDNs",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "FQDN": v,
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
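
A usage sketch under the same placeholder-name caveat; passing a name exercises the lookup branch above, while passing a UUID skips straight to the API call:

    import sempy_labs as labs

    df_fqdns = labs.list_managed_private_endpoint_fqdns(
        managed_private_endpoint="sql-mpe-prod",  # hypothetical endpoint name
        workspace="Analytics",  # hypothetical workspace name
    )
    for fqdn in df_fqdns["FQDN"]:
        print(fqdn)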
sempy_labs/_model_bpa.py CHANGED
@@ -300,6 +300,10 @@ def run_model_bpa(
             tom.all_partitions(),
             lambda obj: format_dax_object_name(obj.Parent.Name, obj.Name),
         ),
+        "Function": (
+            tom.all_functions(),
+            lambda obj: obj.Name,
+        ),
     }
 
     for i, r in rules.iterrows():
@@ -320,6 +324,8 @@ def run_model_bpa(
             x = ["Model"]
         elif scope == "Measure":
             x = [nm(obj) for obj in tom.all_measures() if expr(obj, tom)]
+        elif scope == "Function":
+            x = [nm(obj) for obj in tom.all_functions() if expr(obj, tom)]
         elif scope == "Column":
             x = [nm(obj) for obj in tom.all_columns() if expr(obj, tom)]
         elif scope == "Partition":
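
These two hunks wire user-defined functions into the Best Practice Analyzer: "Function" becomes a named scope in the object map and gets its own branch in rule evaluation. Assuming `run_model_bpa` keeps its existing entry point, any rule whose scope includes "Function" is picked up by an ordinary run:

    import sempy_labs as labs

    # Rules scoped to "Function" are now evaluated against tom.all_functions().
    labs.run_model_bpa(dataset="Sales Model")  # hypothetical model name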
sempy_labs/_notebooks.py CHANGED
@@ -163,7 +163,7 @@ def import_notebook_from_web(
     if len(dfI_filt) == 0:
         create_notebook(
             name=notebook_name,
-            notebook_content=response.content,
+            notebook_content=response.content.decode("utf-8"),
             workspace=workspace_id,
             description=description,
             format="ipynb",
@@ -216,7 +216,9 @@ def create_notebook(
         Defaults to None which places the notebook in the root of the workspace.
     """
 
-    notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
+    notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+        "utf-8"
+    )
 
     definition_payload = {
         "parts": [
sempy_labs/_onelake.py ADDED
@@ -0,0 +1,131 @@
+from sempy_labs._helper_functions import (
+    _base_api,
+    resolve_workspace_id,
+    resolve_lakehouse_name_and_id,
+    resolve_workspace_name_and_id,
+)
+from sempy._utils._log import log
+from uuid import UUID
+from typing import Optional
+import pandas as pd
+import sempy_labs._icons as icons
+
+
+@log
+def get_onelake_settings(workspace: Optional[str | UUID] = None):
+    """
+    Obtains the workspace OneLake settings.
+
+    This is a wrapper function for the following API: `OneLake Settings - Get Settings <https://learn.microsoft.com/rest/api/fabric/core/onelake-settings/get-settings>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        The workspace OneLake settings.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/onelake/settings", client="fabric_sp"
+    ).json()
+
+    d = result.get("diagnostics", {})
+    enabled = True if d.get("status", {}) == "Enabled" else False
+    rows = []
+    rows.append(
+        {
+            "Enabled": enabled,
+            "Destination Type": (
+                d.get("destination", {}).get("type", {}) if enabled else None
+            ),
+            "Destination Id": (
+                d.get("destination", {}).get("lakehouse", {}).get("itemId", {})
+                if enabled
+                else None
+            ),
+            "Destination Workspace Id": (
+                d.get("destination", {}).get("lakehouse", {}).get("workspaceId", {})
+                if enabled
+                else None
+            ),
+        }
+    )
+
+    return pd.DataFrame(rows)
+
+
+def modify_onelake_diagnostics(
+    workspace: Optional[str | UUID] = None,
+    enabled: bool = True,
+    destination_lakehouse: Optional[str | UUID] = None,
+    destination_workspace: Optional[str | UUID] = None,
+):
+    """
+    Obtains the workspace OneLake settings.
+
+    This is a wrapper function for the following API: `OneLake Settings - Modify Diagnostics <https://learn.microsoft.com/rest/api/fabric/core/onelake-settings/modify-diagnostics>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    enabled : bool, default=True
+        Whether to enable or disable OneLake diagnostics.
+    destination_lakehouse : str | uuid.UUID, default=None
+        The name or ID of the destination lakehouse.
+        Defaults to None which resolves to the lakehouse of the attached lakehouse
+        or if no lakehouse attached, resolves to the lakehouse of the notebook.
+    destination_workspace : str | uuid.UUID, default=None
+        The name or ID of the destination workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    (destination_workspace_name, destination_workspace_id) = (
+        resolve_workspace_name_and_id(destination_workspace)
+    )
+    (destination_lakehouse_name, destination_lakehouse_id) = (
+        resolve_lakehouse_name_and_id(destination_lakehouse, destination_workspace_id)
+    )
+
+    if enabled:
+        payload = {
+            "status": "Enabled",
+            "destination": {
+                "type": "Lakehouse",
+                "lakehouse": {
+                    "referenceType": "ById",
+                    "itemId": destination_lakehouse_id,
+                    "workspaceId": destination_workspace_id,
+                },
+            },
+        }
+    else:
+        payload = {"status": "Disabled"}
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/onelake/settings/modifyDiagnostics",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+    )
+
+    if enabled:
+        print(
+            f"{icons.green_dot} OneLake diagnostics have been enabled and updated to use the '{destination_lakehouse_name}' lakehouse in the '{destination_workspace_name}' workspace as the destination."
+        )
+    else:
+        print(f"{icons.green_dot} OneLake diagnostics have been disabled.")
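
A hedged end-to-end sketch of the new module (all names are placeholders). Note that `modify_onelake_diagnostics` resolves the destination lakehouse even when disabling, so a resolvable lakehouse is needed either way:

    from sempy_labs._onelake import get_onelake_settings, modify_onelake_diagnostics

    # Route workspace diagnostics to a lakehouse:
    modify_onelake_diagnostics(
        workspace="Analytics",
        enabled=True,
        destination_lakehouse="DiagnosticsLake",
        destination_workspace="Analytics",
    )
    print(get_onelake_settings(workspace="Analytics"))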
sempy_labs/_sql_audit_settings.py ADDED
@@ -0,0 +1,208 @@
+from sempy_labs._helper_functions import (
+    resolve_item_id,
+    resolve_workspace_name_and_id,
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    resolve_workspace_id,
+    resolve_item_name_and_id,
+)
+import pandas as pd
+from typing import Optional, List, Literal
+import sempy_labs._icons as icons
+from uuid import UUID
+from sempy._utils._log import log
+
+
+def _get_base_url(item, type, workspace):
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace)
+
+    type_dict = {
+        "Warehouse": "warehouses",
+        "SQLEndpoint": "sqlEndpoints",
+    }
+    type_for_url = type_dict.get(type)
+
+    if type in ["SQLEndpoint", "Warehouse"]:
+        url = f"/v1/workspaces/{workspace_id}/{type_for_url}/{item_id}"
+    else:
+        raise ValueError(
+            f"{icons.red_dot} The type must be 'Warehouse' or 'SQLEndpoint'."
+        )
+
+    return url
+
+
+@log
+def get_sql_audit_settings(
+    item: str | UUID,
+    type: Literal["Warehouse", "SQLEndpoint"],
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Shows the SQL audit settings of a Fabric item.
+
+    This is a wrapper function for the following API: `SQL Audit Settings - Get SQL Audit Settings <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings>`_.
+
+    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item (Warehouse or SQLEndpoint).
+    type : Literal['Warehouse', 'SQLEndpoint']
+        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe containing the SQL audit settings of the specified warehouse.
+    """
+
+    columns = {
+        "State": "string",
+        "Retention Days": "int",
+        "Audit Actions And Group": "list",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    url = _get_base_url(item=item, type=type, workspace=workspace)
+    response = _base_api(
+        request=f"{url}/settings/sqlAudit",
+        client="fabric_sp",
+    ).json()
+
+    rows = []
+    rows.append(
+        {
+            "State": response.get("state"),
+            "Retention Days": response.get("retentionDays"),
+            "Audit Actions And Group": response.get("auditActionsAndGroups"),
+        }
+    )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+@log
+def update_sql_audit_settings(
+    item: str | UUID,
+    type: Literal["Warehouse", "SQLEndpoint"],
+    workspace: Optional[str | UUID] = None,
+    retention_days: Optional[int] = None,
+    state: Optional[str] = None,
+):
+    """
+    Update settings associated with the item.
+
+    This is a wrapper function for the following API: SQL Audit Settings - Update SQL Audit Settings <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/update-sql-audit-settings>`_.
+
+    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item (Warehouse or SQLEndpoint).
+    type : Literal['Warehouse', 'SQLEndpoint']
+        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    payload = {}
+    if retention_days is not None:
+        if not isinstance(retention_days, int) or retention_days < 0:
+            raise ValueError(
+                f"{icons.red_dot} retention_days must be a non-negative integer."
+            )
+        payload["retentionDays"] = retention_days
+    if state is not None:
+        state = state.capitalize()
+        if state not in ["Enabled", "Disabled"]:
+            raise ValueError(
+                f"{icons.red_dot} state must be either 'Enabled' or 'Disabled'."
+            )
+        payload["state"] = state
+
+    if not payload:
+        print(
+            f"{icons.info} No updates were made as neither retention_days nor state were provided."
+        )
+        return
+
+    url = _get_base_url(item=item, type=type, workspace=workspace)
+    _base_api(
+        request=f"{url}/settings/sqlAudit",
+        client="fabric_sp",
+        method="patch",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} The SQL audit settings for the '{item}' {type.lower()} within the '{workspace_name}' workspace have been updated accordingly."
+    )
+
+
+@log
+def set_audit_actions_and_group(
+    item: str | UUID,
+    type: Literal["Warehouse", "SQLEndpoint"],
+    sql_audit_groups: List[str],
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Update the audit actions and groups for this item.
+
+    This is a wrapper function for the following API: SQL Audit Settings - Set Audit Actions And Groups <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/set-audit-actions-and-groups>`_.
+
+    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item (Warehouse or SQLEndpoint).
+    type : Literal['Warehouse', 'SQLEndpoint']
+        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if (
+        not sql_audit_groups
+        or not isinstance(sql_audit_groups, list)
+        or not all(isinstance(item, str) for item in sql_audit_groups)
+    ):
+        raise ValueError(
+            f"{icons.red_dot} sql_audit_groups must be a non-empty list of strings."
+        )
+
+    url = _get_base_url(item=item, type=type, workspace=workspace)
+    _base_api(
+        request=f"{url}/settings/sqlAudit/setAuditActionsAndGroups",
+        client="fabric_sp",
+        method="post",
+        payload=sql_audit_groups,
+    )
+
+    print(
+        f"{icons.green_dot} The SQL audit actions and groups for the '{item}' {type.lower()} within the '{workspace_name}' workspace have been updated accordingly."
+    )
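
A hedged sketch spanning the three public functions of the new module (warehouse and workspace names are placeholders; the audit group shown is one of the standard SQL Server audit action group names):

    from sempy_labs._sql_audit_settings import (
        get_sql_audit_settings,
        set_audit_actions_and_group,
        update_sql_audit_settings,
    )

    update_sql_audit_settings(
        item="SalesWarehouse",
        type="Warehouse",
        workspace="Analytics",
        state="Enabled",
        retention_days=90,
    )
    set_audit_actions_and_group(
        item="SalesWarehouse",
        type="Warehouse",
        sql_audit_groups=["SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP"],
        workspace="Analytics",
    )
    print(get_sql_audit_settings(item="SalesWarehouse", type="Warehouse", workspace="Analytics"))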
sempy_labs/_sql_endpoints.py CHANGED
@@ -128,10 +128,25 @@ def refresh_sql_endpoint_metadata(
     else:
         raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")
 
-    payload = None
     timeout_unit = timeout_unit.capitalize()
-    if timeout_unit != "Minutes" and timeout_value != 15:
-        payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}
+    if timeout_unit not in ["Seconds", "Minutes", "Hours", "Days"]:
+        raise ValueError(
+            "Invalid timeout_unit. Must be 'Seconds', 'Minutes', 'Hours', or 'Days'."
+        )
+    if timeout_unit == "Hours" and timeout_value > 24:
+        raise ValueError("timeout_value cannot exceed 24 when timeout_unit is 'Hours'.")
+    if timeout_unit == "Days" and timeout_value > 1:
+        raise ValueError("timeout_value cannot exceed 1 when timeout_unit is 'Days'.")
+    if timeout_unit == "Minutes" and timeout_value > 1440:
+        raise ValueError(
+            "timeout_value cannot exceed 1440 when timeout_unit is 'Minutes'."
+        )
+    if timeout_unit == "Seconds" and timeout_value > 86400:
+        raise ValueError(
+            "timeout_value cannot exceed 86400 when timeout_unit is 'Seconds'."
+        )
+
+    payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}
 
     result = _base_api(
         request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
sempy_labs/_utils.py CHANGED
@@ -66,4 +66,6 @@ items = {
     "DigitalTwinBuilder": "digitaltwinbuilders",
     "DigitalTwinBuilderFlow": "DigitalTwinBuilderFlows",
     "MirroredAzureDatabricksCatalog": "mirroredAzureDatabricksCatalogs",
+    "Map": "Maps",
+    "AnomalyDetector": "anomalydetectors",
 }
sempy_labs/admin/__init__.py CHANGED
@@ -98,6 +98,10 @@ from ._sharing_links import (
     remove_all_sharing_links,
     remove_sharing_links,
 )
+from ._labels import (
+    bulk_set_labels,
+    bulk_remove_labels,
+)
 
 __all__ = [
     "list_items",
@@ -161,4 +165,6 @@ __all__ = [
     "rotate_tenant_key",
     "remove_all_sharing_links",
     "remove_sharing_links",
+    "bulk_set_labels",
+    "bulk_remove_labels",
 ]
sempy_labs/admin/_basic_functions.py CHANGED
@@ -1,10 +1,9 @@
 from typing import Optional, List, Union, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _build_url,
-    _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
 )
@@ -337,16 +336,23 @@ def list_workspace_access_details(
         request=f"/v1/admin/workspaces/{workspace_id}/users", client="fabric_sp"
     )
 
+    rows = []
     for v in response.json().get("accessDetails", []):
-        new_data = {
-            "User Id": v.get("principal", {}).get("id"),
-            "User Name": v.get("principal", {}).get("displayName"),
-            "User Type": v.get("principal", {}).get("type"),
-            "Workspace Name": workspace_name,
-            "Workspace Id": workspace_id,
-            "Workspace Role": v.get("workspaceAccessDetails", {}).get("workspaceRole"),
-        }
-        df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)
+        rows.append(
+            {
+                "User Id": v.get("principal", {}).get("id"),
+                "User Name": v.get("principal", {}).get("displayName"),
+                "User Type": v.get("principal", {}).get("type"),
+                "Workspace Name": workspace_name,
+                "Workspace Id": workspace_id,
+                "Workspace Role": v.get("workspaceAccessDetails", {}).get(
+                    "workspaceRole"
+                ),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
@@ -454,6 +460,4 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
     if rows:
         df = pd.DataFrame(rows, columns=list(columns.keys()))
 
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
     return df
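
The `list_workspace_access_details` rewrite swaps per-row `pd.concat` for a rows list, replacing a full DataFrame copy on every iteration with one allocation at the end; the general pattern:

    import pandas as pd

    records = [{"id": "1", "role": "Admin"}, {"id": "2", "role": "Viewer"}]  # stand-in API records

    rows = [{"User Id": r["id"], "Workspace Role": r["role"]} for r in records]

    # One allocation at the end instead of O(n) DataFrame copies in the loop.
    df = pd.DataFrame(rows, columns=["User Id", "Workspace Role"])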
sempy_labs/admin/_items.py CHANGED
@@ -2,13 +2,13 @@ import pandas as pd
 from typing import Optional, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
-from ._basic_functions import (
+from sempy_labs.admin._basic_functions import (
     _resolve_workspace_name_and_id,
 )
-from ._capacities import (
+from sempy_labs.admin._capacities import (
     _resolve_capacity_name_and_id,
 )
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _build_url,
     _base_api,