semantic-link-labs 0.12.3__py3-none-any.whl → 0.12.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (36)
  1. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.4.dist-info}/METADATA +4 -3
  2. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.4.dist-info}/RECORD +35 -29
  3. sempy_labs/__init__.py +10 -8
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +1 -1
  6. sempy_labs/_capacities.py +1 -1
  7. sempy_labs/_git.py +1 -1
  8. sempy_labs/_helper_functions.py +27 -4
  9. sempy_labs/_list_functions.py +55 -5
  10. sempy_labs/_managed_private_endpoints.py +1 -1
  11. sempy_labs/_notebooks.py +4 -2
  12. sempy_labs/_sql_audit_settings.py +208 -0
  13. sempy_labs/_sql_endpoints.py +18 -3
  14. sempy_labs/_utils.py +2 -0
  15. sempy_labs/admin/__init__.py +6 -0
  16. sempy_labs/admin/_items.py +3 -3
  17. sempy_labs/admin/_labels.py +211 -0
  18. sempy_labs/directlake/_warm_cache.py +3 -1
  19. sempy_labs/eventstream/__init__.py +37 -0
  20. sempy_labs/eventstream/_items.py +263 -0
  21. sempy_labs/eventstream/_topology.py +652 -0
  22. sempy_labs/graph/__init__.py +8 -0
  23. sempy_labs/graph/_groups.py +60 -53
  24. sempy_labs/graph/_sensitivity_labels.py +39 -0
  25. sempy_labs/graph/_teams.py +19 -18
  26. sempy_labs/graph/_user_licenses.py +96 -0
  27. sempy_labs/graph/_users.py +23 -16
  28. sempy_labs/lakehouse/_get_lakehouse_tables.py +33 -1
  29. sempy_labs/lakehouse/_lakehouse.py +6 -2
  30. sempy_labs/lakehouse/_partitioning.py +165 -0
  31. sempy_labs/report/_reportwrapper.py +15 -5
  32. sempy_labs/tom/_model.py +81 -4
  33. sempy_labs/_eventstreams.py +0 -123
  34. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.4.dist-info}/WHEEL +0 -0
  35. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.4.dist-info}/licenses/LICENSE +0 -0
  36. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.4.dist-info}/top_level.txt +0 -0
sempy_labs/_sql_endpoints.py CHANGED
@@ -128,10 +128,25 @@ def refresh_sql_endpoint_metadata(
     else:
         raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")
 
-    payload = None
     timeout_unit = timeout_unit.capitalize()
-    if timeout_unit != "Minutes" and timeout_value != 15:
-        payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}
+    if timeout_unit not in ["Seconds", "Minutes", "Hours", "Days"]:
+        raise ValueError(
+            "Invalid timeout_unit. Must be 'Seconds', 'Minutes', 'Hours', or 'Days'."
+        )
+    if timeout_unit == "Hours" and timeout_value > 24:
+        raise ValueError("timeout_value cannot exceed 24 when timeout_unit is 'Hours'.")
+    if timeout_unit == "Days" and timeout_value > 1:
+        raise ValueError("timeout_value cannot exceed 1 when timeout_unit is 'Days'.")
+    if timeout_unit == "Minutes" and timeout_value > 1440:
+        raise ValueError(
+            "timeout_value cannot exceed 1440 when timeout_unit is 'Minutes'."
+        )
+    if timeout_unit == "Seconds" and timeout_value > 86400:
+        raise ValueError(
+            "timeout_value cannot exceed 86400 when timeout_unit is 'Seconds'."
+        )
+
+    payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}
 
     result = _base_api(
         request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
sempy_labs/_utils.py CHANGED
@@ -66,4 +66,6 @@ items = {
     "DigitalTwinBuilder": "digitaltwinbuilders",
     "DigitalTwinBuilderFlow": "DigitalTwinBuilderFlows",
     "MirroredAzureDatabricksCatalog": "mirroredAzureDatabricksCatalogs",
+    "Map": "Maps",
+    "AnomalyDetector": "anomalydetectors",
 }
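The items mapping pairs Fabric item-type names with the collection segment used when building REST URLs, and 0.12.4 registers the new Map and AnomalyDetector item types. A minimal sketch of how such a mapping is typically consumed (the _collection_path helper is hypothetical, for illustration only):

from sempy_labs._utils import items

# Hypothetical helper: derive the REST collection path for an item type.
def _collection_path(workspace_id: str, item_type: str) -> str:
    return f"/v1/workspaces/{workspace_id}/{items[item_type]}"

print(_collection_path("00000000-0000-0000-0000-000000000000", "AnomalyDetector"))
# -> /v1/workspaces/00000000-0000-0000-0000-000000000000/anomalydetectors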
sempy_labs/admin/__init__.py CHANGED
@@ -98,6 +98,10 @@ from ._sharing_links import (
     remove_all_sharing_links,
     remove_sharing_links,
 )
+from ._labels import (
+    bulk_set_labels,
+    bulk_remove_labels,
+)
 
 __all__ = [
     "list_items",
@@ -161,4 +165,6 @@ __all__ = [
     "rotate_tenant_key",
     "remove_all_sharing_links",
     "remove_sharing_links",
+    "bulk_set_labels",
+    "bulk_remove_labels",
 ]
sempy_labs/admin/_items.py CHANGED
@@ -2,13 +2,13 @@ import pandas as pd
 from typing import Optional, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
-from ._basic_functions import (
+from sempy_labs.admin._basic_functions import (
     _resolve_workspace_name_and_id,
 )
-from ._capacities import (
+from sempy_labs.admin._capacities import (
     _resolve_capacity_name_and_id,
 )
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _build_url,
     _base_api,
sempy_labs/admin/_labels.py ADDED
@@ -0,0 +1,211 @@
+from typing import Literal, List
+from uuid import UUID
+import sempy_labs._icons as icons
+from sempy_labs.admin._basic_functions import (
+    _resolve_workspace_name_and_id,
+)
+from sempy_labs.admin._items import (
+    list_items,
+)
+from sempy_labs._helper_functions import (
+    _is_valid_uuid,
+    _base_api,
+)
+from sempy._utils._log import log
+
+
+@log
+def bulk_set_labels(
+    items: List[dict],
+    label_id: UUID,
+    assignment_method: Literal["Standard", "Priviledged"] = "Standard",
+):
+    """
+    Sets sensitivity labels on Fabric items.
+
+    Note: Please use the sempy_labs.graph.resolve_sensitivity_label_id function to retrieve label IDs.
+
+    This is a wrapper function for the following API: `Labels - Bulk Set Labels <https://learn.microsoft.com/rest/api/fabric/admin/labels/bulk-set-labels>`_.
+
+    Parameters
+    ----------
+    items : List[dict]
+        A list of dictionaries containing the item details.
+
+        Example 1:
+        items = [
+            {
+                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a",
+                "type": "Dashboard"
+            },
+            {
+                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542c",
+                "type": "Report"
+            },
+            {
+                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542e",
+                "type": "SemanticModel"
+            },
+        ]
+
+        Example 2:
+        items = [
+            {
+                "id": "Dashboard 1",
+                "type": "Dashboard",
+                "workspace": "Sales Workspace"
+            },
+            {
+                "id": "Sales Report",
+                "type": "Report",
+                "workspace": "Sales Workspace"
+            },
+            {
+                "id": "KPI Model",
+                "type": "SemanticModel",
+                "workspace": "Workspace 2"
+            },
+        ]
+
+    label_id : uuid.UUID
+        The label ID, which must be in the user's label policy.
+    assignment_method : Literal["Standard", "Priviledged"], default="Standard"
+        Specifies whether the assigned label was set by an automated process or manually. Additional tenant setting property types may be added over time.
+    """
+
+    if assignment_method not in ["Standard", "Priviledged"]:
+        raise ValueError("assignment_method must be either 'Standard' or 'Priviledged'")
+
+    payload = {"items": []}
+    df = list_items()
+
+    for i in items:
+        item = i.get("item")
+        type = i.get("type")
+        workspace = i.get("workspace")
+        if _is_valid_uuid(item):
+            payload["items"].append(
+                {
+                    "id": item,
+                    "type": type,
+                }
+            )
+        else:
+            workspace_id = _resolve_workspace_name_and_id(workspace)[1]
+            df_filtered = df[
+                (df["Item Name"] == item)
+                & (df["Type"] == type)
+                & (df["Workspace Id"] == workspace_id)
+            ]
+            if df_filtered.empty:
+                raise ValueError(
+                    f"The item '{item}' of type '{type}' does not exist in workspace '{workspace}'."
+                )
+            else:
+                payload["items"].append(
+                    {
+                        "id": df_filtered["Item Id"].iloc[0],
+                        "type": type,
+                    }
+                )
+
+    payload["labelId"] = label_id
+    payload["assignmentMethod"] = assignment_method
+
+    _base_api(request="/v1/admin/items/bulkSetLabels", method="post", payload=payload)
+
+    print(
+        f"{icons.green_dot} Labels have been successfully set on the specified items."
+    )
+
+
+@log
+def bulk_remove_labels(
+    items: List[dict],
+):
+    """
+    Removes sensitivity labels from Fabric items.
+
+    This is a wrapper function for the following API: `Labels - Bulk Remove Labels <https://learn.microsoft.com/rest/api/fabric/admin/labels/bulk-remove-labels>`_.
+
+    Parameters
+    ----------
+    items : List[dict]
+        A list of dictionaries containing the item details.
+
+        Example 1:
+        items = [
+            {
+                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a",
+                "type": "Dashboard"
+            },
+            {
+                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542c",
+                "type": "Report"
+            },
+            {
+                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542e",
+                "type": "SemanticModel"
+            },
+        ]
+
+        Example 2:
+        items = [
+            {
+                "id": "Dashboard 1",
+                "type": "Dashboard",
+                "workspace": "Sales Workspace"
+            },
+            {
+                "id": "Sales Report",
+                "type": "Report",
+                "workspace": "Sales Workspace"
+            },
+            {
+                "id": "KPI Model",
+                "type": "SemanticModel",
+                "workspace": "Workspace 2"
+            },
+        ]
+    """
+
+    payload = {"items": []}
+    df = list_items()
+
+    for i in items:
+        item = i.get("item")
+        type = i.get("type")
+        workspace = i.get("workspace")
+        if _is_valid_uuid(item):
+            payload["items"].append(
+                {
+                    "id": item,
+                    "type": type,
+                }
+            )
+        else:
+            workspace_id = _resolve_workspace_name_and_id(workspace)[1]
+            df_filtered = df[
+                (df["Item Name"] == item)
+                & (df["Type"] == type)
+                & (df["Workspace Id"] == workspace_id)
+            ]
+            if df_filtered.empty:
+                raise ValueError(
+                    f"The item '{item}' of type '{type}' does not exist in workspace '{workspace}'."
+                )
+            else:
+                payload["items"].append(
+                    {
+                        "id": df_filtered["Item Id"].iloc[0],
+                        "type": type,
+                    }
+                )
+
+    _base_api(
+        request="/v1/admin/items/bulkRemoveLabels", method="post", payload=payload
+    )
+
+    print(
+        f"{icons.green_dot} Labels have been successfully removed from the specified items."
+    )
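One caveat for callers of the new labels API: the docstring examples key each dictionary on "id", but both implementations read i.get("item"), so entries written exactly as documented resolve to None. A hedged usage sketch that follows the implementation (all GUIDs and names are placeholders):

from sempy_labs.admin import bulk_set_labels, bulk_remove_labels

items = [
    # By ID (placeholder GUID):
    {"item": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a", "type": "Report"},
    # By name, which requires a workspace for resolution:
    {"item": "Sales Report", "type": "Report", "workspace": "Sales Workspace"},
]

# The label ID must be in the caller's label policy; the docstring points to
# sempy_labs.graph.resolve_sensitivity_label_id (also added in this release).
bulk_set_labels(items=items, label_id="3f2504e0-4f89-11d3-9a0c-0305e82c3301")

# Removal takes the same items structure.
bulk_remove_labels(items=items)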
sempy_labs/directlake/_warm_cache.py CHANGED
@@ -201,7 +201,9 @@ def _put_columns_into_memory(dataset, workspace, col_df, return_dataframe: bool
     if not dfT_filt.empty:
         row_count = dfT_filt["Row Count"].iloc[0]
         bar.set_description(f"Warming the '{table_name}' table...")
-        if row_count < row_limit:
+        if pd.isna(row_count):
+            pass
+        elif row_count < row_limit:
             columns = col_df_filt["DAX Object"].tolist()
             css = ", ".join(columns)
             dax = f"EVALUATE TOPN(1, SELECTCOLUMNS('{table_name}', {css}))"
sempy_labs/eventstream/__init__.py ADDED
@@ -0,0 +1,37 @@
+from ._items import (
+    list_eventstreams,
+    create_eventstream,
+    delete_eventstream,
+    get_eventstream_definition,
+)
+from ._topology import (
+    get_eventstream_destination,
+    get_eventstream_destination_connection,
+    get_eventstream_source,
+    get_eventstream_source_connection,
+    get_eventstream_topology,
+    pause_eventstream,
+    pause_eventstream_destination,
+    pause_eventstream_source,
+    resume_eventstream,
+    resume_eventstream_destination,
+    resume_eventstream_source,
+)
+
+__all__ = [
+    "list_eventstreams",
+    "create_eventstream",
+    "delete_eventstream",
+    "get_eventstream_definition",
+    "get_eventstream_destination",
+    "get_eventstream_destination_connection",
+    "get_eventstream_source",
+    "get_eventstream_source_connection",
+    "get_eventstream_topology",
+    "pause_eventstream",
+    "pause_eventstream_destination",
+    "pause_eventstream_source",
+    "resume_eventstream",
+    "resume_eventstream_destination",
+    "resume_eventstream_source",
+]
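Together with the removal of sempy_labs/_eventstreams.py (file 33 above), this new subpackage suggests the eventstream helpers changed homes in 0.12.4. A hedged before/after, assuming the old functions were previously reachable from the top-level package:

# 0.12.3 (backed by the now-removed sempy_labs/_eventstreams.py):
# from sempy_labs import list_eventstreams

# 0.12.4:
from sempy_labs.eventstream import list_eventstreams, get_eventstream_topology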
sempy_labs/eventstream/_items.py ADDED
@@ -0,0 +1,263 @@
+import pandas as pd
+from typing import Optional
+from sempy_labs._helper_functions import (
+    _base_api,
+    delete_item,
+    _create_dataframe,
+    create_item,
+    resolve_workspace_id,
+    resolve_item_id,
+    _decode_b64,
+)
+from uuid import UUID
+import sempy_labs._icons as icons
+from sempy._utils._log import log
+import json
+
+
+@log
+def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the eventstreams within a workspace.
+
+    This is a wrapper function for the following API: `Items - List Eventstreams <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventstreams>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventstreams within a workspace.
+    """
+
+    columns = {
+        "Eventstream Name": "string",
+        "Eventstream Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams", uses_pagination=True
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "Eventstream Name": v.get("displayName"),
+                    "Eventstream Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def create_eventstream(
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
+):
+    """
+    Creates a Fabric eventstream.
+
+    This is a wrapper function for the following API: `Items - Create Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventstream>`_.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventstream.
+    description : str, default=None
+        A description of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    create_item(
+        name=name, description=description, type="Eventstream", workspace=workspace
+    )
+
+
+@log
+def delete_eventstream(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
+):
+    """
+    Deletes a Fabric eventstream.
+
+    This is a wrapper function for the following API: `Items - Delete Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventstream>`_.
+
+    Parameters
+    ----------
+    eventstream: str | uuid.UUID
+        Name or ID of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if "name" in kwargs:
+        eventstream = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
+        )
+
+    delete_item(item=eventstream, type="Eventstream", workspace=workspace)
+
+
+@log
+def get_eventstream_definition(
+    eventstream: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    decode: bool = True,
+    return_dataframe: bool = False,
+) -> dict:
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=eventstream, type="Eventstream", workspace=workspace)
+
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams/{item_id}/getDefinition",
+        method="post",
+        client="fabric_sp",
+        status_codes=None,
+        lro_return_json=True,
+    )
+
+    if decode:
+        definition = {"definition": {"parts": []}}
+
+        for part in result.get("definition", {}).get("parts", []):
+            path = part.get("path")
+            payload = json.loads(_decode_b64(part.get("payload")))
+            definition["definition"]["parts"].append({"path": path, "payload": payload})
+    else:
+        definition = result.copy()
+
+    if return_dataframe:
+        df = pd.DataFrame(definition["definition"]["parts"])
+        df.columns = ["Path", "Payload", "Payload Type"]
+        return df
+    else:
+        return definition
+
+
+@log
+def list_eventstream_destinations(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Lists the destinations of the specified eventstream.
+
+    Parameters
+    ----------
+    eventstream : str | uuid.UUID
+        The name or ID of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the destinations of the eventstream.
+    """
+
+    definition = get_eventstream_definition(
+        eventstream=eventstream, workspace=workspace
+    )
+
+    columns = {
+        "Destination Id": "string",
+        "Destination Name": "string",
+        "Destination Type": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for part in definition.get("definition").get("parts"):
+        payload = part.get("payload")
+        if part.get("path") == "eventstream.json":
+            destinations = payload.get("destinations")
+            for d in destinations:
+                rows.append(
+                    {
+                        "Destination Id": d.get("id"),
+                        "Destination Name": d.get("name"),
+                        "Destination Type": d.get("type"),
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def list_eventstream_sources(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Lists the destinations of the specified eventstream.
+
+    Parameters
+    ----------
+    eventstream : str | uuid.UUID
+        The name or ID of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the destinations of the eventstream.
+    """
+
+    definition = get_eventstream_definition(
+        eventstream=eventstream, workspace=workspace
+    )
+
+    columns = {
+        "Source Id": "string",
+        "Source Name": "string",
+        "Source Type": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for part in definition.get("definition").get("parts"):
+        payload = part.get("payload")
+        if part.get("path") == "eventstream.json":
+            sources = payload.get("sources")
+            for s in sources:
+                rows.append(
+                    {
+                        "Source Id": s.get("id"),
+                        "Source Name": s.get("name"),
+                        "Source Type": s.get("type"),
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
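Two small things stand out in this file: the create_eventstream docstring describes "the environment" and the list_eventstream_sources docstring says "destinations" (both apparent copy-paste leftovers), and list_eventstream_destinations/list_eventstream_sources are defined here but not re-exported in eventstream/__init__.py. A hedged usage sketch of the module (workspace and eventstream names are placeholders):

from sempy_labs.eventstream import (
    list_eventstreams,
    create_eventstream,
    delete_eventstream,
)
# Not in the subpackage's __all__, so imported from the module directly:
from sempy_labs.eventstream._items import list_eventstream_destinations

create_eventstream(name="SalesEvents", workspace="Dev Workspace")

df = list_eventstreams(workspace="Dev Workspace")
print(df[["Eventstream Name", "Eventstream Id"]])

dests = list_eventstream_destinations(
    eventstream="SalesEvents", workspace="Dev Workspace"
)

delete_eventstream(eventstream="SalesEvents", workspace="Dev Workspace")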