semantic-link-labs 0.9.10__py3-none-any.whl → 0.9.11__py3-none-any.whl
- {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.9.11.dist-info}/METADATA +27 -21
- {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.9.11.dist-info}/RECORD +34 -29
- {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.9.11.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +22 -1
- sempy_labs/_delta_analyzer.py +9 -8
- sempy_labs/_environments.py +19 -1
- sempy_labs/_generate_semantic_model.py +1 -1
- sempy_labs/_helper_functions.py +193 -134
- sempy_labs/_kusto.py +25 -23
- sempy_labs/_list_functions.py +13 -35
- sempy_labs/_model_bpa_rules.py +13 -3
- sempy_labs/_notebooks.py +44 -11
- sempy_labs/_semantic_models.py +93 -1
- sempy_labs/_sql.py +3 -2
- sempy_labs/_tags.py +194 -0
- sempy_labs/_variable_libraries.py +89 -0
- sempy_labs/_vpax.py +386 -0
- sempy_labs/admin/__init__.py +8 -0
- sempy_labs/admin/_tags.py +126 -0
- sempy_labs/directlake/_generate_shared_expression.py +5 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +55 -5
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/lakehouse/__init__.py +16 -0
- sempy_labs/lakehouse/_blobs.py +115 -63
- sempy_labs/lakehouse/_get_lakehouse_tables.py +1 -13
- sempy_labs/lakehouse/_helper.py +211 -0
- sempy_labs/lakehouse/_lakehouse.py +1 -1
- sempy_labs/lakehouse/_livy_sessions.py +137 -0
- sempy_labs/report/_download_report.py +1 -1
- sempy_labs/report/_generate_report.py +5 -1
- sempy_labs/report/_reportwrapper.py +31 -18
- sempy_labs/tom/_model.py +83 -21
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -9
- sempy_labs/report/_bpareporttemplate/.platform +0 -11
- {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.9.11.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.9.11.dist-info}/top_level.txt +0 -0
sempy_labs/_notebooks.py
CHANGED

```diff
@@ -7,6 +7,7 @@ import requests
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
+    resolve_workspace_id,
     _decode_b64,
     _base_api,
     resolve_item_id,
@@ -20,13 +21,20 @@ _notebook_prefix = "notebook-content."
 
 
 def _get_notebook_definition_base(
-    notebook_name: str,
+    notebook_name: str,
+    workspace: Optional[str | UUID] = None,
+    format: Optional[str] = None,
 ) -> pd.DataFrame:
 
-
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(item=notebook_name, type="Notebook", workspace=workspace)
+
+    url = f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition"
+    if format == "ipynb":
+        url += f"?format={format}"
+
     result = _base_api(
-        request=
+        request=url,
         method="post",
         lro_return_json=True,
         status_codes=None,
@@ -53,7 +61,10 @@ def _get_notebook_type(
 
 
 def get_notebook_definition(
-    notebook_name: str,
+    notebook_name: str,
+    workspace: Optional[str | UUID] = None,
+    decode: bool = True,
+    format: Optional[str] = None,
 ) -> str:
     """
     Obtains the notebook definition.
@@ -71,6 +82,9 @@ def get_notebook_definition(
     decode : bool, default=True
         If True, decodes the notebook definition file into .ipynb format.
         If False, obtains the notebook definition file in base64 format.
+    format : str, default=None
+        The only supported value is 'ipynb'.
+        If provided, the definition is returned in standard .ipynb format; otherwise it is returned in the Git-friendly source format.
 
     Returns
     -------
@@ -79,7 +93,7 @@ def get_notebook_definition(
     """
 
     df_items = _get_notebook_definition_base(
-        notebook_name=notebook_name, workspace=workspace
+        notebook_name=notebook_name, workspace=workspace, format=format
     )
     df_items_filt = df_items[df_items["path"].str.startswith(_notebook_prefix)]
     payload = df_items_filt["payload"].iloc[0]
```
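As a usage sketch of the new `format` parameter (notebook and workspace names below are placeholders; the import path is the module shown in this diff):

```python
from sempy_labs._notebooks import get_notebook_definition

# Git-friendly source format (the default, unchanged from 0.9.10).
source = get_notebook_definition(
    notebook_name="My Notebook", workspace="My Workspace"
)

# Standard .ipynb JSON via the new `format` parameter, which appends
# ?format=ipynb to the getDefinition request.
ipynb = get_notebook_definition(
    notebook_name="My Notebook", workspace="My Workspace", format="ipynb"
)
```

The `create_notebook` and `update_notebook_definition` changes in the same file follow.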
```diff
@@ -163,6 +177,7 @@ def create_notebook(
     type: str = "py",
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
+    format: Optional[str] = None,
 ):
     """
     Creates a new notebook with a definition within a workspace.
@@ -182,20 +197,27 @@ def create_notebook(
         The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    format : str, default=None
+        If 'ipynb' is provided, then notebook_content should be in standard .ipynb format;
+        otherwise notebook_content should be in the Git-friendly source format.
     """
 
-    notebook_payload = base64.b64encode(notebook_content
+    notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+        "utf-8"
+    )
     definition_payload = {
-        "format": "ipynb",
         "parts": [
             {
-                "path": f"{_notebook_prefix}
+                "path": f"{_notebook_prefix}{type}",
                 "payload": notebook_payload,
                 "payloadType": "InlineBase64",
             }
         ],
     }
 
+    if format == "ipynb":
+        definition_payload["format"] = "ipynb"
+
     create_item(
         name=name,
         type="Notebook",
@@ -206,7 +228,10 @@ def create_notebook(
 
 
 def update_notebook_definition(
-    name: str,
+    name: str,
+    notebook_content: str,
+    workspace: Optional[str | UUID] = None,
+    format: Optional[str] = None,
 ):
     """
     Updates an existing notebook with a new definition.
@@ -221,10 +246,15 @@ def update_notebook_definition(
         The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    format : str, default=None
+        If 'ipynb' is provided, then notebook_content should be in standard .ipynb format;
+        otherwise notebook_content should be in the Git-friendly source format.
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    notebook_payload = base64.b64encode(notebook_content)
+    notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+        "utf-8"
+    )
     item_id = resolve_item_id(item=name, type="Notebook", workspace=workspace)
     type = _get_notebook_type(notebook_name=name, workspace=workspace)
 
@@ -232,7 +262,7 @@ def update_notebook_definition(
         "definition": {
             "parts": [
                 {
-                    "path": f"{_notebook_prefix}
+                    "path": f"{_notebook_prefix}{type}",
                     "payload": notebook_payload,
                     "payloadType": "InlineBase64",
                 }
@@ -240,6 +270,9 @@ def update_notebook_definition(
         },
     }
 
+    if format == "ipynb":
+        payload["definition"]["format"] = "ipynb"
+
     _base_api(
         request=f"v1/workspaces/{workspace_id}/notebooks/{item_id}/updateDefinition",
         payload=payload,
```
sempy_labs/_semantic_models.py
CHANGED

```diff
@@ -1,5 +1,5 @@
 from uuid import UUID
-from typing import Optional
+from typing import Optional, List
 import pandas as pd
 from sempy_labs._helper_functions import (
     _create_dataframe,
@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
     delete_item,
 )
 import sempy_labs._icons as icons
+import re
 
 
 def get_semantic_model_refresh_schedule(
@@ -135,3 +136,94 @@ def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] =
     """
 
     delete_item(item=dataset, type="SemanticModel", workspace=workspace)
+
+
+def update_semantic_model_refresh_schedule(
+    dataset: str | UUID,
+    days: Optional[str | List[str]] = None,
+    times: Optional[str | List[str]] = None,
+    time_zone: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Updates the refresh schedule for the specified dataset from the specified workspace.
+
+    This is a wrapper function for the following API: `Datasets - Update Refresh Schedule In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-refresh-schedule-in-group>`_.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    days : str | list[str], default=None
+        The days of the week to refresh the dataset.
+        Valid values are: "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday".
+        Defaults to None which means the refresh schedule will not be updated.
+    times : str | list[str], default=None
+        The times of the day to refresh the dataset.
+        Valid format is "HH:MM" (24-hour format).
+        Defaults to None which means the refresh schedule will not be updated.
+    time_zone : str, default=None
+        The time zone to use for the refresh schedule.
+        Defaults to None which means the refresh schedule will not be updated.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+    payload = {"value": {}}
+
+    def is_valid_time_format(time_str):
+        pattern = r"^(?:[01]\d|2[0-3]):[0-5]\d$"
+        return re.match(pattern, time_str) is not None
+
+    weekdays = [
+        "Monday",
+        "Tuesday",
+        "Wednesday",
+        "Thursday",
+        "Friday",
+        "Sunday",
+        "Saturday",
+    ]
+    if days:
+        if isinstance(days, str):
+            days = [days]
+        for i in range(len(days)):
+            days[i] = days[i].capitalize()
+            if days[i] not in weekdays:
+                raise ValueError(
+                    f"{icons.red_dot} Invalid day '{days[i]}'. Valid days are: {weekdays}"
+                )
+        payload["value"]["days"] = days
+    if times:
+        if isinstance(times, str):
+            times = [times]
+        for i in range(len(times)):
+            if not is_valid_time_format(times[i]):
+                raise ValueError(
+                    f"{icons.red_dot} Invalid time '{times[i]}'. Valid time format is 'HH:MM' (24-hour format)."
+                )
+        payload["value"]["times"] = times
+    if time_zone:
+        payload["value"]["localTimeZoneId"] = time_zone
+
+    if not payload.get("value"):
+        print(
+            f"{icons.info} No changes were made to the refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace."
+        )
+        return
+
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+        method="patch",
+        client="fabric_sp",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace has been updated."
+    )
```
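A sketch of the new scheduling helper (dataset and workspace names are placeholders; the function validates the day names and HH:MM times client-side before PATCHing the refresh schedule):

```python
from sempy_labs._semantic_models import update_semantic_model_refresh_schedule

# Refresh on weekday mornings and evenings; arguments left as None are
# simply omitted from the PATCH payload and remain unchanged.
update_semantic_model_refresh_schedule(
    dataset="Sales Model",
    days=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"],
    times=["07:30", "19:00"],
    time_zone="UTC",
    workspace="My Workspace",
)
```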
sempy_labs/_sql.py
CHANGED

```diff
@@ -185,7 +185,7 @@ class ConnectWarehouse(ConnectBase):
 class ConnectLakehouse(ConnectBase):
     def __init__(
         self,
-        lakehouse: str | UUID,
+        lakehouse: Optional[str | UUID] = None,
         workspace: Optional[Union[str, UUID]] = None,
         timeout: int = 30,
     ):
@@ -194,8 +194,9 @@ class ConnectLakehouse(ConnectBase):
 
         Parameters
         ----------
-        lakehouse : str | uuid.UUID
+        lakehouse : str | uuid.UUID, default=None
             The name or ID of the Fabric lakehouse.
+            Defaults to None which resolves to the lakehouse attached to the notebook.
         workspace : str | uuid.UUID, default=None
             The name or ID of the workspace.
             Defaults to None which resolves to the workspace of the attached lakehouse
```
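With this change, `ConnectLakehouse` can be constructed without arguments inside a notebook that has an attached lakehouse. A sketch (the `query` call follows the library's documented connection pattern but is not part of this diff, and the table name is hypothetical):

```python
from sempy_labs._sql import ConnectLakehouse

# 0.9.10 required an explicit lakehouse; 0.9.11 defaults to the attached one.
with ConnectLakehouse() as conn:
    df = conn.query("SELECT TOP 10 * FROM dbo.my_table")  # hypothetical table
```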
sempy_labs/_tags.py
ADDED

```diff
@@ -0,0 +1,194 @@
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    resolve_item_name_and_id,
+    resolve_workspace_name_and_id,
+    _is_valid_uuid,
+)
+import pandas as pd
+from typing import Optional, List
+from uuid import UUID
+import sempy_labs._icons as icons
+
+
+def list_tags() -> pd.DataFrame:
+    """
+    Shows a list of all the tenant's tags.
+
+    This is a wrapper function for the following API: `Tags - List Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/list-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of all the tenant's tags.
+    """
+
+    columns = {
+        "Tag Name": "string",
+        "Tag Id": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request="/v1/tags",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    dfs = []
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Tag Name": v.get("displayName"),
+                "Tag Id": v.get("id"),
+            }
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+def resolve_tags(tags: str | List[str]) -> List[str]:
+    """
+    Resolves the tags to a list of strings.
+
+    Parameters
+    ----------
+    tags : str | List[str]
+        The tags to resolve.
+
+    Returns
+    -------
+    List[str]
+        A list of resolved tags.
+    """
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    if all(_is_valid_uuid(tag) for tag in tags):
+        return tags
+
+    df = list_tags()
+
+    tag_list = []
+    for tag in tags:
+        if _is_valid_uuid(tag):
+            tag_list.append(tag)
+        else:
+            df_filt = df[df["Tag Name"] == tag]
+            if df_filt.empty:
+                raise ValueError(f"Tag '{tag}' not found in the tenant's tags.")
+            tag_id = df_filt["Tag Id"].iloc[0]
+            tag_list.append(tag_id)
+
+    return tag_list
+
+
+def apply_tags(
+    item: str | UUID,
+    type: str,
+    tags: str | UUID | List[str | UUID],
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Applies the specified tag(s) to the specified item.
+
+    This is a wrapper function for the following API: `Tags - Apply Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/apply-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to apply tags to.
+    type : str
+        The type of the item to apply tags to. For example: "Lakehouse".
+    tags : str | uuid.UUID | List[str | uuid.UUID]
+        The name or ID of the tag(s) to apply to the item.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    tag_list = resolve_tags(tags)
+
+    payload = {
+        "tags": tag_list,
+    }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/applyTags",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Tags {tags} applied to the '{item_name}' {type.lower()} within the '{workspace_name}' workspace"
+    )
+
+
+def unapply_tags(
+    item: str | UUID,
+    type: str,
+    tags: str | UUID | List[str | UUID],
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Removes the specified tag(s) from the specified item.
+
+    This is a wrapper function for the following API: `Tags - Unapply Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/unapply-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to remove tags from.
+    type : str
+        The type of the item to remove tags from. For example: "Lakehouse".
+    tags : str | uuid.UUID | List[str | uuid.UUID]
+        The name or ID of the tag(s) to remove from the item.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    tag_list = resolve_tags(tags)
+
+    payload = {
+        "tags": tag_list,
+    }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/unapplyTags",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Tags {tags} removed from the '{item_name}' {type.lower()} within the '{workspace_name}' workspace"
+    )
```
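A brief sketch of the new tag helpers (tag, item, and workspace names are placeholders):

```python
from sempy_labs._tags import apply_tags, list_tags, unapply_tags

# Enumerate the tenant's tags (Service Principal auth is supported).
tags_df = list_tags()

# Tags may be passed by display name or by ID; resolve_tags maps names to IDs.
apply_tags(item="Sales", type="Lakehouse", tags="Certified", workspace="My Workspace")

# Remove the same tag again.
unapply_tags(item="Sales", type="Lakehouse", tags="Certified", workspace="My Workspace")
```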
sempy_labs/_variable_libraries.py
ADDED

```diff
@@ -0,0 +1,89 @@
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_workspace_id,
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    delete_item,
+)
+import pandas as pd
+from typing import Optional
+from uuid import UUID
+
+
+def list_variable_libraries(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the variable libraries within a workspace.
+
+    This is a wrapper function for the following API: `Items - List Variable Libraries <https://learn.microsoft.com/rest/api/fabric/variablelibrary/items/list-variable-libraries>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the variable libraries within a workspace.
+    """
+
+    columns = {
+        "Variable Library Name": "string",
+        "Variable Library Id": "string",
+        "Description": "string",
+        "Active Value Set Name": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/VariableLibraries",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    dfs = []
+    for r in responses:
+        for v in r.get("value", []):
+            prop = v.get("properties", {})
+
+            new_data = {
+                "Variable Library Name": v.get("displayName"),
+                "Variable Library Id": v.get("id"),
+                "Description": v.get("description"),
+                "Active Value Set Name": prop.get("activeValueSetName"),
+            }
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+def delete_variable_library(
+    variable_library: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Deletes a variable library.
+
+    This is a wrapper function for the following API: `Items - Delete Variable Library <https://learn.microsoft.com/rest/api/fabric/warehouse/items/delete-variable-library>`_.
+
+    Parameters
+    ----------
+    variable_library : str | uuid.UUID
+        Name or ID of the variable library.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=variable_library, type="VariableLibrary", workspace=workspace)
```