semantic-link-labs 0.9.10 → 0.10.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (40)
  1. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/METADATA +28 -21
  2. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/RECORD +38 -31
  3. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -1
  5. sempy_labs/_delta_analyzer.py +9 -8
  6. sempy_labs/_dictionary_diffs.py +221 -0
  7. sempy_labs/_environments.py +19 -1
  8. sempy_labs/_generate_semantic_model.py +1 -1
  9. sempy_labs/_helper_functions.py +358 -134
  10. sempy_labs/_kusto.py +25 -23
  11. sempy_labs/_list_functions.py +13 -35
  12. sempy_labs/_model_bpa_rules.py +13 -3
  13. sempy_labs/_notebooks.py +44 -11
  14. sempy_labs/_semantic_models.py +93 -1
  15. sempy_labs/_sql.py +4 -3
  16. sempy_labs/_tags.py +194 -0
  17. sempy_labs/_user_delegation_key.py +42 -0
  18. sempy_labs/_variable_libraries.py +89 -0
  19. sempy_labs/_vpax.py +388 -0
  20. sempy_labs/admin/__init__.py +8 -0
  21. sempy_labs/admin/_tags.py +126 -0
  22. sempy_labs/directlake/_generate_shared_expression.py +5 -1
  23. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +55 -5
  24. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  25. sempy_labs/lakehouse/__init__.py +14 -0
  26. sempy_labs/lakehouse/_blobs.py +100 -85
  27. sempy_labs/lakehouse/_get_lakehouse_tables.py +1 -13
  28. sempy_labs/lakehouse/_helper.py +211 -0
  29. sempy_labs/lakehouse/_lakehouse.py +1 -1
  30. sempy_labs/lakehouse/_livy_sessions.py +137 -0
  31. sempy_labs/report/__init__.py +2 -0
  32. sempy_labs/report/_download_report.py +1 -1
  33. sempy_labs/report/_generate_report.py +5 -1
  34. sempy_labs/report/_report_helper.py +27 -128
  35. sempy_labs/report/_reportwrapper.py +1903 -1165
  36. sempy_labs/tom/_model.py +83 -21
  37. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -9
  38. sempy_labs/report/_bpareporttemplate/.platform +0 -11
  39. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/licenses/LICENSE +0 -0
  40. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/top_level.txt +0 -0
sempy_labs/_kusto.py CHANGED
@@ -77,33 +77,35 @@ def query_kusto(
 
     df = pd.DataFrame(rows, columns=[col["ColumnName"] for col in columns_info])
 
-    for col_info in columns_info:
-        col_name = col_info["ColumnName"]
-        data_type = col_info["DataType"]
-
-        try:
-            if data_type == "DateTime":
-                df[col_name] = pd.to_datetime(df[col_name])
-            elif data_type in ["Int64", "Int32", "Long"]:
-                df[col_name] = (
-                    pd.to_numeric(df[col_name], errors="coerce")
-                    .fillna(0)
-                    .astype("int64")
-                )
-            elif data_type == "Real" or data_type == "Double":
-                df[col_name] = pd.to_numeric(df[col_name], errors="coerce")
-            else:
-                # Convert any other type to string, change as needed
-                df[col_name] = df[col_name].astype(str)
-        except Exception as e:
-            print(
-                f"{icons.yellow_dot} Could not convert column {col_name} to {data_type}, defaulting to string: {str(e)}"
-            )
-            df[col_name] = df[col_name].astype(str)
+    return df
+    # for col_info in columns_info:
+    #     col_name = col_info["ColumnName"]
+    #     data_type = col_info["DataType"]
+
+    #     try:
+    #         if data_type == "DateTime":
+    #             df[col_name] = pd.to_datetime(df[col_name])
+    #         elif data_type in ["Int64", "Int32", "Long"]:
+    #             df[col_name] = (
+    #                 pd.to_numeric(df[col_name], errors="coerce")
+    #                 .fillna(0)
+    #                 .astype("int64")
+    #             )
+    #         elif data_type == "Real" or data_type == "Double":
+    #             df[col_name] = pd.to_numeric(df[col_name], errors="coerce")
+    #         else:
+    #             # Convert any other type to string, change as needed
+    #             df[col_name] = df[col_name].astype(str)
+    #     except Exception as e:
+    #         print(
+    #             f"{icons.yellow_dot} Could not convert column {col_name} to {data_type}, defaulting to string: {str(e)}"
+    #         )
+    #         df[col_name] = df[col_name].astype(str)
 
     return df
 
 
+@log
 def query_workspace_monitoring(
     query: str, workspace: Optional[str | UUID] = None, language: str = "kql"
 ) -> pd.DataFrame:
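Note on the query_kusto change above: the per-column dtype coercion is now commented out, so the returned DataFrame keeps the values as the Kusto response delivered them. A minimal caller-side sketch for reapplying conversions (the column names are hypothetical placeholders, not part of the package):

    import pandas as pd

    # df is the DataFrame returned by query_kusto(); convert columns as needed.
    df["Timestamp"] = pd.to_datetime(df["Timestamp"], errors="coerce")  # hypothetical DateTime column
    df["RecordCount"] = (
        pd.to_numeric(df["RecordCount"], errors="coerce").fillna(0).astype("int64")  # hypothetical Int64 column
    )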
sempy_labs/_list_functions.py CHANGED
@@ -41,54 +41,32 @@ def get_object_level_security(
 
     from sempy_labs.tom import connect_semantic_model
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
-
     columns = {
         "Role Name": "string",
         "Object Type": "string",
         "Table Name": "string",
         "Object Name": "string",
+        "Metadata Permission": "string",
     }
     df = _create_dataframe(columns=columns)
 
     with connect_semantic_model(
-        dataset=dataset_id, readonly=True, workspace=workspace_id
+        dataset=dataset, readonly=True, workspace=workspace
     ) as tom:
 
         for r in tom.model.Roles:
            for tp in r.TablePermissions:
-                if len(tp.FilterExpression) == 0:
-                    columnCount = 0
-                    try:
-                        columnCount = len(tp.ColumnPermissions)
-                    except Exception:
-                        pass
-                    objectType = "Table"
-                    if columnCount == 0:
-                        new_data = {
-                            "Role Name": r.Name,
-                            "Object Type": objectType,
-                            "Table Name": tp.Name,
-                            "Object Name": tp.Name,
-                        }
-                        df = pd.concat(
-                            [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                        )
-                    else:
-                        objectType = "Column"
-                        for cp in tp.ColumnPermissions:
-                            new_data = {
-                                "Role Name": r.Name,
-                                "Object Type": objectType,
-                                "Table Name": tp.Name,
-                                "Object Name": cp.Name,
-                            }
-                            df = pd.concat(
-                                [df, pd.DataFrame(new_data, index=[0])],
-                                ignore_index=True,
-                            )
-
+                for cp in tp.ColumnPermissions:
+                    new_data = {
+                        "Role Name": r.Name,
+                        "Object Type": "Column",
+                        "Table Name": tp.Name,
+                        "Object Name": cp.Name,
+                        "Metadata Permission": cp.Permission,
+                    }
+                    df = pd.concat(
+                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
     return df
 
 
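The rewritten get_object_level_security above drops the table-level rows, reports one row per column permission, and adds a "Metadata Permission" column taken from cp.Permission. A minimal usage sketch, assuming the function remains re-exported at the package root as in prior releases (the dataset and workspace names are placeholders):

    from sempy_labs import get_object_level_security

    ols = get_object_level_security(dataset="Sales Model", workspace="My Workspace")
    # One row per column permission, including the new metadata permission value
    print(ols[["Role Name", "Table Name", "Object Name", "Metadata Permission"]])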
sempy_labs/_model_bpa_rules.py CHANGED
@@ -674,8 +674,18 @@ def model_bpa_rules(
             "Provide format string for 'Date' columns",
             lambda obj, tom: (re.search(r"date", obj.Name, flags=re.IGNORECASE))
             and (obj.DataType == TOM.DataType.DateTime)
-            and (obj.FormatString != "mm/dd/yyyy"),
-            'Columns of type "DateTime" that have "Month" in their names should be formatted as "mm/dd/yyyy".',
+            and (
+                obj.FormatString.lower()
+                not in [
+                    "mm/dd/yyyy",
+                    "mm-dd-yyyy",
+                    "dd/mm/yyyy",
+                    "dd-mm-yyyy",
+                    "yyyy-mm-dd",
+                    "yyyy/mm/dd",
+                ]
+            ),
+            'Columns of type "DateTime" that have "Date" in their names should be formatted.',
         ),
         (
             "Formatting",
@@ -789,7 +799,7 @@ def model_bpa_rules(
             "Formatting",
             "Column",
             "Warning",
-            'Provide format string for "Month" columns',
+            "Provide format string for 'Month' columns",
             lambda obj, tom: re.search(r"month", obj.Name, flags=re.IGNORECASE)
             and obj.DataType == TOM.DataType.DateTime
             and obj.FormatString != "MMMM yyyy",
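The 'Date' column rule above now accepts any of six day/month/year layouts instead of requiring "mm/dd/yyyy" exactly, and the comparison is case-insensitive because the format string is lower-cased first. A small sketch of the same check in isolation (the sample format string is illustrative):

    accepted = ["mm/dd/yyyy", "mm-dd-yyyy", "dd/mm/yyyy", "dd-mm-yyyy", "yyyy-mm-dd", "yyyy/mm/dd"]
    format_string = "yyyy-MM-dd"  # example format string on a 'Date' column
    flagged = format_string.lower() not in accepted
    print(flagged)  # False: this layout no longer triggers the rule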
sempy_labs/_notebooks.py CHANGED
@@ -7,6 +7,7 @@ import requests
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
+    resolve_workspace_id,
     _decode_b64,
     _base_api,
     resolve_item_id,
@@ -20,13 +21,20 @@ _notebook_prefix = "notebook-content."
 
 
 def _get_notebook_definition_base(
-    notebook_name: str, workspace: Optional[str | UUID] = None
+    notebook_name: str,
+    workspace: Optional[str | UUID] = None,
+    format: Optional[str] = None,
 ) -> pd.DataFrame:
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(item=notebook_name, type="Notebook", workspace=workspace)
+
+    url = f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition"
+    if format == "ipynb":
+        url += f"?format={format}"
+
     result = _base_api(
-        request=f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition",
+        request=url,
         method="post",
         lro_return_json=True,
         status_codes=None,
@@ -53,7 +61,10 @@ def _get_notebook_type(
 
 
 def get_notebook_definition(
-    notebook_name: str, workspace: Optional[str | UUID] = None, decode: bool = True
+    notebook_name: str,
+    workspace: Optional[str | UUID] = None,
+    decode: bool = True,
+    format: Optional[str] = None,
 ) -> str:
     """
     Obtains the notebook definition.
@@ -71,6 +82,9 @@ def get_notebook_definition(
     decode : bool, default=True
         If True, decodes the notebook definition file into .ipynb format.
         If False, obtains the notebook definition file in base64 format.
+    format : str, default=None
+        The only supported value is ipynb
+        If provided the format will be in standard .ipynb otherwise the format will be in source code format which is GIT friendly ipynb
 
     Returns
     -------
@@ -79,7 +93,7 @@
     """
 
     df_items = _get_notebook_definition_base(
-        notebook_name=notebook_name, workspace=workspace
+        notebook_name=notebook_name, workspace=workspace, format=format
     )
     df_items_filt = df_items[df_items["path"].str.startswith(_notebook_prefix)]
     payload = df_items_filt["payload"].iloc[0]
@@ -163,6 +177,7 @@
     type: str = "py",
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
+    format: Optional[str] = None,
 ):
     """
     Creates a new notebook with a definition within a workspace.
@@ -182,20 +197,27 @@
         The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    format : str, default=None
+        If 'ipynb' is provided than notebook_content should be standard ipynb format
+        otherwise notebook_content should be GIT friendly format
     """
 
-    notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
+    notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+        "utf-8"
+    )
     definition_payload = {
-        "format": "ipynb",
         "parts": [
             {
-                "path": f"{_notebook_prefix}.{type}",
+                "path": f"{_notebook_prefix}{type}",
                 "payload": notebook_payload,
                 "payloadType": "InlineBase64",
            }
         ],
     }
 
+    if format == "ipynb":
+        definition_payload["format"] = "ipynb"
+
     create_item(
         name=name,
         type="Notebook",
@@ -206,7 +228,10 @@
 
 
 def update_notebook_definition(
-    name: str, notebook_content: str, workspace: Optional[str | UUID] = None
+    name: str,
+    notebook_content: str,
+    workspace: Optional[str | UUID] = None,
+    format: Optional[str] = None,
 ):
     """
     Updates an existing notebook with a new definition.
@@ -221,10 +246,15 @@
         The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    format : str, default=None
+        If 'ipynb' is provided than notebook_content should be standard ipynb format
+        otherwise notebook_content should be GIT friendly format
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    notebook_payload = base64.b64encode(notebook_content)
+    notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+        "utf-8"
+    )
     item_id = resolve_item_id(item=name, type="Notebook", workspace=workspace)
     type = _get_notebook_type(notebook_name=name, workspace=workspace)
 
@@ -232,7 +262,7 @@
         "definition": {
             "parts": [
                 {
-                    "path": f"{_notebook_prefix}.{type}",
+                    "path": f"{_notebook_prefix}{type}",
                     "payload": notebook_payload,
                     "payloadType": "InlineBase64",
                 }
@@ -240,6 +270,9 @@
             },
         }
 
+    if format == "ipynb":
+        payload["definition"]["format"] = "ipynb"
+
     _base_api(
         request=f"v1/workspaces/{workspace_id}/notebooks/{item_id}/updateDefinition",
         payload=payload,
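The notebook functions above gain an optional format parameter; passing "ipynb" requests the standard .ipynb representation instead of the GIT-friendly source format, and string content is now UTF-8 encoded before the base64 step. A minimal round-trip sketch, assuming these functions remain re-exported from the sempy_labs package root (notebook and workspace names are placeholders):

    import sempy_labs as labs

    # Export a notebook definition as a standard .ipynb document
    nb_ipynb = labs.get_notebook_definition(
        notebook_name="My Notebook", workspace="My Workspace", format="ipynb"
    )

    # Recreate it under a new name from the same ipynb content
    labs.create_notebook(
        name="My Notebook Copy",
        notebook_content=nb_ipynb,
        workspace="My Workspace",
        format="ipynb",
    )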
sempy_labs/_semantic_models.py CHANGED
@@ -1,5 +1,5 @@
 from uuid import UUID
-from typing import Optional
+from typing import Optional, List
 import pandas as pd
 from sempy_labs._helper_functions import (
     _create_dataframe,
@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
     delete_item,
 )
 import sempy_labs._icons as icons
+import re
 
 
 def get_semantic_model_refresh_schedule(
@@ -135,3 +136,94 @@ def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] =
     """
 
     delete_item(item=dataset, type="SemanticModel", workspace=workspace)
+
+
+def update_semantic_model_refresh_schedule(
+    dataset: str | UUID,
+    days: Optional[str | List[str]] = None,
+    times: Optional[str | List[str]] = None,
+    time_zone: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Updates the refresh schedule for the specified dataset from the specified workspace.
+
+    This is a wrapper function for the following API: `Datasets - Update Refresh Schedule In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-refresh-schedule-in-group>`_.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    days : str | list[str], default=None
+        The days of the week to refresh the dataset.
+        Valid values are: "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday".
+        Defaults to None which means the refresh schedule will not be updated.
+    times : str | list[str], default=None
+        The times of the day to refresh the dataset.
+        Valid format is "HH:MM" (24-hour format).
+        Defaults to None which means the refresh schedule will not be updated.
+    time_zone : str, default=None
+        The time zone to use for the refresh schedule.
+        Defaults to None which means the refresh schedule will not be updated.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+    payload = {"value": {}}
+
+    def is_valid_time_format(time_str):
+        pattern = r"^(?:[01]\d|2[0-3]):[0-5]\d$"
+        return re.match(pattern, time_str) is not None
+
+    weekdays = [
+        "Monday",
+        "Tuesday",
+        "Wednesday",
+        "Thursday",
+        "Friday",
+        "Sunday",
+        "Saturday",
+    ]
+    if days:
+        if isinstance(days, str):
+            days = [days]
+        for i in range(len(days)):
+            days[i] = days[i].capitalize()
+            if days[i] not in weekdays:
+                raise ValueError(
+                    f"{icons.red_dot} Invalid day '{days[i]}'. Valid days are: {weekdays}"
+                )
+        payload["value"]["days"] = days
+    if times:
+        if isinstance(times, str):
+            times = [times]
+        for i in range(len(times)):
+            if not is_valid_time_format(times[i]):
+                raise ValueError(
+                    f"{icons.red_dot} Invalid time '{times[i]}'. Valid time format is 'HH:MM' (24-hour format)."
+                )
+        payload["value"]["times"] = times
+    if time_zone:
+        payload["value"]["localTimeZoneId"] = time_zone
+
+    if not payload.get("value"):
+        print(
+            f"{icons.info} No changes were made to the refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace."
+        )
+        return
+
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+        method="patch",
+        client="fabric_sp",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace has been updated."
+    )
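The new update_semantic_model_refresh_schedule above only patches the fields you supply (days, times, localTimeZoneId) and validates day names and HH:MM times before calling the Power BI refreshSchedule endpoint. A minimal usage sketch importing from the module shown above (it may also be re-exported from the package root; model and workspace names are placeholders):

    from sempy_labs._semantic_models import update_semantic_model_refresh_schedule

    update_semantic_model_refresh_schedule(
        dataset="Sales Model",
        days=["Monday", "Wednesday", "Friday"],
        times=["07:00", "19:30"],
        time_zone="UTC",
        workspace="My Workspace",
    )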
sempy_labs/_sql.py CHANGED
@@ -82,7 +82,7 @@ class ConnectBase:
         )
 
         # Set up the connection string
-        access_token = SynapseTokenProvider()()
+        access_token = SynapseTokenProvider()("sql")
         tokenstruct = _bytes2mswin_bstr(access_token.encode())
         if endpoint_type == "sqldatabase":
             conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name}-{resource_id};Encrypt=Yes;"
@@ -185,7 +185,7 @@ class ConnectWarehouse(ConnectBase):
 class ConnectLakehouse(ConnectBase):
     def __init__(
         self,
-        lakehouse: str | UUID,
+        lakehouse: Optional[str | UUID] = None,
         workspace: Optional[Union[str, UUID]] = None,
         timeout: int = 30,
     ):
@@ -194,8 +194,9 @@ class ConnectLakehouse(ConnectBase):
 
         Parameters
         ----------
-        lakehouse : str | uuid.UUID
+        lakehouse : str | uuid.UUID, default=None
             The name or ID of the Fabric lakehouse.
+            Defaults to None which resolves to the lakehouse attached to the notebook.
         workspace : str | uuid.UUID, default=None
             The name or ID of the workspace.
             Defaults to None which resolves to the workspace of the attached lakehouse
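ConnectLakehouse above now takes an optional lakehouse argument, defaulting to the lakehouse attached to the notebook, and the token provider is asked for a "sql"-scoped token. A minimal sketch, assuming the existing context-manager and query interface of ConnectBase is unchanged (the table name is a placeholder):

    from sempy_labs import ConnectLakehouse

    with ConnectLakehouse() as sql:  # no lakehouse given: resolves to the attached lakehouse
        df = sql.query("SELECT TOP 10 * FROM dbo.sales")
    print(df.head())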
sempy_labs/_tags.py ADDED
@@ -0,0 +1,194 @@
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    resolve_item_name_and_id,
+    resolve_workspace_name_and_id,
+    _is_valid_uuid,
+)
+import pandas as pd
+from typing import Optional, List
+from uuid import UUID
+import sempy_labs._icons as icons
+
+
+def list_tags() -> pd.DataFrame:
+    """
+    Shows a list of all the tenant's tags.
+
+    This is a wrapper function for the following API: `Tags - List Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/list-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of all the tenant's tags.
+    """
+
+    columns = {
+        "Tag Name": "string",
+        "Tag Id": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request="/v1/tags",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    dfs = []
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Tag Name": v.get("displayName"),
+                "Tag Id": v.get("id"),
+            }
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+def resolve_tags(tags: str | List[str]) -> List[str]:
+    """
+    Resolves the tags to a list of strings.
+
+    Parameters
+    ----------
+    tags : str | List[str]
+        The tags to resolve.
+
+    Returns
+    -------
+    List[str]
+        A list of resolved tags.
+    """
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    if all(_is_valid_uuid(tag) for tag in tags):
+        return tags
+
+    df = list_tags()
+
+    tag_list = []
+    for tag in tags:
+        if _is_valid_uuid(tag):
+            tag_list.append(tag)
+        else:
+            df_filt = df[df["Tag Name"] == tag]
+            if df_filt.empty:
+                raise ValueError(f"Tag '{tag}' not found in the tenant's tags.")
+            tag_id = df_filt["Tag Id"].iloc[0]
+            tag_list.append(tag_id)
+
+    return tag_list
+
+
+def apply_tags(
+    item: str | UUID,
+    type: str,
+    tags: str | UUID | List[str | UUID],
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Shows a list of all the tenant's tags.
+
+    This is a wrapper function for the following API: `Tags - Apply Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/apply-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to apply tags to.
+    type : str
+        The type of the item to apply tags to. For example: "Lakehouse".
+    tags : str | uuid.UUID | List[str | uuid.UUID]
+        The name or ID of the tag(s) to apply to the item.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    tag_list = resolve_tags(tags)
+
+    payload = {
+        "tags": tag_list,
+    }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/applyTags",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Tags {tags} applied to the '{item_name}' {type.lower()} within the '{workspace_name}' workspace"
+    )
+
+
+def unapply_tags(
+    item: str | UUID,
+    type: str,
+    tags: str | UUID | List[str | UUID],
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Shows a list of all the tenant's tags.
+
+    This is a wrapper function for the following API: `Tags - Unapply Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/unapply-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to apply tags to.
+    type : str
+        The type of the item to apply tags to. For example: "Lakehouse".
+    tags : str | uuid.UUID | List[str | uuid.UUID]
+        The name or ID of the tag(s) to apply to the item.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    tag_list = resolve_tags(tags)
+
+    payload = {
+        "tags": tag_list,
+    }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/unapplyTags",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Tags {tags} applied to the '{item_name}' {type.lower()} within the '{workspace_name}' workspace"
+    )
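The new _tags module above exposes list_tags, apply_tags and unapply_tags; tag names are resolved to IDs via resolve_tags before the applyTags/unapplyTags endpoints are called. A minimal usage sketch importing from the module shown above (it may also be re-exported from the package root; item, tag and workspace names are placeholders):

    from sempy_labs._tags import apply_tags, list_tags

    print(list_tags())  # tenant-wide tag names and IDs

    apply_tags(
        item="My Lakehouse",
        type="Lakehouse",
        tags=["Certified"],  # tag names or tag IDs are both accepted
        workspace="My Workspace",
    )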
sempy_labs/_user_delegation_key.py ADDED
@@ -0,0 +1,42 @@
+from sempy_labs.lakehouse._blobs import _request_blob_api
+from sempy_labs._helper_functions import (
+    _xml_to_dict,
+)
+from datetime import datetime, timedelta, timezone
+import xml.etree.ElementTree as ET
+
+
+def get_user_delegation_key():
+    """
+    Gets a key that can be used to sign a user delegation SAS (shared access signature). A user delegation SAS grants access to Azure Blob Storage resources by using Microsoft Entra credentials.
+
+    This is a wrapper function for the following API: `Get User Delegation Key <https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key>`_.
+
+    Returns
+    -------
+    str
+        The user delegation key value.
+    """
+
+    utc_now = datetime.now(timezone.utc)
+    start_time = utc_now + timedelta(minutes=2)
+    expiry_time = start_time + timedelta(minutes=60)
+    start_str = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+    expiry_str = expiry_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+
+    payload = f"""<?xml version="1.0" encoding="utf-8"?>
+    <KeyInfo>
+        <Start>{start_str}</Start>
+        <Expiry>{expiry_str}</Expiry>
+    </KeyInfo>"""
+
+    response = _request_blob_api(
+        request="?restype=service&comp=userdelegationkey",
+        method="post",
+        payload=payload,
+    )
+
+    root = ET.fromstring(response.content)
+    response_json = _xml_to_dict(root)
+
+    return response_json.get("UserDelegationKey", {}).get("Value", None)
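get_user_delegation_key above requests a delegation key whose validity starts about two minutes after the call and lasts 60 minutes, returning only the key's Value element. A minimal usage sketch importing from the private module shown above (it may also be re-exported from the package root; signing the actual SAS is left to the caller):

    from sempy_labs._user_delegation_key import get_user_delegation_key

    key_value = get_user_delegation_key()
    print(key_value)  # key value used to sign a user delegation SAS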