semantic-link-labs 0.9.3-py3-none-any.whl → 0.9.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (68)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_workspaces.py (new file)
@@ -0,0 +1,148 @@
+ from sempy_labs._helper_functions import (
+     _base_api,
+     _build_url,
+     _encode_user,
+ )
+ from uuid import UUID
+ from typing import Optional
+ from sempy_labs.admin._basic_functions import (
+     _resolve_workspace_name_and_id,
+ )
+ import sempy_labs._icons as icons
+
+
+ def add_user_to_workspace(
+     user: str | UUID,
+     role: str = "Member",
+     principal_type: str = "User",
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Grants user permissions to the specified workspace.
+
+     This is a wrapper function for the following API: `Admin - Groups AddUserAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/groups-add-user-as-admin>`_.
+
+     Parameters
+     ----------
+     user : str | uuid.UUID
+         The user identifier or email address. For service principals and groups, you must use the user identifier.
+     role : str, default="Member"
+         The role of the user in the workspace. Options are: 'Admin', 'Contributor', 'Member', 'None', 'Viewer'.
+     principal_type : str, default="User"
+         The principal type of the user. Options are: 'App', 'Group', 'None', 'User'.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
+
+     # Validation
+     role = role.capitalize()
+     roles = ["Admin", "Contributor", "Member", "None", "Viewer"]
+     if role not in roles:
+         raise ValueError(f"{icons.red_dot} Invalid role. Please choose from {roles}")
+     principal_types = ["App", "Group", "None", "User"]
+     if principal_type not in principal_types:
+         raise ValueError(
+             f"{icons.red_dot} Invalid principal type. Please choose from {principal_types}"
+         )
+
+     user = _encode_user(user)
+
+     payload = {
+         "identifier": user,  # identifier or emailAddress?
+         "principalType": principal_type,
+         "groupUserAccessRight": role,
+     }
+
+     _base_api(
+         request=f"/v1.0/myorg/admin/groups/{workspace_id}/users",
+         method="post",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} The '{user}' user has been added with '{role.lower()}' permissions to the '{workspace_name}' workspace."
+     )
+
+
+ def delete_user_from_workspace(
+     user: str | UUID,
+     workspace: Optional[str | UUID] = None,
+     is_group: Optional[bool] = None,
+     profile_id: Optional[str] = None,
+ ):
+     """
+     Removes user permissions from the specified workspace.
+
+     This is a wrapper function for the following API: `Admin - Groups DeleteUserAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/groups-delete-user-as-admin>`_.
+
+     Parameters
+     ----------
+     user : str | uuid.UUID
+         The user identifier or email address. For service principals and groups, you must use the user identifier.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     is_group : bool, default=None
+         Whether the given user is a group. This parameter is required when the user to delete is a group.
+     profile_id : str, default=None
+         The service principal profile ID to delete.
+     """
+
+     (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
+
+     user = _encode_user(user)
+     url = f"/v1.0/myorg/admin/groups/{workspace_id}/users/{user}"
+
+     params = {}
+     if profile_id is not None:
+         params["profileId"] = profile_id
+     if is_group is not None:
+         params["isGroup"] = is_group
+
+     url = _build_url(url, params)
+
+     _base_api(
+         request=url,
+         method="delete",
+     )
+
+     print(
+         f"{icons.green_dot} The '{user}' user has been removed from the '{workspace_name}' workspace."
+     )
+
+
+ def restore_deleted_workspace(workspace_id: UUID, name: str, email_address: str):
+     """
+     Restores a deleted workspace.
+
+     This is a wrapper function for the following API: `Admin - Groups RestoreDeletedGroupAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/groups-restore-deleted-group-as-admin>`_.
+
+     Parameters
+     ----------
+     workspace_id : uuid.UUID
+         The ID of the workspace to restore.
+     name : str
+         The name of the group to be restored.
+     email_address : str
+         The email address of the owner of the group to be restored.
+     """
+
+     payload = {
+         "name": name,
+         "emailAddress": email_address,
+     }
+
+     _base_api(
+         request=f"/v1.0/myorg/admin/groups/{workspace_id}/restore",
+         method="post",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} The '{workspace_id}' workspace has been restored as '{name}'."
+     )
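
The three helpers above are new in this release. As a rough usage sketch, assuming they are re-exported from sempy_labs.admin (the expanded admin/__init__.py in this diff suggests they are) and run from an authenticated Fabric notebook; the workspace name, email address, and GUID are placeholders:

import sempy_labs.admin as admin

# Grant a user 'Viewer' access (placeholder workspace name and email address).
admin.add_user_to_workspace(
    user="user@contoso.com",
    role="Viewer",
    principal_type="User",
    workspace="Sales Analytics",
)

# Remove a group principal; is_group must be set when the principal is a group.
admin.delete_user_from_workspace(
    user="11111111-1111-1111-1111-111111111111",  # placeholder group ID
    workspace="Sales Analytics",
    is_group=True,
)
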
sempy_labs/directlake/_dl_helper.py
@@ -11,7 +11,6 @@ from sempy_labs._helper_functions import (
      _convert_data_type,
      resolve_dataset_name_and_id,
      resolve_workspace_name_and_id,
-     _base_api,
  )

sempy_labs/directlake/_update_directlake_partition_entity.py
@@ -17,6 +17,7 @@ def update_direct_lake_partition_entity(
      dataset: str | UUID,
      table_name: Union[str, List[str]],
      entity_name: Union[str, List[str]],
+     schema: Optional[str] = None,
      workspace: Optional[str | UUID] = None,
  ):
      """
@@ -30,6 +31,9 @@ def update_direct_lake_partition_entity(
          Name of the table(s) in the semantic model.
      entity_name : str, List[str]
          Name of the lakehouse table to be mapped to the semantic model table.
+     schema : str, default=None
+         The schema of the lakehouse table to be mapped to the semantic model table.
+         Defaults to None which resolves to the existing schema of the lakehouse table.
      workspace : str | uuid.UUID, default=None
          The Fabric workspace name or ID in which the semantic model exists.
          Defaults to None which resolves to the workspace of the attached lakehouse
@@ -77,6 +81,16 @@ def update_direct_lake_partition_entity(
      )

      tom.model.Tables[tName].Partitions[part_name].Source.EntityName = eName
+
+     # Update source lineage tag
+     existing_schema = (
+         tom.model.Tables[tName].Partitions[part_name].Source.SchemaName or "dbo"
+     )
+     if schema is None:
+         schema = existing_schema
+
+     tom.model.Tables[tName].Partitions[part_name].Source.SchemaName = schema
+     tom.model.Tables[tName].SourceLineageTag = f"[{schema}].[{eName}]"
      print(
          f"{icons.green_dot} The '{tName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{eName}' table."
      )
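
A hedged sketch of the new schema parameter, assuming update_direct_lake_partition_entity remains importable from sempy_labs.directlake as in prior releases; the dataset and table names are placeholders:

from sempy_labs.directlake import update_direct_lake_partition_entity

# Repoint the 'DimDate' table of the 'Sales' model (both placeholders) at the
# 'dim_date' lakehouse table in the 'gold' schema. Leaving schema=None keeps
# the partition's existing schema, falling back to 'dbo'.
update_direct_lake_partition_entity(
    dataset="Sales",
    table_name="DimDate",
    entity_name="dim_date",
    schema="gold",  # new in 0.9.5
)
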
sempy_labs/graph/_teams.py
@@ -35,7 +35,7 @@ def list_teams() -> pd.DataFrame:
          "Archived": "bool",
          "Favorite By Me": "bool",
          "Discoverable By Me": "bool",
-         "Member Count": "int",
+         "Member Count": "int_fillna",
      }

      df = _create_dataframe(columns=columns)
sempy_labs/graph/_users.py
@@ -125,6 +125,7 @@ def send_mail(
      subject: str,
      to_recipients: str | List[str],
      content: str,
+     content_type: str = "Text",
      cc_recipients: str | List[str] = None,
  ):
      """
@@ -144,10 +145,17 @@ def send_mail(
          The email address of the recipients.
      content : str
          The email content.
+     content_type : str, default="Text"
+         The email content type. Options: "Text" or "HTML".
      cc_recipients : str | List[str], default=None
          The email address of the CC recipients.
      """

+     if content_type.lower() == "html":
+         content_type = "HTML"
+     else:
+         content_type = "Text"
+
      user_id = resolve_user_id(user=user)

      if isinstance(to_recipients, str):
@@ -170,7 +178,7 @@ def send_mail(
          "message": {
              "subject": subject,
              "body": {
-                 "contentType": "Text",
+                 "contentType": content_type,
                  "content": content,
              },
              "toRecipients": to_email_addresses,
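
Based on the hunks above, sending an HTML body is now a one-parameter change. A minimal sketch, assuming send_mail is exported from sempy_labs.graph and that its first parameter is the sending user (as the resolve_user_id(user=user) call in the body suggests); the addresses are placeholders, and any content_type other than a case-insensitive "html" falls back to "Text":

from sempy_labs.graph import send_mail

send_mail(
    user="sender@contoso.com",          # placeholder mailbox
    subject="Refresh complete",
    to_recipients="owner@contoso.com",  # placeholder recipient
    content="<b>The semantic model refreshed successfully.</b>",
    content_type="HTML",                # new in 0.9.5
)
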
sempy_labs/lakehouse/__init__.py
@@ -12,6 +12,7 @@ from sempy_labs.lakehouse._shortcuts import (
      create_shortcut_onelake,
      delete_shortcut,
      reset_shortcut_cache,
+     list_shortcuts,
  )

  __all__ = [
@@ -25,4 +26,5 @@ __all__ = [
      "vacuum_lakehouse_tables",
      "reset_shortcut_cache",
      "run_table_maintenance",
+     "list_shortcuts",
  ]
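
With this export in place, the new function is importable straight from the subpackage:

from sempy_labs.lakehouse import list_shortcuts
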
sempy_labs/lakehouse/_lakehouse.py
@@ -35,7 +35,7 @@ def lakehouse_attached() -> bool:
  @log
  def optimize_lakehouse_tables(
      tables: Optional[Union[str, List[str]]] = None,
-     lakehouse: Optional[str] = None,
+     lakehouse: Optional[str | UUID] = None,
      workspace: Optional[str | UUID] = None,
  ):
      """
@@ -46,8 +46,8 @@ def optimize_lakehouse_tables(
      tables : str | List[str], default=None
          The table(s) to optimize.
          Defaults to None which resolves to optimizing all tables within the lakehouse.
-     lakehouse : str, default=None
-         The Fabric lakehouse.
+     lakehouse : str | uuid.UUID, default=None
+         The Fabric lakehouse name or ID.
          Defaults to None which resolves to the lakehouse attached to the notebook.
      workspace : str | uuid.UUID, default=None
          The Fabric workspace name or ID used by the lakehouse.
@@ -82,7 +82,7 @@ def optimize_lakehouse_tables(
  @log
  def vacuum_lakehouse_tables(
      tables: Optional[Union[str, List[str]]] = None,
-     lakehouse: Optional[str] = None,
+     lakehouse: Optional[str | UUID] = None,
      workspace: Optional[str | UUID] = None,
      retain_n_hours: Optional[int] = None,
  ):
@@ -93,8 +93,8 @@ def vacuum_lakehouse_tables(
      ----------
      tables : str | List[str] | None
          The table(s) to vacuum. If no tables are specified, all tables in the lakehouse will be optimized.
-     lakehouse : str, default=None
-         The Fabric lakehouse.
+     lakehouse : str | uuid.UUID, default=None
+         The Fabric lakehouse name or ID.
          Defaults to None which resolves to the lakehouse attached to the notebook.
      workspace : str | uuid.UUID, default=None
          The Fabric workspace name or ID used by the lakehouse.
@@ -107,7 +107,6 @@ def vacuum_lakehouse_tables(
          The default retention period is 168 hours (7 days) unless manually configured via table properties.
      """

-     from pyspark.sql import SparkSession
      from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
      from delta import DeltaTable
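
Both maintenance helpers now accept a lakehouse name or UUID. A short sketch, assuming a Fabric notebook session; the table names, lakehouse name, and GUID are placeholders:

from uuid import UUID
from sempy_labs.lakehouse import optimize_lakehouse_tables, vacuum_lakehouse_tables

# Optimize two tables, addressing the lakehouse by its ID (placeholder GUID).
optimize_lakehouse_tables(
    tables=["DimDate", "FactSales"],
    lakehouse=UUID("22222222-2222-2222-2222-222222222222"),
)

# Vacuum all tables in a lakehouse addressed by name (placeholder).
vacuum_lakehouse_tables(
    lakehouse="MyLakehouse",
    retain_n_hours=168,  # 7 days
)
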
sempy_labs/lakehouse/_shortcuts.py
@@ -1,16 +1,19 @@
  import sempy.fabric as fabric
+ import pandas as pd
  from sempy_labs._helper_functions import (
-     resolve_lakehouse_name,
-     resolve_lakehouse_id,
+     resolve_lakehouse_name_and_id,
      resolve_workspace_name_and_id,
      _base_api,
+     _create_dataframe,
  )
+ from sempy._utils._log import log
  from typing import Optional
  import sempy_labs._icons as icons
- from sempy.fabric.exceptions import FabricHTTPException
  from uuid import UUID
+ from sempy.fabric.exceptions import FabricHTTPException


+ @log
  def create_shortcut_onelake(
      table_name: str,
      source_lakehouse: str,
@@ -18,6 +21,8 @@ def create_shortcut_onelake(
      destination_lakehouse: str,
      destination_workspace: Optional[str | UUID] = None,
      shortcut_name: Optional[str] = None,
+     source_path: str = "Tables",
+     destination_path: str = "Tables",
  ):
      """
      Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.
@@ -40,52 +45,78 @@ def create_shortcut_onelake(
          or if no lakehouse attached, resolves to the workspace of the notebook.
      shortcut_name : str, default=None
          The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value.
+     source_path : str, default="Tables"
+         A string representing the full path to the table/file in the source lakehouse, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+     destination_path : str, default="Tables"
+         A string representing the full path where the shortcut is created, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
      """

+     if not (source_path.startswith("Files") or source_path.startswith("Tables")):
+         raise ValueError(
+             f"{icons.red_dot} The 'source_path' parameter must be either 'Files' or 'Tables'."
+         )
+     if not (
+         destination_path.startswith("Files") or destination_path.startswith("Tables")
+     ):
+         raise ValueError(
+             f"{icons.red_dot} The 'destination_path' parameter must be either 'Files' or 'Tables'."
+         )
+
      (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
          source_workspace
      )
-     source_lakehouse_id = resolve_lakehouse_id(source_lakehouse, source_workspace_id)
-     source_lakehouse_name = fabric.resolve_item_name(
-         item_id=source_lakehouse_id, type="Lakehouse", workspace=source_workspace_id
-     )

-     if destination_workspace is None:
-         destination_workspace_name = source_workspace_name
-         destination_workspace_id = source_workspace_id
-     else:
-         destination_workspace_name = destination_workspace
-         destination_workspace_id = fabric.resolve_workspace_id(
-             destination_workspace_name
-         )
+     (source_lakehouse_name, source_lakehouse_id) = resolve_lakehouse_name_and_id(
+         lakehouse=source_lakehouse, workspace=source_workspace_id
+     )

-     destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)
-     destination_lakehouse_id = resolve_lakehouse_id(
-         destination_lakehouse, destination_workspace
+     (destination_workspace_name, destination_workspace_id) = (
+         resolve_workspace_name_and_id(destination_workspace)
      )
-     destination_lakehouse_name = fabric.resolve_item_name(
-         item_id=destination_lakehouse_id,
-         type="Lakehouse",
-         workspace=destination_workspace_id,
+     (destination_lakehouse_name, destination_lakehouse_id) = (
+         resolve_lakehouse_name_and_id(
+             lakehouse=destination_lakehouse, workspace=destination_workspace_id
+         )
      )

      if shortcut_name is None:
          shortcut_name = table_name

-     table_path = f"Tables/{table_name}"
+     source_full_path = f"{source_path}/{table_name}"
+
+     actual_shortcut_name = shortcut_name.replace(" ", "")

      payload = {
-         "path": "Tables",
-         "name": shortcut_name.replace(" ", ""),
+         "path": destination_path,
+         "name": actual_shortcut_name,
          "target": {
              "oneLake": {
-                 "workspaceId": source_workspace_id,
                  "itemId": source_lakehouse_id,
-                 "path": table_path,
+                 "path": source_full_path,
+                 "workspaceId": source_workspace_id,
              }
          },
      }

+     # Check if the shortcut already exists
+     try:
+         response = _base_api(
+             request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts/{destination_path}/{actual_shortcut_name}"
+         )
+         response_json = response.json()
+         del response_json["target"]["type"]
+         if response_json.get("target") == payload.get("target"):
+             print(
+                 f"{icons.info} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace."
+             )
+             return
+         else:
+             raise ValueError(
+                 f"{icons.red_dot} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace but has a different source."
+             )
+     except FabricHTTPException:
+         pass
+
      _base_api(
          request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
          payload=payload,
@@ -94,7 +125,7 @@ def create_shortcut_onelake(
      )

      print(
-         f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name} workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
+         f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
      )
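
A hedged sketch of the new path parameters on create_shortcut_onelake; every workspace, lakehouse, and table name below is a placeholder:

from sempy_labs.lakehouse import create_shortcut_onelake

create_shortcut_onelake(
    table_name="dim_customer",
    source_lakehouse="BronzeLakehouse",     # placeholder
    source_workspace="Engineering",         # placeholder
    destination_lakehouse="GoldLakehouse",  # placeholder
    destination_workspace="Analytics",      # placeholder
    shortcut_name="DimCustomer",
    source_path="Tables/crm",   # new in 0.9.5; must start with "Files" or "Tables"
    destination_path="Tables",  # new in 0.9.5
)

If an identical shortcut already exists at the destination, the function now returns without error; if a shortcut of the same name points at a different source, it raises instead.
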
@@ -136,17 +167,14 @@ def create_shortcut(

      sourceTitle = source_titles[source]

-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-     if lakehouse is None:
-         lakehouse_id = fabric.get_lakehouse_id()
-     else:
-         lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+         lakehouse=lakehouse, workspace=workspace_id
+     )

-     client = fabric.FabricRestClient()
      shortcutActualName = shortcut_name.replace(" ", "")

-     request_body = {
+     payload = {
          "path": "Tables",
          "name": shortcutActualName,
          "target": {
@@ -158,26 +186,21 @@ def create_shortcut(
          },
      }

-     try:
-         response = client.post(
-             f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
-             json=request_body,
-         )
-         if response.status_code == 201:
-             print(
-                 f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse}' lakehouse within"
-                 f" the '{workspace} workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
-             )
-         else:
-             print(response.status_code)
-     except Exception as e:
-         raise ValueError(
-             f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table."
-         ) from e
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
+         method="post",
+         payload=payload,
+         status_codes=201,
+     )
+     print(
+         f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse_name}' lakehouse within"
+         f" the '{workspace_name}' workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
+     )


  def delete_shortcut(
      shortcut_name: str,
+     shortcut_path: str = "Tables",
      lakehouse: Optional[str] = None,
      workspace: Optional[str | UUID] = None,
  ):
@@ -190,7 +213,9 @@ def delete_shortcut(
      ----------
      shortcut_name : str
          The name of the shortcut.
-     lakehouse : str, default=None
+     shortcut_path : str, default="Tables"
+         The path of the shortcut to be deleted. Must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+     lakehouse : str | uuid.UUID, default=None
          The Fabric lakehouse name in which the shortcut resides.
          Defaults to None which resolves to the lakehouse attached to the notebook.
      workspace : str | UUID, default=None
@@ -200,20 +225,15 @@ def delete_shortcut(
      """

      (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+         lakehouse=lakehouse, workspace=workspace_id
+     )

-     if lakehouse is None:
-         lakehouse_id = fabric.get_lakehouse_id()
-         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace_id)
-     else:
-         lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
-
-     client = fabric.FabricRestClient()
-     response = client.delete(
-         f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/Tables/{shortcut_name}"
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}",
+         method="delete",
      )

-     if response.status_code != 200:
-         raise FabricHTTPException(response)
      print(
          f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace_name}' workspace has been deleted."
      )
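
With the new shortcut_path parameter, shortcuts outside the default Tables root can now be deleted. A minimal sketch; the names below are placeholders:

from sempy_labs.lakehouse import delete_shortcut

delete_shortcut(
    shortcut_name="RawSensorData",  # placeholder
    shortcut_path="Files/landing",  # new in 0.9.5; defaults to "Tables"
    lakehouse="BronzeLakehouse",    # placeholder
    workspace="Engineering",        # placeholder
)
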
@@ -245,3 +265,135 @@ def reset_shortcut_cache(workspace: Optional[str | UUID] = None):
      print(
          f"{icons.green_dot} The shortcut cache has been reset for the '{workspace_name}' workspace."
      )
+
+
+ @log
+ def list_shortcuts(
+     lakehouse: Optional[str | UUID] = None,
+     workspace: Optional[str | UUID] = None,
+     path: Optional[str] = None,
+ ) -> pd.DataFrame:
+     """
+     Shows all shortcuts which exist in a Fabric lakehouse and their properties.
+
+     Parameters
+     ----------
+     lakehouse : str | uuid.UUID, default=None
+         The Fabric lakehouse name or ID.
+         Defaults to None which resolves to the lakehouse attached to the notebook.
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the Fabric workspace in which the lakehouse resides.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     path : str, default=None
+         The path within the lakehouse in which to look for shortcuts. If provided, must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+         Defaults to None which will return all shortcuts in the given lakehouse.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+         lakehouse=lakehouse, workspace=workspace_id
+     )
+
+     columns = {
+         "Shortcut Name": "string",
+         "Shortcut Path": "string",
+         "Source Type": "string",
+         "Source Workspace Id": "string",
+         "Source Workspace Name": "string",
+         "Source Item Id": "string",
+         "Source Item Name": "string",
+         "Source Item Type": "string",
+         "OneLake Path": "string",
+         "Connection Id": "string",
+         "Location": "string",
+         "Bucket": "string",
+         "SubPath": "string",
+         "Source Properties Raw": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     # To improve performance, create a dataframe to cache all items for a given workspace
+     itm_clms = {
+         "Id": "string",
+         "Display Name": "string",
+         "Description": "string",
+         "Type": "string",
+         "Workspace Id": "string",
+     }
+     source_items_df = _create_dataframe(columns=itm_clms)
+
+     url = f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
+
+     if path is not None:
+         url += f"?parentPath={path}"
+
+     responses = _base_api(
+         request=url,
+         uses_pagination=True,
+     )
+
+     sources = {
+         "AdlsGen2": "adlsGen2",
+         "AmazonS3": "amazonS3",
+         "Dataverse": "dataverse",
+         "ExternalDataShare": "externalDataShare",
+         "GoogleCloudStorage": "googleCloudStorage",
+         "OneLake": "oneLake",
+         "S3Compatible": "s3Compatible",
+     }
+
+     for r in responses:
+         for i in r.get("value", []):
+             tgt = i.get("target", {})
+             tgt_type = tgt.get("type")
+             connection_id = tgt.get(sources.get(tgt_type), {}).get("connectionId")
+             location = tgt.get(sources.get(tgt_type), {}).get("location")
+             sub_path = tgt.get(sources.get(tgt_type), {}).get("subpath")
+             source_workspace_id = tgt.get(sources.get(tgt_type), {}).get("workspaceId")
+             source_item_id = tgt.get(sources.get(tgt_type), {}).get("itemId")
+             bucket = tgt.get(sources.get(tgt_type), {}).get("bucket")
+             source_workspace_name = (
+                 fabric.resolve_workspace_name(source_workspace_id)
+                 if source_workspace_id is not None
+                 else None
+             )
+             # Use the cache to get the item type and name
+             source_item_type = None
+             source_item_name = None
+             dfI = source_items_df[
+                 source_items_df["Workspace Id"] == source_workspace_id
+             ]
+             if dfI.empty:
+                 dfI = fabric.list_items(workspace=source_workspace_id)
+                 source_items_df = pd.concat([source_items_df, dfI], ignore_index=True)
+
+             dfI_filt = dfI[dfI["Id"] == source_item_id]
+             if not dfI_filt.empty:
+                 source_item_type = dfI_filt["Type"].iloc[0]
+                 source_item_name = dfI_filt["Display Name"].iloc[0]
+
+             new_data = {
+                 "Shortcut Name": i.get("name"),
+                 "Shortcut Path": i.get("path"),
+                 "Source Type": tgt_type,
+                 "Source Workspace Id": source_workspace_id,
+                 "Source Workspace Name": source_workspace_name,
+                 "Source Item Id": source_item_id,
+                 "Source Item Name": source_item_name,
+                 "Source Item Type": source_item_type,
+                 "OneLake Path": tgt.get(sources.get("oneLake"), {}).get("path"),
+                 "Connection Id": connection_id,
+                 "Location": location,
+                 "Bucket": bucket,
+                 "SubPath": sub_path,
+                 "Source Properties Raw": str(tgt),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
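
A short usage sketch for the new list_shortcuts function, run from a Fabric notebook; the lakehouse and workspace names are placeholders, and the selected columns come from the dataframe schema defined above:

from sempy_labs.lakehouse import list_shortcuts

# All shortcuts in the attached lakehouse.
df_all = list_shortcuts()

# Only shortcuts under Tables in a specific lakehouse (placeholder names).
df_tables = list_shortcuts(
    lakehouse="GoldLakehouse",
    workspace="Analytics",
    path="Tables",
)
print(df_tables[["Shortcut Name", "Source Type", "Source Item Name"]])
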