semantic-link-labs 0.11.2__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (93)
  1. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +7 -6
  2. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +90 -84
  3. sempy_labs/__init__.py +18 -18
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +81 -32
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +4 -4
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +107 -70
  10. sempy_labs/_dashboards.py +6 -2
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +1 -1
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_delta_analyzer.py +4 -4
  15. sempy_labs/_delta_analyzer_history.py +1 -1
  16. sempy_labs/_deployment_pipelines.py +1 -1
  17. sempy_labs/_environments.py +1 -1
  18. sempy_labs/_eventhouses.py +9 -3
  19. sempy_labs/_eventstreams.py +1 -1
  20. sempy_labs/_external_data_shares.py +56 -2
  21. sempy_labs/_gateways.py +14 -7
  22. sempy_labs/_generate_semantic_model.py +7 -12
  23. sempy_labs/_git.py +1 -1
  24. sempy_labs/_graphQL.py +1 -1
  25. sempy_labs/_helper_functions.py +293 -22
  26. sempy_labs/_job_scheduler.py +12 -1
  27. sempy_labs/_kql_databases.py +1 -1
  28. sempy_labs/_kql_querysets.py +10 -2
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_labels.py +126 -0
  31. sempy_labs/_list_functions.py +2 -2
  32. sempy_labs/_managed_private_endpoints.py +1 -1
  33. sempy_labs/_mirrored_databases.py +40 -16
  34. sempy_labs/_mirrored_warehouses.py +1 -1
  35. sempy_labs/_ml_experiments.py +1 -1
  36. sempy_labs/_model_bpa.py +6 -6
  37. sempy_labs/_model_bpa_bulk.py +3 -3
  38. sempy_labs/_model_dependencies.py +1 -1
  39. sempy_labs/_mounted_data_factories.py +3 -3
  40. sempy_labs/_notebooks.py +153 -3
  41. sempy_labs/_query_scale_out.py +2 -2
  42. sempy_labs/_refresh_semantic_model.py +1 -1
  43. sempy_labs/_semantic_models.py +15 -3
  44. sempy_labs/_spark.py +1 -1
  45. sempy_labs/_sql.py +3 -3
  46. sempy_labs/_sql_endpoints.py +5 -3
  47. sempy_labs/_sqldatabase.py +5 -1
  48. sempy_labs/_tags.py +3 -1
  49. sempy_labs/_translations.py +7 -360
  50. sempy_labs/_user_delegation_key.py +2 -2
  51. sempy_labs/_utils.py +27 -0
  52. sempy_labs/_vertipaq.py +3 -3
  53. sempy_labs/_vpax.py +1 -1
  54. sempy_labs/_warehouses.py +5 -0
  55. sempy_labs/_workloads.py +1 -1
  56. sempy_labs/_workspace_identity.py +1 -1
  57. sempy_labs/_workspaces.py +145 -11
  58. sempy_labs/admin/__init__.py +6 -0
  59. sempy_labs/admin/_capacities.py +34 -11
  60. sempy_labs/admin/_items.py +2 -2
  61. sempy_labs/admin/_tenant_keys.py +89 -0
  62. sempy_labs/directlake/_dl_helper.py +5 -2
  63. sempy_labs/graph/_users.py +3 -5
  64. sempy_labs/lakehouse/__init__.py +4 -0
  65. sempy_labs/lakehouse/_helper.py +18 -9
  66. sempy_labs/lakehouse/_lakehouse.py +18 -9
  67. sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
  68. sempy_labs/lakehouse/_shortcuts.py +8 -2
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
  73. sempy_labs/ml_model/__init__.py +23 -0
  74. sempy_labs/ml_model/_functions.py +427 -0
  75. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  76. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  77. sempy_labs/report/_download_report.py +4 -1
  78. sempy_labs/report/_export_report.py +12 -5
  79. sempy_labs/report/_generate_report.py +11 -3
  80. sempy_labs/report/_paginated.py +21 -15
  81. sempy_labs/report/_report_functions.py +19 -11
  82. sempy_labs/report/_report_rebind.py +21 -10
  83. sempy_labs/report/_reportwrapper.py +1 -1
  84. sempy_labs/theme/_org_themes.py +5 -6
  85. sempy_labs/tom/_model.py +13 -19
  86. sempy_labs/variable_library/__init__.py +19 -0
  87. sempy_labs/variable_library/_functions.py +403 -0
  88. sempy_labs/_dax_query_view.py +0 -57
  89. sempy_labs/_ml_models.py +0 -111
  90. sempy_labs/_variable_libraries.py +0 -92
  91. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
  92. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
  93. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,5 @@
 import sempy.fabric as fabric
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     is_default_semantic_model,
     _get_adls_client,
     resolve_workspace_name_and_id,
sempy_labs/_connections.py CHANGED
@@ -1,8 +1,8 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
-    resolve_workspace_name_and_id,
+    resolve_workspace_id,
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
@@ -10,8 +10,9 @@ from ._helper_functions import (
 )
 from uuid import UUID
 import sempy_labs._icons as icons
-from ._gateways import _resolve_gateway_id
+from sempy_labs._gateways import _resolve_gateway_id
 from sempy._utils._log import log
+import warnings


 @log
@@ -68,13 +69,13 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
 @log
 def _resolve_connection_id(connection: str | UUID) -> UUID:

-    dfC = list_connections()
     if _is_valid_uuid(connection):
-        dfC_filt = dfC[dfC["Connection Id"] == connection]
-    else:
-        dfC_filt = dfC[dfC["Connection Name"] == connection]
+        return connection
+
+    dfC = list_connections()
+    dfC_filt = dfC[dfC["Connection Name"] == connection]

-    if len(dfC_filt) == 0:
+    if dfC_filt.empty:
         raise ValueError(
             f"{icons.red_dot} The '{connection}' is not a valid connection."
         )
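Both this resolver and _resolve_gateway_id further below now return early when the caller already passed an ID, so the paginated list call is only made when a display name actually has to be looked up. A minimal standalone sketch of the pattern (lookup_id_by_name is a hypothetical stand-in for the list API):

    from uuid import UUID

    def _is_valid_uuid(value) -> bool:
        # True when the value parses as a UUID.
        try:
            UUID(str(value))
            return True
        except ValueError:
            return False

    def resolve_id(ref, lookup_id_by_name):
        # An ID needs no lookup: return it and skip the API round trip.
        if _is_valid_uuid(ref):
            return ref
        resolved = lookup_id_by_name(ref)  # hypothetical name -> ID lookup
        if resolved is None:
            raise ValueError(f"'{ref}' is not a valid reference.")
        return resolved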
@@ -119,16 +120,20 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:
         uses_pagination=True,
     )

+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Connection Role Assignment Id": v.get("id"),
-                "Principal Id": v.get("principal", {}).get("id"),
-                "Principal Type": v.get("principal", {}).get("type"),
-                "Role": v.get("role"),
-            }
+            rows.append(
+                {
+                    "Connection Role Assignment Id": v.get("id"),
+                    "Principal Id": v.get("principal", {}).get("id"),
+                    "Principal Type": v.get("principal", {}).get("type"),
+                    "Role": v.get("role"),
+                }
+            )

-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
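The change above is the recurring pattern of this release: instead of growing the DataFrame with pd.concat on every record, which re-copies the whole frame each iteration, rows are collected as plain dicts and the frame is built once. A minimal sketch of the two patterns with made-up records:

    import pandas as pd

    records = [{"id": "a", "role": "Admin"}, {"id": "b", "role": "User"}]
    columns = {"Connection Role Assignment Id": "string", "Role": "string"}

    # 0.11.x pattern: one concat, i.e. one full copy of df, per record.
    df = pd.DataFrame(columns=list(columns.keys()))
    for v in records:
        new_data = {"Connection Role Assignment Id": v["id"], "Role": v["role"]}
        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    # 0.12.0 pattern: accumulate dicts, build the frame once at the end.
    rows = [{"Connection Role Assignment Id": v["id"], "Role": v["role"]} for v in records]
    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))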
 
@@ -165,51 +170,57 @@ def list_connections() -> pd.DataFrame:
         request="/v1/connections", client="fabric_sp", uses_pagination=True
     )

+    rows = []
     for r in responses:
         for i in r.get("value", []):
             connection_details = i.get("connectionDetails", {})
             credential_details = i.get("credentialDetails", {})

-            new_data = {
-                "Connection Id": i.get("id"),
-                "Connection Name": i.get("displayName"),
-                "Gateway Id": i.get("gatewayId"),
-                "Connectivity Type": i.get("connectivityType"),
-                "Connection Path": connection_details.get("path"),
-                "Connection Type": connection_details.get("type"),
-                "Privacy Level": i.get("privacyLevel"),
-                "Credential Type": (
-                    credential_details.get("credentialType")
-                    if credential_details
-                    else None
-                ),
-                "Single Sign On Type": (
-                    credential_details.get("singleSignOnType")
-                    if credential_details
-                    else None
-                ),
-                "Connection Encryption": (
-                    credential_details.get("connectionEncryption")
-                    if credential_details
-                    else None
-                ),
-                "Skip Test Connection": (
-                    credential_details.get("skipTestConnection")
-                    if credential_details
-                    else None
-                ),
-            }
-
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+            rows.append(
+                {
+                    "Connection Id": i.get("id"),
+                    "Connection Name": i.get("displayName"),
+                    "Gateway Id": i.get("gatewayId"),
+                    "Connectivity Type": i.get("connectivityType"),
+                    "Connection Path": connection_details.get("path"),
+                    "Connection Type": connection_details.get("type"),
+                    "Privacy Level": i.get("privacyLevel"),
+                    "Credential Type": (
+                        credential_details.get("credentialType")
+                        if credential_details
+                        else None
+                    ),
+                    "Single Sign On Type": (
+                        credential_details.get("singleSignOnType")
+                        if credential_details
+                        else None
+                    ),
+                    "Connection Encryption": (
+                        credential_details.get("connectionEncryption")
+                        if credential_details
+                        else None
+                    ),
+                    "Skip Test Connection": (
+                        credential_details.get("skipTestConnection")
+                        if credential_details
+                        else None
+                    ),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


 @log
 def list_item_connections(
-    item_name: str, item_type: str, workspace: Optional[str | UUID] = None
+    item: Optional[str | UUID] = None,
+    type: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    **kwargs,
 ) -> pd.DataFrame:
     """
     Shows the list of connections that the specified item is connected to.
@@ -220,9 +231,9 @@ def list_item_connections(

     Parameters
     ----------
-    item_name : str
-        The item name.
-    item_type : str
+    item : str | uuid.UUID
+        The item name or ID.
+    type : str
         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/update-item?tabs=HTTP#itemtype>`_.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
@@ -235,9 +246,32 @@ def list_item_connections(
         A pandas dataframe showing the list of connections that the specified item is connected to.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_type = item_type[0].upper() + item_type[1:]
-    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
+    if "item_name" in kwargs:
+        if item is not None:
+            raise TypeError("Cannot specify both 'item' and 'item_name'")
+        item = kwargs.pop("item_name")
+        warnings.warn(
+            "'item_name' parameter is deprecated, use 'item' instead.",
+            FutureWarning,
+            stacklevel=2,
+        )
+    if "item_type" in kwargs:
+        if type is not None:
+            raise TypeError("Cannot specify both 'type' and 'item_type'")
+        type = kwargs.pop("item_type")
+        warnings.warn(
+            "'item_type' parameter is deprecated, use 'type' instead.",
+            FutureWarning,
+            stacklevel=2,
+        )
+
+    if item is None or type is None:
+        raise TypeError(
+            "Missing required parameters: 'item' and 'type' must be provided either directly or via 'item_name' and 'item_type'."
+        )
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)

     columns = {
         "Connection Name": "string",
@@ -255,18 +289,22 @@ def list_item_connections(
         uses_pagination=True,
     )

+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Connection Name": v.get("displayName"),
-                "Connection Id": v.get("id"),
-                "Connectivity Type": v.get("connectivityType"),
-                "Connection Type": v.get("connectionDetails", {}).get("type"),
-                "Connection Path": v.get("connectionDetails", {}).get("path"),
-                "Gateway Id": v.get("gatewayId"),
-            }
+            rows.append(
+                {
+                    "Connection Name": v.get("displayName"),
+                    "Connection Id": v.get("id"),
+                    "Connectivity Type": v.get("connectivityType"),
+                    "Connection Type": v.get("connectionDetails", {}).get("type"),
+                    "Connection Path": v.get("connectionDetails", {}).get("path"),
+                    "Gateway Id": v.get("gatewayId"),
+                }
+            )

-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
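The **kwargs shim above keeps 0.11.x call sites working while steering callers to the new parameter names; passing both spellings raises a TypeError rather than silently preferring one. Note also that the old automatic capitalization (item_type[0].upper() + item_type[1:]) is gone, so the type should be passed in REST API casing. Migrating a call, assuming a workspace containing a 'Sales' semantic model:

    import sempy_labs as labs

    # Old spelling still works but now emits a FutureWarning:
    labs.list_item_connections(item_name="Sales", item_type="SemanticModel")

    # New spelling:
    labs.list_item_connections(item="Sales", type="SemanticModel")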
 
@@ -293,10 +331,10 @@ def _list_supported_connection_types(
     url = url.rstrip("&")
     responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

-    records = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            records.append(
+            rows.append(
                 {
                     "Connection Type": v.get("type"),
                     "Creation Method": v["creationMethods"][0]["name"],
@@ -310,10 +348,9 @@ def _list_supported_connection_types(
                 }
             )

-    if records:
-        df = pd.DataFrame(records)
-
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/_dashboards.py CHANGED
@@ -1,7 +1,7 @@
 from typing import Optional
 from uuid import UUID
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _create_dataframe,
     _base_api,
     resolve_workspace_id,
@@ -15,6 +15,8 @@ def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
     Shows a list of the dashboards within a workspace.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -42,7 +44,9 @@ def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:

     workspace_id = resolve_workspace_id(workspace)

-    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+    response = _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/dashboards", client="fabric_sp"
+    )

     rows = []
     for v in response.json().get("value", []):
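list_dashboards now routes through the fabric_sp client, one of many functions in this release whose docstrings gain the "Service Principal Authentication is supported" note. A hedged sketch of running it under a service principal via the service_principal_authentication context manager shown in the library's Service Principal notebook (the Key Vault parameter names below follow that notebook and should be treated as assumptions):

    import sempy_labs as labs

    # Hypothetical vault URI and secret names; the secrets hold the SP credentials.
    with labs.service_principal_authentication(
        key_vault_uri="https://contoso-kv.vault.azure.net/",
        key_vault_tenant_id="tenant-id-secret-name",
        key_vault_client_id="client-id-secret-name",
        key_vault_client_secret="client-secret-secret-name",
    ):
        df = labs.list_dashboards(workspace="Sales Workspace")  # runs as the SP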
sempy_labs/_data_pipelines.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _decode_b64,
     _base_api,
sempy_labs/_dataflows.py CHANGED
@@ -1,5 +1,5 @@
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _update_dataframe_datatypes,
sempy_labs/_dax.py CHANGED
@@ -1,17 +1,17 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     format_dax_object_name,
     resolve_dataset_name_and_id,
     _base_api,
     generate_guid,
 )
-from ._model_dependencies import get_model_calc_dependencies
+from sempy_labs._model_dependencies import get_model_calc_dependencies
 from typing import Optional, List, Tuple
 from sempy._utils._log import log
 from uuid import UUID
-from .directlake._warm_cache import _put_columns_into_memory
+from sempy_labs.directlake._warm_cache import _put_columns_into_memory
 import sempy_labs._icons as icons
 import time

sempy_labs/_delta_analyzer.py CHANGED
@@ -5,7 +5,7 @@ import os
 from uuid import UUID
 from typing import Dict, Optional
 import pyarrow.parquet as pq
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     create_abfss_path,
     save_as_delta_table,
     _get_column_aggregate,
@@ -21,11 +21,11 @@ from ._helper_functions import (
     _get_delta_table,
 )
 from sempy._utils._log import log
-from .lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from .lakehouse._lakehouse import (
+from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from sempy_labs.lakehouse._lakehouse import (
     lakehouse_attached,
 )
-from .lakehouse._helper import (
+from sempy_labs.lakehouse._helper import (
     is_v_ordered,
 )
 import sempy_labs._icons as icons
sempy_labs/_delta_analyzer_history.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 from typing import Optional
 import pyarrow.parquet as pq
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     create_abfss_path,
     resolve_workspace_id,
     resolve_lakehouse_id,
sempy_labs/_deployment_pipelines.py CHANGED
@@ -1,5 +1,5 @@
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _base_api,
     _update_dataframe_datatypes,
sempy_labs/_environments.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_workspace_id,
     _base_api,
sempy_labs/_eventhouses.py CHANGED
@@ -1,12 +1,12 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     _conv_b64,
     delete_item,
     create_item,
-    get_item_definition,
+    _get_item_definition,
     resolve_workspace_id,
 )
 from uuid import UUID
@@ -26,6 +26,8 @@ def create_eventhouse(

     This is a wrapper function for the following API: `Items - Create Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventhouse>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -127,6 +129,8 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):

     This is a wrapper function for the following API: `Items - Delete Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventhouse>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -151,6 +155,8 @@ def get_eventhouse_definition(

     This is a wrapper function for the following API: `Items - Get Eventhouse Definition <https://learn.microsoft.com/rest/api/fabric/eventhouse/items/get-eventhouse-definition>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     eventhouse : str
@@ -168,7 +174,7 @@ def get_eventhouse_definition(
         The eventhouse definition in .json format or as a pandas dataframe.
     """

-    return get_item_definition(
+    return _get_item_definition(
         item=eventhouse,
         type="Eventhouse",
         workspace=workspace,
sempy_labs/_eventstreams.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     delete_item,
     _create_dataframe,
sempy_labs/_external_data_shares.py CHANGED
@@ -2,11 +2,13 @@ from uuid import UUID
 import pandas as pd
 from typing import Optional, List
 import sempy_labs._icons as icons
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
     resolve_item_id,
+    resolve_item_name_and_id,
+    resolve_workspace_id,
 )
 from sempy._utils._log import log

@@ -24,6 +26,8 @@ def create_external_data_share(

     This is a wrapper function for the following API: `External Data Shares - Create External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/create-external-data-share>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -53,6 +57,7 @@ def create_external_data_share(
         method="post",
         status_codes=201,
         payload=payload,
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
@@ -71,6 +76,8 @@ def revoke_external_data_share(

     This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/revoke-external-data-share`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     external_data_share_id : uuid.UUID
@@ -91,6 +98,7 @@ def revoke_external_data_share(
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
         method="post",
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
@@ -106,6 +114,8 @@ def list_external_data_shares_in_item(

     This is a wrapper function for the following API: `External Data Shares - List External Data Shares In Item <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -123,7 +133,7 @@ def list_external_data_shares_in_item(
         A pandas dataframe showing a list of the external data shares that exist for the specified item.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)

     columns = {
@@ -145,6 +155,7 @@ def list_external_data_shares_in_item(
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
         uses_pagination=True,
+        client="fabric_sp",
     )

     rows = []
@@ -174,3 +185,46 @@ def list_external_data_shares_in_item(
         df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
+
+
+@log
+def delete_external_data_share(
+    external_data_share_id: UUID,
+    item: str | UUID,
+    item_type: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Deletes the specified external data share.
+
+    This is a wrapper function for the following API: `External Data Shares Provider - Delete External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares-provider/delete-external-data-share`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    external_data_share_id : uuid.UUID
+        The external data share ID.
+    item : str | uuid.UUID
+        The item name or ID.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=item_type, workspace=workspace_id
+    )
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}",
+        method="delete",
+        client="fabric_sp",
+    )
+    print(
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
+    )
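The new delete_external_data_share completes the share lifecycle alongside create, list, and revoke. A sketch of the call with placeholder values:

    import sempy_labs as labs

    labs.delete_external_data_share(
        external_data_share_id="00000000-0000-0000-0000-000000000000",  # placeholder ID
        item="Sales",            # hypothetical lakehouse name (or its ID)
        item_type="Lakehouse",
        workspace="Analytics",   # hypothetical workspace
    )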
sempy_labs/_gateways.py CHANGED
@@ -77,16 +77,16 @@ def list_gateways() -> pd.DataFrame:
 @log
 def _resolve_gateway_id(gateway: str | UUID) -> UUID:

-    dfG = list_gateways()
     if _is_valid_uuid(gateway):
-        dfG_filt = dfG[dfG["Gateway Id"] == gateway]
+        return gateway
     else:
+        dfG = list_gateways()
         dfG_filt = dfG[dfG["Gateway Name"] == gateway]

-    if len(dfG_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The '{gateway}' does not exist.")
+        if dfG_filt.empty:
+            raise ValueError(f"{icons.red_dot} The '{gateway}' gateway does not exist.")

-    return dfG_filt["Gateway Id"].iloc[0]
+        return dfG_filt["Gateway Id"].iloc[0]


 @log
@@ -472,7 +472,10 @@ def update_vnet_gateway(

 @log
 def bind_semantic_model_to_gateway(
-    dataset: str | UUID, gateway: str | UUID, workspace: Optional[str | UUID] = None
+    dataset: str | UUID,
+    gateway: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    data_source_object_ids: Optional[list[UUID]] = None,
 ):
     """
     Binds the specified dataset from the specified workspace to the specified gateway.
@@ -488,9 +491,11 @@ def bind_semantic_model_to_gateway(
     gateway : str | uuid.UUID
         The name or ID of the gateway.
     workspace : str | uuid.UUID, default=None
-        The Fabric workspace name.
+        The workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    data_source_object_ids : list[uuid.UUID], default=None
+        A list of data source object IDs to bind to the gateway.
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -502,6 +507,8 @@ def bind_semantic_model_to_gateway(
     payload = {
         "gatewayObjectId": gateway_id,
     }
+    if data_source_object_ids is not None:
+        payload["datasourceObjectIds"] = data_source_object_ids

     _base_api(
         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
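With the new optional parameter, a bind can also pin specific gateway data sources in the same call; when it is omitted, the payload is unchanged from 0.11.x. A sketch with placeholder names and IDs:

    import sempy_labs as labs

    labs.bind_semantic_model_to_gateway(
        dataset="Sales Model",       # hypothetical semantic model
        gateway="Corp Gateway",      # gateway name or ID
        data_source_object_ids=[
            "11111111-1111-1111-1111-111111111111",  # placeholder data source ID
        ],
    )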