semantic-link-labs 0.12.1__py3-none-any.whl → 0.12.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (32)
  1. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/METADATA +4 -2
  2. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/RECORD +32 -26
  3. sempy_labs/__init__.py +12 -0
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_data_access_security.py +98 -0
  6. sempy_labs/_data_pipelines.py +23 -9
  7. sempy_labs/_dataflows.py +0 -1
  8. sempy_labs/_deployment_pipelines.py +49 -27
  9. sempy_labs/_eventstreams.py +9 -1
  10. sempy_labs/_generate_semantic_model.py +2 -2
  11. sempy_labs/_get_connection_string.py +84 -0
  12. sempy_labs/_helper_functions.py +17 -1
  13. sempy_labs/_job_scheduler.py +63 -33
  14. sempy_labs/_labels.py +4 -6
  15. sempy_labs/_model_dependencies.py +5 -2
  16. sempy_labs/_semantic_models.py +118 -0
  17. sempy_labs/_sql_endpoints.py +12 -24
  18. sempy_labs/_warehouses.py +1 -1
  19. sempy_labs/admin/__init__.py +6 -0
  20. sempy_labs/admin/_sharing_links.py +110 -0
  21. sempy_labs/graph/__init__.py +16 -0
  22. sempy_labs/graph/_groups.py +157 -2
  23. sempy_labs/graph/_sensitivity_labels.py +81 -0
  24. sempy_labs/graph/_users.py +162 -0
  25. sempy_labs/lakehouse/_shortcuts.py +16 -11
  26. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  27. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  28. sempy_labs/report/_reportwrapper.py +53 -6
  29. sempy_labs/tom/_model.py +49 -18
  30. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/WHEEL +0 -0
  31. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/licenses/LICENSE +0 -0
  32. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/top_level.txt +0 -0
sempy_labs/_get_connection_string.py ADDED
@@ -0,0 +1,84 @@
+ from sempy_labs._helper_functions import (
+     resolve_item_id,
+     _base_api,
+     resolve_workspace_id,
+ )
+ from typing import Optional, Literal
+ import sempy_labs._icons as icons
+ from uuid import UUID
+ from sempy._utils._log import log
+
+
+ @log
+ def get_connection_string(
+     item: str | UUID,
+     type: Literal["Lakehouse", "Warehouse", "SQLEndpoint"],
+     workspace: Optional[str | UUID] = None,
+     guest_tenant_id: Optional[UUID] = None,
+     private_link_type: Optional[str] = None,
+ ) -> str:
+     """
+     Returns the SQL connection string of the specified item.
+
+     Parameters
+     ----------
+     item : str | uuid.UUID
+         The name or ID of the item (Lakehouse, Warehouse or SQL endpoint).
+     type : Literal['Lakehouse', 'Warehouse', 'SQLEndpoint']
+         The type of the item. Must be 'Lakehouse', 'Warehouse' or 'SQLEndpoint'.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     guest_tenant_id : uuid.UUID, default=None
+         The guest tenant ID if the end user's tenant is different from the warehouse tenant.
+     private_link_type : str, default=None
+         Indicates the type of private link this connection string uses. Must be either 'Workspace' or 'None' or left as None.
+
+     Returns
+     -------
+     str
+         Returns the SQL connection string of the specified item.
+     """
+     workspace_id = resolve_workspace_id(workspace)
+     item_id = resolve_item_id(item=item, type=type, workspace=workspace)
+
+     type_dict = {
+         "Warehouse": "warehouses",
+         "SQLEndpoint": "sqlEndpoints",
+     }
+     type_for_url = type_dict.get(type)
+
+     if type == "Lakehouse":
+         response = _base_api(
+             f"/v1/workspaces/{workspace_id}/lakehouses/{item_id}", client="fabric_sp"
+         ).json()
+         return (
+             response.get("properties", {})
+             .get("sqlEndpointProperties", {})
+             .get("connectionString", {})
+         )
+     if type in ["SQLEndpoint", "Warehouse"]:
+         url = f"/v1/workspaces/{workspace_id}/{type_for_url}/{item_id}/connectionString"
+     else:
+         raise ValueError(
+             f"{icons.red_dot} The type must be 'Lakehouse', 'Warehouse' or 'SQLEndpoint'."
+         )
+
+     if private_link_type is not None and private_link_type not in ["Workspace", "None"]:
+         raise ValueError(
+             f"{icons.red_dot} private_link_type must be 'Workspace' or 'None' or left as None."
+         )
+
+     if guest_tenant_id or private_link_type:
+         params = []
+         if guest_tenant_id:
+             params.append(f"guestTenantId={guest_tenant_id}")
+         if private_link_type:
+             params.append(f"privateLinkType={private_link_type}")
+         param_str = "?" + "&".join(params)
+         url += param_str
+
+     response = _base_api(request=url, client="fabric_sp")
+
+     return response.json().get("connectionString")
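The new module is a thin wrapper over the Fabric REST endpoints. A minimal usage sketch, not part of the diff: it assumes the function is re-exported from the package root (the sempy_labs/__init__.py +12 -0 change above suggests new exports), and the item and workspace names are placeholders.

from uuid import UUID
from sempy_labs import get_connection_string

# Plain connection string for a warehouse (names are illustrative only).
conn = get_connection_string(item="WH_Sales", type="Warehouse", workspace="Sales")

# Optional arguments are appended as query parameters
# (?guestTenantId=...&privateLinkType=...).
conn_guest = get_connection_string(
    item="WH_Sales",
    type="Warehouse",
    workspace="Sales",
    guest_tenant_id=UUID("11111111-2222-3333-4444-555555555555"),
    private_link_type="Workspace",
)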
sempy_labs/_helper_functions.py CHANGED
@@ -303,6 +303,7 @@ def create_item(
        payload=payload,
        status_codes=[201, 202],
        lro_return_status_code=True,
+       client="fabric_sp",
    )
    print(
        f"{icons.green_dot} The '{name}' {item_type} has been successfully created within the '{workspace_name}' workspace."
@@ -407,7 +408,8 @@ def copy_item(
    # Get the existing source model
    if type == "Report" and keep_existing_bindings:
        result = _base_api(
-           request=f"v1.0/myorg/groups/{target_workspace_id}/reports/{target_item_id}"
+           request=f"v1.0/myorg/groups/{target_workspace_id}/reports/{target_item_id}",
+           client="fabric_sp",
        ).json()
        dataset_id = result.get("datasetId")
        dataset_workspace_id = result.get("datasetWorkspaceId")
@@ -524,6 +526,7 @@ def get_item_definition(
        method="post",
        status_codes=None,
        lro_return_json=True,
+       client="fabric_sp",
    )

    if return_dataframe:
@@ -546,6 +549,7 @@ def get_item_definition(
            definition["definition"]["parts"].append(
                {"path": path, "payload": decoded_payload}
            )
+       return definition
    else:
        return result

@@ -574,6 +578,7 @@ def _get_item_definition(
        method="post",
        status_codes=None,
        lro_return_json=True,
+       client="fabric_sp",
    )

    if return_dataframe:
@@ -2758,3 +2763,14 @@ def remove_json_value(path: str, payload: dict, json_path: str, verbose: bool =
        print(f"{icons.green_dot} Removed index [{index}] from '{path}'.")

    return payload
+
+
+ def _get_url_prefix() -> str:
+
+     client = fabric.PowerBIRestClient()
+     response = client.get("/v1.0/myorg/capacities")
+     if response.status_code != 200:
+         raise FabricHTTPException("Failed to retrieve URL prefix.")
+     context = response.json().get("@odata.context")
+
+     return context.split("/v1.0")[0]
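The new _get_url_prefix helper derives the cluster-specific base URL by splitting the capacities response's @odata.context on "/v1.0". A quick illustration; the context value below is invented for demonstration only.

# Sample @odata.context value (made up for illustration):
context = "https://wabi-west-us-redirect.analysis.windows.net/v1.0/myorg/$metadata#capacities"
prefix = context.split("/v1.0")[0]
print(prefix)  # https://wabi-west-us-redirect.analysis.windows.net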
sempy_labs/_job_scheduler.py CHANGED
@@ -8,6 +8,7 @@ from sempy_labs._helper_functions import (
    _base_api,
    _create_dataframe,
    resolve_workspace_id,
+   resolve_item_id,
)
from uuid import UUID
import sempy_labs._icons as icons
@@ -113,7 +114,7 @@ def _get_item_job_instance(url: str) -> pd.DataFrame:
    }
    df = _create_dataframe(columns=columns)

-   response = _base_api(request=url)
+   response = _base_api(request=url, client="fabric_sp")

    rows = []
    for v in response.json().get("value", []):
@@ -171,11 +172,9 @@ def list_item_schedules(
    """

    workspace_id = resolve_workspace_id(workspace)
-   (item_name, item_id) = resolve_item_name_and_id(
-       item=item, type=type, workspace=workspace_id
-   )
+   item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)

-   columns = {
+   base_columns = {
        "Job Schedule Id": "string",
        "Enabled": "bool",
        "Created Date Time": "datetime",
@@ -183,48 +182,75 @@ def list_item_schedules(
        "End Date Time": "string",
        "Local Time Zone Id": "string",
        "Type": "string",
-       "Interval": "string",
-       "Weekdays": "string",
-       "Times": "string",
        "Owner Id": "string",
        "Owner Type": "string",
-       "Recurrence": "int",
+   }
+
+   optional_columns = {
+       "Occurrence Day of Month": "int_fillna",
+       "Occurrence Week Index": "string",
+       "Occurrence Weekday": "string",
        "Occurrence Type": "string",
-       "Occurrence Day of Month": "int",
+       "Interval": "int_fillna",
+       "Times": "string",
+       "Recurrence": "int_fillna",
+       "Weekdays": "string",
    }
-   df = _create_dataframe(columns=columns)

    response = _base_api(
-       request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
+       request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
+       client="fabric_sp",
    )

    rows = []
    for v in response.json().get("value", []):
        config = v.get("configuration", {})
        own = v.get("owner", {})
-       rows.append(
-           {
-               "Job Schedule Id": v.get("id"),
-               "Enabled": v.get("enabled"),
-               "Created Date Time": v.get("createdDateTime"),
-               "Start Date Time": config.get("startDateTime"),
-               "End Date Time": config.get("endDateTime"),
-               "Local Time Zone Id": config.get("localTimeZoneId"),
-               "Type": config.get("type"),
-               "Interval": config.get("interval"),
-               "Weekdays": config.get("weekdays"),
-               "Times": config.get("times"),
-               "Owner Id": own.get("id"),
-               "Owner Type": own.get("type"),
-               "Recurrence": config.get("recurrence"),
-               "Occurrence Type": config.get("occurence", {}).get("occurrenceType"),
-               "Occurrence Day of Month": config.get("occurrence", {}).get(
-                   "dayOfMonth"
-               ),
-           }
-       )
+       occurrence = config.get("occurrence", {})
+       type = config.get("type")
+
+       row = {
+           "Job Schedule Id": v.get("id"),
+           "Enabled": v.get("enabled"),
+           "Created Date Time": v.get("createdDateTime"),
+           "Start Date Time": config.get("startDateTime"),
+           "End Date Time": config.get("endDateTime"),
+           "Local Time Zone Id": config.get("localTimeZoneId"),
+           "Type": type,
+           "Owner Id": own.get("id"),
+           "Owner Type": own.get("type"),
+       }
+
+       if type == "Cron":
+           row["Interval"] = config.get("interval")
+       elif type == "Daily":
+           row["Times"] = config.get("times")
+       elif type == "Weekly":
+           row["Times"] = config.get("times")
+           row["Weekdays"] = config.get("weekdays")
+       elif type == "Monthly":
+           occurrence_type = occurrence.get("occurrenceType")
+           row["Times"] = config.get("times")
+           row["Recurrence"] = config.get("recurrence")
+           row["Occurrence Type"] = occurrence_type
+
+           if occurrence_type == "OrdinalWeekday":
+               row["Occurrence Week Index"] = occurrence.get("weekIndex")
+               row["Occurrence Weekday"] = occurrence.get("weekday")
+           elif occurrence_type == "DayOfMonth":
+               row["Occurrence Day of Month"] = occurrence.get("dayOfMonth")
+
+       rows.append(row)
+
+   # Build final column map based on what was actually present
+   columns = base_columns.copy()

    if rows:
+       # Find which optional columns were actually included in rows
+       all_used_columns = set().union(*(r.keys() for r in rows))
+       for col in all_used_columns:
+           if col in optional_columns:
+               columns[col] = optional_columns[col]
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)
@@ -267,6 +293,7 @@ def run_on_demand_item_job(
        method="post",
        lro_return_status_code=True,
        status_codes=202,
+       client="fabric_sp",
    )

    print(f"{icons.green_dot} The '{item_name}' {type.lower()} has been executed.")
@@ -334,6 +361,7 @@ def create_item_schedule_cron(
        method="post",
        payload=payload,
        status_codes=201,
+       client="fabric_sp",
    )

    print(
@@ -403,6 +431,7 @@ def create_item_schedule_daily(
        method="post",
        payload=payload,
        status_codes=201,
+       client="fabric_sp",
    )

    print(
@@ -492,6 +521,7 @@ def create_item_schedule_weekly(
        method="post",
        payload=payload,
        status_codes=201,
+       client="fabric_sp",
    )

    print(
sempy_labs/_labels.py CHANGED
@@ -5,6 +5,9 @@ from typing import Optional, Union
from uuid import UUID
from sempy.fabric.exceptions import FabricHTTPException
from sempy._utils._log import log
+ from sempy_labs._helper_functions import (
+     _get_url_prefix,
+ )


@log
@@ -87,12 +90,7 @@ def list_item_labels(workspace: Optional[Union[str, UUID]] = None) -> pd.DataFrame:
    if artifact_ids:
        payload["artifacts"] = [{"artifactId": i} for i in artifact_ids]

-   client = fabric.PowerBIRestClient()
-   response = client.get("/v1.0/myorg/capacities")
-   if response.status_code != 200:
-       raise FabricHTTPException("Failed to retrieve URL prefix.")
-   context = response.json().get("@odata.context")
-   prefix = context.split("/v1.0")[0]
+   prefix = _get_url_prefix()

    response = requests.post(
        f"{prefix}/metadata/informationProtection/artifacts",
sempy_labs/_model_dependencies.py CHANGED
@@ -334,7 +334,10 @@ def measure_dependency_tree(

    # Visualize the tree structure using RenderTree
    for pre, _, node in RenderTree(node_dict[measure_name]):
-       if icons.table_icon in node.custom_property:
+       if (
+           hasattr(node, "custom_property")
+           and icons.table_icon in node.custom_property
+       ):
            print(f"{pre}{node.custom_property}'{node.name}'")
        else:
-           print(f"{pre}{node.custom_property}[{node.name}]")
+           print(f"{pre}'{node.name}'")
sempy_labs/_semantic_models.py CHANGED
@@ -348,3 +348,121 @@ def list_semantic_model_datasources(
    df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
+
+
+ @log
+ def bind_semantic_model_connection(
+     dataset: str | UUID,
+     connection_id: UUID,
+     connectivity_type: str,
+     connection_type: str,
+     connection_path: str,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Binds a semantic model data source reference to a data connection.
+     This API can also be used to unbind data source references.
+
+     This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     connection_id : uuid.UUID
+         The object ID of the connection.
+     connectivity_type : str
+         The connectivity type of the connection. Additional connectivity types may be added over time.
+     connection_type : str
+         The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
+     connection_path : str
+         The path of the connection.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+         dataset=dataset, workspace=workspace_id
+     )
+
+     payload = {
+         "connectionBinding": {
+             "id": str(connection_id),
+             "connectivityType": connectivity_type,
+             "connectionDetails": {
+                 "type": connection_type,
+                 "path": connection_path,
+             },
+         }
+     }
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
+         method="post",
+         client="fabric_sp",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} Connection '{connection_id}' has been bound to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+     )
+
+
+ @log
+ def unbind_semantic_model_connection(
+     dataset: str | UUID,
+     connection_type: str,
+     connection_path: str,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Unbinds a semantic model data source reference from a data connection.
+
+     This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     connection_type : str
+         The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
+     connection_path : str
+         The path of the connection.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+         dataset=dataset, workspace=workspace_id
+     )
+
+     payload = {
+         "connectionBinding": {
+             "connectivityType": "None",
+             "connectionDetails": {
+                 "type": connection_type,
+                 "path": connection_path,
+             },
+         }
+     }
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
+         method="post",
+         client="fabric_sp",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been unbound from its connection."
+     )
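A usage sketch for the new binding helpers, not part of the diff: the connection ID, connectivity type, and connection path are placeholders (consult the Bind Semantic Model Connection API docs for valid connectivityType values), and the top-level imports assume the functions are re-exported in sempy_labs/__init__.py.

from uuid import UUID
from sempy_labs import bind_semantic_model_connection, unbind_semantic_model_connection

bind_semantic_model_connection(
    dataset="Sales Model",
    connection_id=UUID("00000000-0000-0000-0000-000000000000"),
    connectivity_type="ShareableCloud",  # placeholder value
    connection_type="SQL",
    connection_path="myserver.database.windows.net;SalesDB",
    workspace="Sales",
)

# Unbinding sends the same request with connectivityType set to "None".
unbind_semantic_model_connection(
    dataset="Sales Model",
    connection_type="SQL",
    connection_path="myserver.database.windows.net;SalesDB",
    workspace="Sales",
)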
sempy_labs/_sql_endpoints.py CHANGED
@@ -66,7 +66,8 @@ def refresh_sql_endpoint_metadata(
    item: str | UUID,
    type: Literal["Lakehouse", "MirroredDatabase"],
    workspace: Optional[str | UUID] = None,
-   tables: dict[str, list[str]] = None,
+   timeout_unit: Literal["Seconds", "Minutes", "Hours", "Days"] = "Minutes",
+   timeout_value: int = 15,
) -> pd.DataFrame:
    """
    Refreshes the metadata of a SQL endpoint.
@@ -85,15 +86,10 @@ def refresh_sql_endpoint_metadata(
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
-   tables : dict[str, list[str]], default=None
-       A dictionary where the keys are schema names and the values are lists of table names.
-       If empty, all table metadata will be refreshed.
-
-       Example:
-       {
-           "dbo": ["DimDate", "DimGeography"],
-           "sls": ["FactSales", "FactBudget"],
-       }
+   timeout_unit : Literal['Seconds', 'Minutes', 'Hours', 'Days'], default='Minutes'
+       The unit of time for the request duration before timing out. Additional duration types may be added over time.
+   timeout_value : int, default=15
+       The number of time units in the request duration.

    Returns
    -------
@@ -132,14 +128,10 @@ def refresh_sql_endpoint_metadata(
    else:
        raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")

-   payload = {}
-   if tables:
-       payload = {
-           "tableDefinitions": [
-               {"schema": schema, "tableNames": tables}
-               for schema, tables in tables.items()
-           ]
-       }
+   payload = None
+   timeout_unit = timeout_unit.capitalize()
+   if timeout_unit != "Minutes" and timeout_value != 15:
+       payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}

    result = _base_api(
        request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
@@ -195,10 +187,8 @@ def refresh_sql_endpoint_metadata(
        df = df[column_order]

        printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
-       if tables:
-           print(f"{printout} for the following tables: {tables}.")
-       else:
-           print(f"{printout} for all tables.")
+       print(f"{printout} for all tables.")
+       _update_dataframe_datatypes(df, columns)
    else:
        # If the target item has no tables to refresh the metadata for
        df = pd.DataFrame(columns=columns.keys())
@@ -206,6 +196,4 @@ def refresh_sql_endpoint_metadata(
            f"{icons.yellow_dot} The SQL endpoint '{item_name}' {type.lower()} within the '{workspace_name}' workspace has no tables to refresh..."
        )

-   _update_dataframe_datatypes(df, columns)
-
    return df
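Note that after this change the timeout payload is only sent when both timeout_unit and timeout_value differ from their defaults; otherwise the request body is empty. A usage sketch, not part of the diff, with placeholder names and assuming the package-root re-export.

from sempy_labs import refresh_sql_endpoint_metadata

# Both values are non-default, so {"timeout": {"timeUnit": "Hours", "value": 1}} is sent.
df = refresh_sql_endpoint_metadata(
    item="MyLakehouse",
    type="Lakehouse",
    workspace="Sales",
    timeout_unit="Hours",
    timeout_value=1,
)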
sempy_labs/_warehouses.py CHANGED
@@ -1,4 +1,4 @@
- from ._helper_functions import (
+ from sempy_labs._helper_functions import (
      resolve_workspace_name_and_id,
      _base_api,
      _create_dataframe,
sempy_labs/admin/__init__.py CHANGED
@@ -94,6 +94,10 @@ from ._tenant_keys import (
    list_tenant_keys,
    rotate_tenant_key,
)
+ from ._sharing_links import (
+     remove_all_sharing_links,
+     remove_sharing_links,
+ )

__all__ = [
    "list_items",
@@ -155,4 +159,6 @@ __all__ = [
    "delete_tag",
    "list_tenant_keys",
    "rotate_tenant_key",
+   "remove_all_sharing_links",
+   "remove_sharing_links",
]
sempy_labs/admin/_sharing_links.py ADDED
@@ -0,0 +1,110 @@
+ from sempy._utils._log import log
+ from sempy_labs._helper_functions import (
+     _base_api,
+     _is_valid_uuid,
+ )
+ import sempy_labs._icons as icons
+ from typing import List
+
+
+ @log
+ def remove_all_sharing_links(sharing_link_type: str = "OrgLink"):
+     """
+     Deletes all organization sharing links for all Fabric items in the tenant. This action cannot be undone.
+
+     This is a wrapper function for the following API: `Sharing Links - Remove All Sharing Links <https://learn.microsoft.com/rest/api/fabric/admin/sharing-links/remove-all-sharing-links>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     sharing_link_type : str, default='OrgLink'
+         Specifies the type of sharing link that is required to be deleted. Additional sharing link types may be added over time.
+     """
+
+     payload = {"sharingLinkType": sharing_link_type}
+
+     _base_api(
+         request="/v1/admin/items/removeAllSharingLinks",
+         client="fabric_sp",
+         lro_return_status_code=True,
+         status_codes=[200, 202],
+         method="post",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} All organization sharing links for all Fabric items in the tenant have been deleted."
+     )
+
+
+ @log
+ def remove_sharing_links(items: List[dict], sharing_link_type: str = "OrgLink"):
+     """
+     Deletes all organization sharing links for the specified Fabric items. This action cannot be undone.
+
+     This is a wrapper function for the following API: `Sharing Links - Bulk Remove Sharing Links <https://learn.microsoft.com/rest/api/fabric/admin/sharing-links/bulk-remove-sharing-links>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     items : List[dict]
+         A list of dictionaries, each representing an item. The 'item' and 'workspace' keys accept either a name or an ID. Examples:
+
+         [
+             {
+                 "item": "MyReport",
+                 "type": "Report",
+                 "workspace": "Workspace 1",
+             },
+             {
+                 "item": "MyReport2",
+                 "type": "Report",
+                 "workspace": "Workspace 2",
+             },
+         ]
+
+         [
+             {
+                 "item": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a",
+                 "type": "Report",
+                 "workspace": "Workspace 1",
+             },
+             {
+                 "item": "fe472f5e-636e-4c10-a1c6-7e9edc0b542c",
+                 "type": "Notebook",
+                 "workspace": "476fcafe-b514-495d-b13f-ca9a4f0b1d8f",
+             },
+         ]
+
+     sharing_link_type : str, default='OrgLink'
+         Specifies the type of sharing link that is required to be deleted. Additional sharing link types may be added over time.
+     """
+
+     from sempy_labs.admin._items import _resolve_item_id
+
+     payload = {"items": [], "sharingLinkType": sharing_link_type}
+
+     for i in items:
+         item = i.get("item")
+         type = i.get("type")
+         workspace = i.get("workspace")
+         if _is_valid_uuid(item):
+             payload["items"].append({"id": item, "type": type})
+         else:
+             item_id = _resolve_item_id(item=item, type=type, workspace=workspace)
+             payload["items"].append({"id": item_id, "type": type})
+
+     _base_api(
+         request="/v1/admin/items/bulkRemoveSharingLinks",
+         client="fabric_sp",
+         method="post",
+         payload=payload,
+         lro_return_status_code=True,
+         status_codes=[200, 202],
+     )
+
+     print(
+         f"{icons.green_dot} Organizational sharing links for all specified items have been deleted."
+     )