semantic-link-labs 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.

Files changed (87)
  1. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/METADATA +67 -8
  2. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/RECORD +87 -80
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_ai.py +8 -5
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +107 -104
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +321 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +103 -99
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +227 -36
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +264 -167
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_bpa.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +8 -6
  34. sempy_labs/_model_dependencies.py +2 -0
  35. sempy_labs/_notebooks.py +28 -29
  36. sempy_labs/_one_lake_integration.py +2 -0
  37. sempy_labs/_query_scale_out.py +63 -81
  38. sempy_labs/_refresh_semantic_model.py +12 -14
  39. sempy_labs/_spark.py +54 -79
  40. sempy_labs/_sql.py +7 -11
  41. sempy_labs/_translations.py +2 -2
  42. sempy_labs/_vertipaq.py +11 -6
  43. sempy_labs/_warehouses.py +30 -33
  44. sempy_labs/_workloads.py +15 -20
  45. sempy_labs/_workspace_identity.py +13 -17
  46. sempy_labs/_workspaces.py +49 -48
  47. sempy_labs/admin/__init__.py +2 -0
  48. sempy_labs/admin/_basic_functions.py +244 -281
  49. sempy_labs/admin/_domains.py +186 -103
  50. sempy_labs/admin/_external_data_share.py +26 -31
  51. sempy_labs/admin/_git.py +17 -22
  52. sempy_labs/admin/_items.py +34 -48
  53. sempy_labs/admin/_scanner.py +61 -49
  54. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  55. sempy_labs/directlake/_dl_helper.py +10 -11
  56. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  57. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  58. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  59. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  60. sempy_labs/directlake/_warm_cache.py +2 -0
  61. sempy_labs/graph/__init__.py +33 -0
  62. sempy_labs/graph/_groups.py +402 -0
  63. sempy_labs/graph/_teams.py +113 -0
  64. sempy_labs/graph/_users.py +191 -0
  65. sempy_labs/lakehouse/__init__.py +4 -0
  66. sempy_labs/lakehouse/_get_lakehouse_columns.py +12 -12
  67. sempy_labs/lakehouse/_get_lakehouse_tables.py +16 -22
  68. sempy_labs/lakehouse/_lakehouse.py +104 -7
  69. sempy_labs/lakehouse/_shortcuts.py +42 -20
  70. sempy_labs/migration/__init__.py +4 -0
  71. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  72. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  73. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  74. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  75. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  76. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  77. sempy_labs/report/_download_report.py +8 -13
  78. sempy_labs/report/_generate_report.py +49 -46
  79. sempy_labs/report/_paginated.py +20 -26
  80. sempy_labs/report/_report_functions.py +52 -47
  81. sempy_labs/report/_report_list_functions.py +2 -0
  82. sempy_labs/report/_report_rebind.py +6 -10
  83. sempy_labs/report/_reportwrapper.py +187 -220
  84. sempy_labs/tom/_model.py +12 -6
  85. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/LICENSE +0 -0
  86. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/WHEEL +0 -0
  87. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/top_level.txt +0 -0
sempy_labs/_connections.py
@@ -1,11 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy.fabric.exceptions import FabricHTTPException
 from typing import Optional
 from sempy_labs._helper_functions import (
-    pagination,
     _is_valid_uuid,
     resolve_workspace_name_and_id,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -18,6 +19,8 @@ def delete_connection(connection: str | UUID):

     This is a wrapper function for the following API: `Connections - Delete Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/delete-connection>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     connection : str | uuid.UUID
@@ -25,13 +28,9 @@ def delete_connection(connection: str | UUID):
     """

     connection_id = _resolve_connection_id(connection)
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/connections/{connection_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(
+        request=f"/v1/connections/{connection_id}", client="fabric_sp", method="delete"
+    )
     print(f"{icons.green_dot} The '{connection}' connection has been deleted.")


@@ -41,6 +40,8 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id

     This is a wrapper function for the following API: `Connections - Delete Connection Role Assignment <https://learn.microsoft.com/rest/api/fabric/core/connections/delete-connection-role-assignment>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     connection : str | uuid.UUID
@@ -50,15 +51,12 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
     """

     connection_id = _resolve_connection_id(connection)
-
-    client = fabric.FabricRestClient()
-    response = client.delete(
-        f"/v1/connections/{connection_id}/roleAssignments/{role_assignment_id}"
+    _base_api(
+        request=f"/v1/connections/{connection_id}/roleAssignments/{role_assignment_id}",
+        client="fabric_sp",
+        method="delete",
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} The '{role_assignment_id}' role assignment Id has been deleted from the '{connection}' connection."
     )
@@ -86,6 +84,8 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:

     This is a wrapper function for the following API: `Connections - List Connection Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/connections/list-connection-role-assignments>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     connection : str | uuid.UUID
@@ -99,22 +99,20 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:

     connection_id = _resolve_connection_id(connection)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/connections/{connection_id}/roleAssignments")
-
-    df = pd.DataFrame(
-        columns=[
-            "Connection Role Assignment Id",
-            "Principal Id",
-            "Principal Type",
-            "Role",
-        ]
-    )
+    columns = {
+        "Connection Role Assignment Id": "string",
+        "Principal Id": "string",
+        "Principal Type": "string",
+        "Role": "string",
+    }

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    df = _create_dataframe(columns=columns)

-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/connections/{connection_id}/roleAssignments",
+        client="fabric_sp",
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -134,35 +132,33 @@ def list_connections() -> pd.DataFrame:
     """
     Lists all available connections.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Returns
     -------
     pandas.DataFrame
         A pandas dataframe showing all available connections.
     """

-    client = fabric.FabricRestClient()
-    response = client.get("/v1/connections")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    df = pd.DataFrame(
-        columns=[
-            "Connection Id",
-            "Connection Name",
-            "Gateway Id",
-            "Connectivity Type",
-            "Connection Path",
-            "Connection Type",
-            "Privacy Level",
-            "Credential Type",
-            "Single Sign on Type",
-            "Connection Encyrption",
-            "Skip Test Connection",
-        ]
+    columns = {
+        "Connection Id": "string",
+        "Connection Name": "string",
+        "Gateway Id": "string",
+        "Connectivity Type": "string",
+        "Connection Path": "string",
+        "Connection Type": "string",
+        "Privacy Level": "string",
+        "Credential Type": "string",
+        "Single Sign On Type": "string",
+        "Connection Encryption": "string",
+        "Skip Test Connection": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request="/v1/connections", client="fabric_sp", uses_pagination=True
     )
+
     for r in responses:
         for i in r.get("value", []):
             connection_details = i.get("connectionDetails", {})
@@ -199,8 +195,8 @@ def list_connections() -> pd.DataFrame:
             }

             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-    bool_cols = ["Skip Test Connection"]
-    df[bool_cols] = df[bool_cols].astype(bool)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -213,6 +209,8 @@ def list_item_connections(

     This is a wrapper function for the following API: `Items - List Item Connections <https://learn.microsoft.com/rest/api/fabric/core/items/list-item-connections>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -236,24 +234,21 @@ def list_item_connections(
         item_name=item_name, type=item_type, workspace=workspace_id
     )

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/items/{item_id}/connections")
-
-    df = pd.DataFrame(
-        columns=[
-            "Connection Name",
-            "Connection Id",
-            "Connectivity Type",
-            "Connection Type",
-            "Connection Path",
-            "Gateway Id",
-        ]
-    )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    columns = {
+        "Connection Name": "string",
+        "Connection Id": "string",
+        "Connectivity Type": "string",
+        "Connection Type": "string",
+        "Connection Path": "string",
+        "Gateway Id": "string",
+    }
+    df = _create_dataframe(columns=columns)

-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/connections",
+        client="fabric_sp",
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -280,23 +275,17 @@ def _list_supported_connection_types(
         gateway_id = _resolve_gateway_id(gateway)
         url += f"gatewayId={gateway_id}"

-    df = pd.DataFrame(
-        columns=[
-            "Connection Type",
-            "Creation Method",
-            "Supported Credential Types",
-            "Supported Connection Encryption Types",
-            "Supports Skip Test Connection",
-        ]
-    )
+    columns = {
+        "Connection Type": "string",
+        "Creation Method": "string",
+        "Supported Credential Types": "string",
+        "Supported Connection Encryption Types": "string",
+        "Supports Skip Test Connection": "bool",
+    }
+    df = _create_dataframe(columns=columns)

     url = url.rstrip("&")
-    client = fabric.FabricRestClient()
-    response = client.get(url)
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

     records = []
     for r in responses:
@@ -318,6 +307,8 @@ def _list_supported_connection_types(
     if records:
         df = pd.DataFrame(records)

+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
     return df


@@ -336,6 +327,8 @@ def create_cloud_connection(

     This is a wrapper function for the following API: `Connections - Create Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/create-connection>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -356,7 +349,7 @@ def create_cloud_connection(
         If True, skips the test connection.
     """

-    request_body = {
+    payload = {
         "connectivityType": "ShareableCloud",
         "displayName": name,
         "connectionDetails": {
@@ -388,11 +381,13 @@ def create_cloud_connection(
         },
     }

-    client = fabric.FabricRestClient()
-    response = client.post("/v1/connections", json=request_body)
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/connections",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
+    )

     print(f"{icons.green_dot} The '{name}' cloud connection has been created.")

@@ -412,6 +407,8 @@ def create_on_prem_connection(

     This is a wrapper function for the following API: `Connections - Create Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/create-connection>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -436,7 +433,7 @@ def create_on_prem_connection(

     gateway_id = _resolve_gateway_id(gateway)

-    request_body = {
+    payload = {
         "connectivityType": "OnPremisesGateway",
         "gatewayId": gateway_id,
         "displayName": name,
@@ -468,11 +465,13 @@ def create_on_prem_connection(
         },
     }

-    client = fabric.FabricRestClient()
-    response = client.post("/v1/connections", json=request_body)
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/connections",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
+    )

     print(f"{icons.green_dot} The '{name}' on-prem connection has been created.")

@@ -493,6 +492,8 @@ def create_vnet_connection(

     This is a wrapper function for the following API: `Connections - Create Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/create-connection>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -517,7 +518,7 @@ def create_vnet_connection(

     gateway_id = _resolve_gateway_id(gateway)

-    request_body = {
+    payload = {
         "connectivityType": "VirtualNetworkGateway",
         "gatewayId": gateway_id,
         "displayName": name,
@@ -550,11 +551,13 @@ def create_vnet_connection(
         },
     }

-    client = fabric.FabricRestClient()
-    response = client.post("/v1/connections", json=request_body)
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/connections",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
+    )

     print(
         f"{icons.green_dot} The '{name}' virtual network gateway connection has been created."
sempy_labs/_data_pipelines.py
@@ -1,14 +1,14 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
     _decode_b64,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -31,16 +31,18 @@ def list_data_pipelines(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing the data pipelines within a workspace.
     """

-    df = pd.DataFrame(columns=["Data Pipeline Name", "Data Pipeline ID", "Description"])
+    columns = {
+        "Data Pipeline Name": "string",
+        "Data Pipeline ID": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataPipelines", uses_pagination=True
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -76,24 +78,27 @@ def create_data_pipeline(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    request_body = {"displayName": name}
+    payload = {"displayName": name}

     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/dataPipelines", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataPipelines",
+        method="post",
+        payload=payload,
+        status_codes=[201, 202],
+        lro_return_status_code=True,
     )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="data pipeline",
+        workspace_name=workspace_name,
+        action="created",
     )


-def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):
+def delete_data_pipeline(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric data pipeline.

@@ -101,8 +106,8 @@ def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    name: str
-        Name of the data pipeline.
+    name: str | uuid.UUID
+        Name or ID of the data pipeline.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -110,32 +115,27 @@ def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="DataPipeline", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
+
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="data pipeline",
+        workspace_name=workspace_name,
+        action="deleted",
     )


 def get_data_pipeline_definition(
-    name: str, workspace: Optional[str | UUID] = None, decode: bool = True
+    name: str | UUID, workspace: Optional[str | UUID] = None, decode: bool = True
 ) -> dict | pd.DataFrame:
     """
     Obtains the definition of a data pipeline.

     Parameters
     ----------
-    name : str
-        The name of the data pipeline.
+    name : str or uuid.UUID
+        The name or ID of the data pipeline.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -152,16 +152,14 @@ def get_data_pipeline_definition(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="DataPipeline", workspace=workspace_id
-    )

-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}/getDefinition"
+    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}/getDefinition",
+        method="post",
+        lro_return_json=True,
+        status_codes=None,
     )
-    result = lro(client, response).json()
-
     df = pd.json_normalize(result["definition"]["parts"])

     if not decode:
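Both files converge on the same dataframe-typing pattern: one columns dict maps each display name to a pandas dtype and is used twice, once to build the empty frame (_create_dataframe) and once to cast the accumulated rows (_update_dataframe_datatypes), replacing ad-hoc conversions like the old bool_cols block in list_connections. A minimal sketch of the implied behavior, assuming the helpers do nothing beyond applying that dtype map (the actual implementations live in sempy_labs/_helper_functions.py):

import pandas as pd


def _create_dataframe(columns: dict[str, str]) -> pd.DataFrame:
    # Sketch: an empty frame whose schema is fixed even when the API returns no rows.
    return pd.DataFrame(
        {name: pd.Series(dtype=dtype) for name, dtype in columns.items()}
    )


def _update_dataframe_datatypes(dataframe: pd.DataFrame, column_map: dict[str, str]) -> None:
    # Sketch: cast in place after rows were appended as plain objects via pd.concat.
    for name, dtype in column_map.items():
        if name in dataframe.columns:
            dataframe[name] = dataframe[name].astype(dtype)


# Usage mirroring list_data_pipelines above:
columns = {
    "Data Pipeline Name": "string",
    "Data Pipeline ID": "string",
    "Description": "string",
}
df = _create_dataframe(columns=columns)

Keeping the schema in one dict means an endpoint that returns zero rows still yields a frame with the right columns and dtypes, which the per-call pd.DataFrame(columns=[...]) shells in 0.9.1 did not guarantee for non-object types.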