semantic-link-labs 0.9.1-py3-none-any.whl → 0.9.2-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.

Note: this version of semantic-link-labs has been flagged as potentially problematic.

Files changed (82)
  1. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +66 -8
  2. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +82 -75
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_capacities.py +120 -142
  5. sempy_labs/_capacity_migration.py +61 -94
  6. sempy_labs/_clear_cache.py +9 -8
  7. sempy_labs/_connections.py +72 -105
  8. sempy_labs/_data_pipelines.py +47 -49
  9. sempy_labs/_dataflows.py +45 -51
  10. sempy_labs/_dax.py +228 -6
  11. sempy_labs/_delta_analyzer.py +303 -0
  12. sempy_labs/_deployment_pipelines.py +72 -66
  13. sempy_labs/_environments.py +39 -36
  14. sempy_labs/_eventhouses.py +35 -35
  15. sempy_labs/_eventstreams.py +38 -39
  16. sempy_labs/_external_data_shares.py +29 -42
  17. sempy_labs/_gateways.py +57 -101
  18. sempy_labs/_generate_semantic_model.py +22 -30
  19. sempy_labs/_git.py +46 -66
  20. sempy_labs/_graphQL.py +95 -0
  21. sempy_labs/_helper_functions.py +175 -30
  22. sempy_labs/_job_scheduler.py +47 -59
  23. sempy_labs/_kql_databases.py +27 -34
  24. sempy_labs/_kql_querysets.py +23 -30
  25. sempy_labs/_list_functions.py +262 -164
  26. sempy_labs/_managed_private_endpoints.py +52 -47
  27. sempy_labs/_mirrored_databases.py +110 -134
  28. sempy_labs/_mirrored_warehouses.py +13 -13
  29. sempy_labs/_ml_experiments.py +36 -36
  30. sempy_labs/_ml_models.py +37 -38
  31. sempy_labs/_model_dependencies.py +2 -0
  32. sempy_labs/_notebooks.py +28 -29
  33. sempy_labs/_one_lake_integration.py +2 -0
  34. sempy_labs/_query_scale_out.py +63 -81
  35. sempy_labs/_refresh_semantic_model.py +12 -14
  36. sempy_labs/_spark.py +54 -79
  37. sempy_labs/_sql.py +7 -11
  38. sempy_labs/_vertipaq.py +8 -3
  39. sempy_labs/_warehouses.py +30 -33
  40. sempy_labs/_workloads.py +15 -20
  41. sempy_labs/_workspace_identity.py +13 -17
  42. sempy_labs/_workspaces.py +49 -48
  43. sempy_labs/admin/__init__.py +2 -0
  44. sempy_labs/admin/_basic_functions.py +244 -281
  45. sempy_labs/admin/_domains.py +188 -103
  46. sempy_labs/admin/_external_data_share.py +26 -31
  47. sempy_labs/admin/_git.py +17 -22
  48. sempy_labs/admin/_items.py +34 -48
  49. sempy_labs/admin/_scanner.py +20 -13
  50. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  51. sempy_labs/directlake/_dl_helper.py +10 -11
  52. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  53. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  54. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  55. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  56. sempy_labs/directlake/_warm_cache.py +2 -0
  57. sempy_labs/graph/__init__.py +33 -0
  58. sempy_labs/graph/_groups.py +402 -0
  59. sempy_labs/graph/_teams.py +113 -0
  60. sempy_labs/graph/_users.py +191 -0
  61. sempy_labs/lakehouse/__init__.py +4 -0
  62. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  63. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  64. sempy_labs/lakehouse/_lakehouse.py +101 -4
  65. sempy_labs/lakehouse/_shortcuts.py +42 -20
  66. sempy_labs/migration/__init__.py +4 -0
  67. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  68. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  69. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  70. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  72. sempy_labs/report/_download_report.py +8 -13
  73. sempy_labs/report/_generate_report.py +49 -46
  74. sempy_labs/report/_paginated.py +20 -26
  75. sempy_labs/report/_report_functions.py +50 -45
  76. sempy_labs/report/_report_list_functions.py +2 -0
  77. sempy_labs/report/_report_rebind.py +6 -10
  78. sempy_labs/report/_reportwrapper.py +187 -220
  79. sempy_labs/tom/_model.py +8 -5
  80. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  82. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
sempy_labs/_connections.py CHANGED
@@ -1,11 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy.fabric.exceptions import FabricHTTPException
 from typing import Optional
 from sempy_labs._helper_functions import (
-    pagination,
     _is_valid_uuid,
     resolve_workspace_name_and_id,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -25,13 +26,7 @@ def delete_connection(connection: str | UUID):
     """

     connection_id = _resolve_connection_id(connection)
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/connections/{connection_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(request=f"/v1/connections/{connection_id}", method="delete")
     print(f"{icons.green_dot} The '{connection}' connection has been deleted.")


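Note: across every file in this release, the repeated pattern of instantiating fabric.FabricRestClient() (or fabric.PowerBIRestClient()), issuing the request, and hand-checking response.status_code is collapsed into a single _base_api helper imported from sempy_labs._helper_functions (which changed +175 -30 but is not shown in this diff). Judging only from the call sites visible here, the wrapper presumably has roughly the shape sketched below; the parameter names come from the diff, everything else (client selection, pagination, long-running-operation handling) is an assumption.

# Rough sketch of _base_api, reconstructed from its call sites in this diff.
# The real implementation in sempy_labs/_helper_functions.py is not shown here
# and may differ (for example it likely routes /v1.0/myorg/... requests through
# PowerBIRestClient and polls long-running operations properly).
import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException


def _base_api(
    request: str,
    method: str = "get",
    payload: dict | None = None,
    status_codes: int | list | None = 200,
    uses_pagination: bool = False,
    lro_return_json: bool = False,
    lro_return_status_code: bool = False,
):
    # Call sites pass 200 (implicitly), 201, or [201, 202]; normalize to a list.
    if isinstance(status_codes, int):
        status_codes = [status_codes]

    client = fabric.FabricRestClient()
    if method == "get":
        response = client.get(request)
    elif method == "delete":
        response = client.delete(request)
    else:
        response = client.post(request, json=payload)

    # status_codes=None is used by call sites that defer validation to the LRO logic.
    if status_codes is not None and response.status_code not in status_codes:
        raise FabricHTTPException(response)

    if lro_return_json:
        # The real helper would poll the 202/Location long-running operation to
        # completion and return its final JSON body; shortened here.
        return response.json()

    if lro_return_status_code:
        # Likewise, presumably returns the final status code of the operation.
        return response.status_code

    if uses_pagination:
        # The real helper follows continuation tokens and returns one JSON dict
        # per page; a single-page fallback keeps this sketch short.
        return [response.json()]

    return response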
@@ -50,15 +45,11 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
     """

     connection_id = _resolve_connection_id(connection)
-
-    client = fabric.FabricRestClient()
-    response = client.delete(
-        f"/v1/connections/{connection_id}/roleAssignments/{role_assignment_id}"
+    _base_api(
+        request=f"/v1/connections/{connection_id}/roleAssignments/{role_assignment_id}",
+        method="delete",
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} The '{role_assignment_id}' role assignment Id has been deleted from the '{connection}' connection."
     )
@@ -99,22 +90,18 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:

     connection_id = _resolve_connection_id(connection)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/connections/{connection_id}/roleAssignments")
-
-    df = pd.DataFrame(
-        columns=[
-            "Connection Role Assignment Id",
-            "Principal Id",
-            "Principal Type",
-            "Role",
-        ]
-    )
+    columns = {
+        "Connection Role Assignment Id": "string",
+        "Principal Id": "string",
+        "Principal Type": "string",
+        "Role": "string",
+    }

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    df = _create_dataframe(columns=columns)

-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/connections/{connection_id}/roleAssignments", uses_pagination=True
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -140,29 +127,23 @@ def list_connections() -> pd.DataFrame:
         A pandas dataframe showing all available connections.
     """

-    client = fabric.FabricRestClient()
-    response = client.get("/v1/connections")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    df = pd.DataFrame(
-        columns=[
-            "Connection Id",
-            "Connection Name",
-            "Gateway Id",
-            "Connectivity Type",
-            "Connection Path",
-            "Connection Type",
-            "Privacy Level",
-            "Credential Type",
-            "Single Sign on Type",
-            "Connection Encyrption",
-            "Skip Test Connection",
-        ]
-    )
+    columns = {
+        "Connection Id": "string",
+        "Connection Name": "string",
+        "Gateway Id": "string",
+        "Connectivity Type": "string",
+        "Connection Path": "string",
+        "Connection Type": "string",
+        "Privacy Level": "string",
+        "Credential Type": "string",
+        "Single Sign on Type": "string",
+        "Connection Encyrption": "string",
+        "Skip Test Connection": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(request="/v1/connections", uses_pagination=True)
+
     for r in responses:
         for i in r.get("value", []):
             connection_details = i.get("connectionDetails", {})
@@ -199,8 +180,8 @@ def list_connections() -> pd.DataFrame:
             }

             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-    bool_cols = ["Skip Test Connection"]
-    df[bool_cols] = df[bool_cols].astype(bool)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

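The other recurring change is how the result frames are built: each list function now declares its schema once as a dict mapping column names to logical types ("string", "bool", "int"), creates the empty frame with _create_dataframe, and casts everything at the end with _update_dataframe_datatypes instead of ad-hoc astype calls. Those helpers are also defined in the (unshown) _helper_functions.py; a minimal sketch consistent with the call sites above might look like this, with the exact type mapping being an assumption.

import pandas as pd

# Hypothetical sketch; the real helpers live in sempy_labs/_helper_functions.py.
_DTYPES = {"string": "string", "bool": "bool", "int": "int64"}


def _create_dataframe(columns: dict) -> pd.DataFrame:
    # Empty frame whose column order follows the declared schema.
    return pd.DataFrame(columns=list(columns.keys()))


def _update_dataframe_datatypes(dataframe: pd.DataFrame, column_map: dict) -> None:
    # Cast each declared column in place once all rows have been appended,
    # mirroring the removed per-column astype(bool)/astype(int) calls.
    for column, dtype in column_map.items():
        if column in dataframe.columns:
            dataframe[column] = dataframe[column].astype(_DTYPES.get(dtype, "object"))

Whatever the real mapping is, the observable behavior matches the removed code: "Skip Test Connection" still ends up boolean and "Generation" (in _dataflows.py below) still ends up integer, but the schema now lives in one dict per function.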
@@ -236,24 +217,20 @@ def list_item_connections(
         item_name=item_name, type=item_type, workspace=workspace_id
     )

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/items/{item_id}/connections")
-
-    df = pd.DataFrame(
-        columns=[
-            "Connection Name",
-            "Connection Id",
-            "Connectivity Type",
-            "Connection Type",
-            "Connection Path",
-            "Gateway Id",
-        ]
-    )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    columns = {
+        "Connection Name": "string",
+        "Connection Id": "string",
+        "Connectivity Type": "string",
+        "Connection Type": "string",
+        "Connection Path": "string",
+        "Gateway Id": "string",
+    }
+    df = _create_dataframe(columns=columns)

-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/connections",
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -280,23 +257,17 @@ def _list_supported_connection_types(
         gateway_id = _resolve_gateway_id(gateway)
         url += f"gatewayId={gateway_id}"

-    df = pd.DataFrame(
-        columns=[
-            "Connection Type",
-            "Creation Method",
-            "Supported Credential Types",
-            "Supported Connection Encryption Types",
-            "Supports Skip Test Connection",
-        ]
-    )
+    columns = {
+        "Connection Type": "string",
+        "Creation Method": "string",
+        "Supported Credential Types": "string",
+        "Supported Connection Encryption Types": "string",
+        "Supports Skip Test Connection": "bool",
+    }
+    df = _create_dataframe(columns=columns)

     url = url.rstrip("&")
-    client = fabric.FabricRestClient()
-    response = client.get(url)
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(request=url, uses_pagination=True)

     records = []
     for r in responses:
@@ -318,6 +289,8 @@ def _list_supported_connection_types(
     if records:
         df = pd.DataFrame(records)

+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
     return df


@@ -356,7 +329,7 @@ def create_cloud_connection(
        If True, skips the test connection.
     """

-    request_body = {
+    payload = {
        "connectivityType": "ShareableCloud",
        "displayName": name,
        "connectionDetails": {
@@ -388,11 +361,9 @@ def create_cloud_connection(
        },
     }

-    client = fabric.FabricRestClient()
-    response = client.post("/v1/connections", json=request_body)
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/connections", method="post", payload=payload, status_codes=201
+    )

     print(f"{icons.green_dot} The '{name}' cloud connection has been created.")

@@ -436,7 +407,7 @@ def create_on_prem_connection(


     gateway_id = _resolve_gateway_id(gateway)
-    request_body = {
+    payload = {
        "connectivityType": "OnPremisesGateway",
        "gatewayId": gateway_id,
        "displayName": name,
@@ -468,11 +439,9 @@ def create_on_prem_connection(
        },
     }

-    client = fabric.FabricRestClient()
-    response = client.post("/v1/connections", json=request_body)
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/connections", method="post", payload=payload, status_codes=201
+    )

     print(f"{icons.green_dot} The '{name}' on-prem connection has been created.")

@@ -517,7 +486,7 @@ def create_vnet_connection(

     gateway_id = _resolve_gateway_id(gateway)

-    request_body = {
+    payload = {
        "connectivityType": "VirtualNetworkGateway",
        "gatewayId": gateway_id,
        "displayName": name,
@@ -550,11 +519,9 @@ def create_vnet_connection(
        },
     }

-    client = fabric.FabricRestClient()
-    response = client.post("/v1/connections", json=request_body)
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/connections", method="post", payload=payload, status_codes=201
+    )

     print(
         f"{icons.green_dot} The '{name}' virtual network gateway connection has been created."
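From the caller's side nothing in _connections.py changes: the public signatures are untouched and only the HTTP plumbing underneath moved into _base_api. A usage sketch (the connection name is a placeholder; imports point at the module shown in this diff):

from sempy_labs._connections import list_connections, delete_connection

# Returns a typed pandas DataFrame, one row per connection the caller can access.
connections = list_connections()
print(connections[["Connection Id", "Connection Name", "Connectivity Type"]])

# Accepts a connection name or UUID, exactly as before.
delete_connection("My Cloud Connection")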
sempy_labs/_data_pipelines.py CHANGED
@@ -1,14 +1,14 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
     _decode_b64,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -31,16 +31,18 @@ def list_data_pipelines(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
        A pandas dataframe showing the data pipelines within a workspace.
     """

-    df = pd.DataFrame(columns=["Data Pipeline Name", "Data Pipeline ID", "Description"])
+    columns = {
+        "Data Pipeline Name": "string",
+        "Data Pipeline ID": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataPipelines", uses_pagination=True
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -76,24 +78,27 @@ def create_data_pipeline(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    request_body = {"displayName": name}
+    payload = {"displayName": name}

     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/dataPipelines", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataPipelines",
+        method="post",
+        payload=payload,
+        status_codes=[201, 202],
+        lro_return_status_code=True,
     )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="data pipeline",
+        workspace_name=workspace_name,
+        action="created",
     )


-def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):
+def delete_data_pipeline(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric data pipeline.

@@ -101,8 +106,8 @@ def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    name: str
-        Name of the data pipeline.
+    name: str | uuid.UUID
+        Name or ID of the data pipeline.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -110,32 +115,27 @@ def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="DataPipeline", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
+
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="data pipeline",
+        workspace_name=workspace_name,
+        action="deleted",
    )


 def get_data_pipeline_definition(
-    name: str, workspace: Optional[str | UUID] = None, decode: bool = True
+    name: str | UUID, workspace: Optional[str | UUID] = None, decode: bool = True
 ) -> dict | pd.DataFrame:
     """
     Obtains the definition of a data pipeline.

     Parameters
     ----------
-    name : str
-        The name of the data pipeline.
+    name : str or uuid.UUID
+        The name or ID of the data pipeline.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -152,16 +152,14 @@ def get_data_pipeline_definition(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="DataPipeline", workspace=workspace_id
-    )

-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}/getDefinition"
+    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}/getDefinition",
+        method="post",
+        lro_return_json=True,
+        status_codes=None,
     )
-    result = lro(client, response).json()
-
     df = pd.json_normalize(result["definition"]["parts"])

     if not decode:
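Two more helpers appear in the data-pipeline rewrite: _print_success, which standardizes the green-dot status messages that used to be hand-written print calls, and resolve_item_id, which accepts either an item name or a UUID (matching the widened name: str | UUID signatures). Neither definition is part of this diff; the sketch below is only an inference from the call sites, and the exact message wording is an assumption.

import sempy.fabric as fabric
import sempy_labs._icons as icons
from uuid import UUID


def _print_success(item_name, item_type, workspace_name, action):
    # Hypothetical: one standard success message instead of per-function prints.
    print(
        f"{icons.green_dot} The '{item_name}' {item_type} has been {action} "
        f"within the '{workspace_name}' workspace."
    )


def resolve_item_id(item, type=None, workspace=None):
    # Hypothetical: pass UUIDs through, resolve names via sempy.fabric.
    try:
        UUID(str(item))
        return str(item)
    except ValueError:
        return fabric.resolve_item_id(item_name=item, type=type, workspace=workspace)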
sempy_labs/_dataflows.py CHANGED
@@ -3,10 +3,12 @@ import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
 )
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -28,14 +30,17 @@ def list_dataflows(workspace: Optional[str | UUID] = None):
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    client = fabric.PowerBIRestClient()
-    response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dataflows")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)

-    df = pd.DataFrame(
-        columns=["Dataflow Id", "Dataflow Name", "Configured By", "Users", "Generation"]
-    )
+    columns = {
+        "Dataflow Id": "string",
+        "Dataflow Name": "string",
+        "Configured By": "string",
+        "Users": "string",
+        "Generation": "int",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dataflows")

     data = [] # Collect rows here

@@ -51,7 +56,8 @@

     if data:
         df = pd.DataFrame(data)
-        df["Generation"] = df["Generation"].astype(int)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -85,16 +91,14 @@ def assign_workspace_to_dataflow_storage(
     )

     dataflow_storage_id = df_filt["Dataflow Storage Account ID"].iloc[0]
-    client = fabric.PowerBIRestClient()
-
-    request_body = {"dataflowStorageId": dataflow_storage_id}
+    payload = {"dataflowStorageId": dataflow_storage_id}

-    response = client.post(
-        f"/v1.0/myorg/groups/{workspace_id}/AssignToDataflowStorage", json=request_body
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/AssignToDataflowStorage",
+        method="post",
+        payload=payload,
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} The '{dataflow_storage_account}' dataflow storage account has been assigned to the '{workspace_name}' workspacce."
     )
@@ -112,17 +116,14 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
        A pandas dataframe showing the accessible dataflow storage accounts.
     """

-    df = pd.DataFrame(
-        columns=[
-            "Dataflow Storage Account ID",
-            "Dataflow Storage Account Name",
-            "Enabled",
-        ]
-    )
-    client = fabric.PowerBIRestClient()
-    response = client.get("/v1.0/myorg/dataflowStorageAccounts")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    columns = {
+        "Dataflow Storage Account ID": "string",
+        "Dataflow Storage Account Name": "string",
+        "Enabled": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(request="/v1.0/myorg/dataflowStorageAccounts")

     for v in response.json().get("value", []):
         new_data = {
@@ -132,7 +133,7 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-    df["Enabled"] = df["Enabled"].astype(bool)
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -164,29 +165,23 @@ def list_upstream_dataflows(
     (dataflow_name, dataflow_id) = _resolve_dataflow_name_and_id(
         dataflow=dataflow, workspace=workspace_id
     )
-    client = fabric.PowerBIRestClient()
-
-    df = pd.DataFrame(
-        columns=[
-            "Dataflow Name",
-            "Dataflow Id",
-            "Workspace Name",
-            "Workspace Id",
-            "Upstream Dataflow Name",
-            "Upstream Dataflow Id",
-            "Upstream Workspace Name",
-            "Upstream Workspace Id",
-        ]
-    )

-    def collect_upstreams(
-        client, dataflow_id, dataflow_name, workspace_id, workspace_name
-    ):
-        response = client.get(
-            f"/v1.0/myorg/groups/{workspace_id}/dataflows/{dataflow_id}/upstreamDataflows"
+    columns = {
+        "Dataflow Name": "string",
+        "Dataflow Id": "string",
+        "Workspace Name": "string",
+        "Workspace Id": "string",
+        "Upstream Dataflow Name": "string",
+        "Upstream Dataflow Id": "string",
+        "Upstream Workspace Name": "string",
+        "Upstream Workspace Id": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    def collect_upstreams(dataflow_id, dataflow_name, workspace_id, workspace_name):
+        response = _base_api(
+            request=f"/v1.0/myorg/groups/{workspace_id}/dataflows/{dataflow_id}/upstreamDataflows"
         )
-        if response.status_code != 200:
-            raise FabricHTTPException(response)

         values = response.json().get("value", [])
         for v in values:
@@ -209,14 +204,13 @@ def list_upstream_dataflows(
             }

             collect_upstreams(
-                client,
                 tgt_dataflow_id,
                 tgt_dataflow_name,
                 tgt_workspace_id,
                 tgt_workspace_name,
             )

-    collect_upstreams(client, dataflow_id, dataflow_name, workspace_id, workspace_name)
+    collect_upstreams(dataflow_id, dataflow_name, workspace_id, workspace_name)

     return df

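As in the other modules, the dataflow functions keep their public behavior: list_dataflows still returns one row per dataflow in the workspace, and list_upstream_dataflows still walks the upstream dependency graph recursively, just without threading a shared PowerBIRestClient through the nested helper. A usage sketch (workspace and dataflow names are placeholders, and the positional dataflow argument is inferred from the hunk above):

from sempy_labs._dataflows import list_dataflows, list_upstream_dataflows

# One row per dataflow in the workspace; "Generation" is cast via the column map.
dataflows = list_dataflows(workspace="Sales Analytics")

# Recursively collect every upstream dependency of a single dataflow.
upstream = list_upstream_dataflows("Daily Refresh", workspace="Sales Analytics")
print(upstream[["Dataflow Name", "Upstream Dataflow Name", "Upstream Workspace Name"]])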