semantic-link-labs 0.9.0-py3-none-any.whl → 0.9.2-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in the registry they were published to. It is provided for informational purposes only.


Files changed (83)
  1. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
  2. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_authentication.py +0 -2
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +72 -105
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +303 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +57 -101
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +175 -30
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +262 -164
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_dependencies.py +2 -0
  33. sempy_labs/_notebooks.py +28 -29
  34. sempy_labs/_one_lake_integration.py +2 -0
  35. sempy_labs/_query_scale_out.py +63 -81
  36. sempy_labs/_refresh_semantic_model.py +12 -14
  37. sempy_labs/_spark.py +54 -79
  38. sempy_labs/_sql.py +7 -11
  39. sempy_labs/_vertipaq.py +8 -3
  40. sempy_labs/_warehouses.py +30 -33
  41. sempy_labs/_workloads.py +15 -20
  42. sempy_labs/_workspace_identity.py +13 -17
  43. sempy_labs/_workspaces.py +49 -48
  44. sempy_labs/admin/__init__.py +2 -0
  45. sempy_labs/admin/_basic_functions.py +244 -281
  46. sempy_labs/admin/_domains.py +188 -103
  47. sempy_labs/admin/_external_data_share.py +26 -31
  48. sempy_labs/admin/_git.py +17 -22
  49. sempy_labs/admin/_items.py +34 -48
  50. sempy_labs/admin/_scanner.py +20 -13
  51. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  52. sempy_labs/directlake/_dl_helper.py +10 -11
  53. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  54. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  55. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  56. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  57. sempy_labs/directlake/_warm_cache.py +2 -0
  58. sempy_labs/graph/__init__.py +33 -0
  59. sempy_labs/graph/_groups.py +402 -0
  60. sempy_labs/graph/_teams.py +113 -0
  61. sempy_labs/graph/_users.py +191 -0
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  64. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  65. sempy_labs/lakehouse/_lakehouse.py +101 -4
  66. sempy_labs/lakehouse/_shortcuts.py +42 -20
  67. sempy_labs/migration/__init__.py +4 -0
  68. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  73. sempy_labs/report/_download_report.py +8 -13
  74. sempy_labs/report/_generate_report.py +49 -46
  75. sempy_labs/report/_paginated.py +20 -26
  76. sempy_labs/report/_report_functions.py +50 -45
  77. sempy_labs/report/_report_list_functions.py +2 -0
  78. sempy_labs/report/_report_rebind.py +6 -10
  79. sempy_labs/report/_reportwrapper.py +187 -220
  80. sempy_labs/tom/_model.py +8 -5
  81. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  83. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
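The dominant change in this release is a mechanical refactor: module after module drops the hand-rolled FabricRestClient / status-code check / pagination() sequence in favor of a shared `_base_api` helper, and swaps bare `pd.DataFrame(columns=[...])` construction for `_create_dataframe` with a typed column spec. The helper bodies are not part of this diff, so the following is only a minimal sketch of the shape its call sites imply; the real implementation lives in sempy_labs/_helper_functions.py, the `lro_return_status_code` path (polling long-running operations) is omitted here, and the `continuationUri` key is an assumption about how its pagination works.

    import sempy.fabric as fabric
    from sempy.fabric.exceptions import FabricHTTPException


    def _base_api(request, method="get", payload=None, status_codes=200,
                  uses_pagination=False, lro_return_status_code=False):
        # Hypothetical reimplementation for illustration only.
        # status_codes: a single int, a list of ints, or None to skip the check.
        if isinstance(status_codes, int):
            status_codes = [status_codes]
        client = fabric.FabricRestClient()
        response = getattr(client, method)(request, json=payload)
        if status_codes is not None and response.status_code not in status_codes:
            raise FabricHTTPException(response)
        if not uses_pagination:
            return response
        # Follow continuation links until the service stops returning them
        # ('continuationUri' is assumed; the real helper may differ).
        pages = [response.json()]
        while pages[-1].get("continuationUri"):
            response = client.get(pages[-1]["continuationUri"])
            if response.status_code != 200:
                raise FabricHTTPException(response)
            pages.append(response.json())
        return pages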
sempy_labs/_deployment_pipelines.py

@@ -1,10 +1,12 @@
-import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
-    pagination,
+    _is_valid_uuid,
+    _base_api,
+    _update_dataframe_datatypes,
+    _create_dataframe,
 )
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def list_deployment_pipelines() -> pd.DataFrame:
@@ -19,18 +21,19 @@ def list_deployment_pipelines() -> pd.DataFrame:
         A pandas dataframe showing a list of deployment pipelines the user can access.
     """
 
-    df = pd.DataFrame(
-        columns=["Deployment Pipeline Id", "Deployment Pipeline Name", "Description"]
+    columns = {
+        "Deployment Pipeline Id": "string",
+        "Deployment Pipeline Name": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request="/v1/deploymentPipelines",
+        status_codes=200,
+        uses_pagination=True,
     )
 
-    client = fabric.FabricRestClient()
-    response = client.get("/v1/deploymentPipelines")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -43,7 +46,7 @@ def list_deployment_pipelines() -> pd.DataFrame:
     return df
 
 
-def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
+def list_deployment_pipeline_stages(deployment_pipeline: str | UUID) -> pd.DataFrame:
     """
     Shows the specified deployment pipeline stages.
 
@@ -51,8 +54,8 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
 
     Parameters
     ----------
-    deployment_pipeline : str
-        The deployment pipeline name.
+    deployment_pipeline : str | uuid.UUID
+        The deployment pipeline name or ID.
 
     Returns
     -------
@@ -62,28 +65,26 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
 
     from sempy_labs._helper_functions import resolve_deployment_pipeline_id
 
-    df = pd.DataFrame(
-        columns=[
-            "Deployment Pipeline Stage Id",
-            "Deployment Pipeline Stage Name",
-            "Order",
-            "Description",
-            "Workspace Id",
-            "Workspace Name",
-            "Public",
-        ]
-    )
+    columns = {
+        "Deployment Pipeline Stage Id": "string",
+        "Deployment Pipeline Stage Name": "string",
+        "Order": "int",
+        "Description": "string",
+        "Workspace Id": "string",
+        "Workspace Name": "string",
+        "Public": "bool",
+    }
+    df = _create_dataframe(columns=columns)
 
     deployment_pipeline_id = resolve_deployment_pipeline_id(
         deployment_pipeline=deployment_pipeline
     )
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
 
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages",
+        status_codes=200,
+        uses_pagination=True,
+    )
 
     for r in responses:
         for v in r.get("value", []):
@@ -98,14 +99,14 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
             }
             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-    df["Order"] = df["Order"].astype(int)
-    df["Public"] = df["Public"].astype(bool)
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
 
 def list_deployment_pipeline_stage_items(
-    deployment_pipeline: str, stage_name: str
+    deployment_pipeline: str | UUID,
+    stage: str | UUID,
 ) -> pd.DataFrame:
     """
     Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
@@ -114,10 +115,10 @@ def list_deployment_pipeline_stage_items(
 
     Parameters
     ----------
-    deployment_pipeline : str
-        The deployment pipeline name.
-    stage_name : str
-        The deployment pipeline stage name.
+    deployment_pipeline : str | uuid.UUID
+        The deployment pipeline name or ID.
+    stage : str | uuid.UUID
+        The deployment pipeline stage name or ID.
 
     Returns
     -------
@@ -127,39 +128,46 @@ def list_deployment_pipeline_stage_items(
 
     from sempy_labs._helper_functions import resolve_deployment_pipeline_id
 
-    df = pd.DataFrame(
-        columns=[
-            "Deployment Pipeline Stage Item Id",
-            "Deployment Pipeline Stage Item Name",
-            "Item Type",
-            "Source Item Id",
-            "Target Item Id",
-            "Last Deployment Time",
-        ]
-    )
+    columns = {
+        "Deployment Pipeline Stage Item Id": "string",
+        "Deployment Pipeline Stage Item Name": "string",
+        "Item Type": "string",
+        "Source Item Id": "string",
+        "Target Item Id": "string",
+        "Last Deployment Time": "string",
+    }
+    df = _create_dataframe(columns=columns)
 
     deployment_pipeline_id = resolve_deployment_pipeline_id(
        deployment_pipeline=deployment_pipeline
    )
-    dfPS = list_deployment_pipeline_stages(deployment_pipeline=deployment_pipeline)
-    dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Name"] == stage_name]
 
-    if len(dfPS_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{stage_name}' stage does not exist within the '{deployment_pipeline}' deployment pipeline."
+    def resolve_deployment_pipeline_stage_id(
+        deployment_pipeline_id: UUID, stage: str | UUID
+    ):
+
+        dfPS = list_deployment_pipeline_stages(
+            deployment_pipeline=deployment_pipeline_id
         )
-    stage_id = dfPS_filt["Deployment Pipeline Stage Id"].iloc[0]
 
-    client = fabric.FabricRestClient()
-    response = client.get(
-        f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/items"
+        if _is_valid_uuid(stage):
+            dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Id"] == stage]
+        else:
+            dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Name"] == stage]
+        if dfPS.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{stage}' stage does not exist within the '{deployment_pipeline}' deployment pipeline."
+            )
+        return dfPS_filt["Deployment Pipeline Stage Id"].iloc[0]
+
+    stage_id = resolve_deployment_pipeline_stage_id(deployment_pipeline_id, stage)
+
+    responses = _base_api(
+        request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/items",
+        status_codes=200,
+        uses_pagination=True,
    )
 
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -172,6 +180,4 @@ def list_deployment_pipeline_stage_items(
             }
             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-    df["Last Deployment Time"] = pd.to_datetime(df["Last Deployment Time"])
-
     return df
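Note that the column spec above carries a dtype per column ("Order": "int", "Public": "bool"), which lets a single generic `_update_dataframe_datatypes` call replace the per-column astype lines each function used to end with. Neither helper's body appears in this diff; a plausible sketch, with the dtype mapping invented for illustration:

    import pandas as pd


    def _create_dataframe(columns: dict) -> pd.DataFrame:
        # Empty frame whose column order follows the spec keys; dtypes are
        # applied later, once the frame actually has rows.
        return pd.DataFrame(columns=list(columns.keys()))


    def _update_dataframe_datatypes(dataframe: pd.DataFrame, column_map: dict):
        # One generic cast pass instead of scattered astype(int)/astype(bool).
        casts = {"int": "int64", "bool": "bool"}  # illustrative mapping
        for column, dtype in column_map.items():
            if dtype in casts and column in dataframe.columns:
                dataframe[column] = dataframe[column].astype(casts[dtype])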
sempy_labs/_environments.py

@@ -1,13 +1,12 @@
-import sempy.fabric as fabric
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    _print_success,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
 
 
@@ -35,20 +34,23 @@ def create_environment(
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    request_body = {"displayName": environment}
+    payload = {"displayName": environment}
 
     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/environments", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request="/v1/workspaces/{workspace_id}/environments",
+        method="post",
+        payload=payload,
+        status_codes=[201, 202],
+        lro_return_status_code=True,
     )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{environment}' environment has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=environment,
+        item_type="environment",
+        workspace_name=workspace_name,
+        action="created",
    )
 
 
@@ -71,16 +73,18 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing the environments within a workspace.
     """
 
-    df = pd.DataFrame(columns=["Environment Name", "Environment Id", "Description"])
+    columns = {
+        "Environment Name": "string",
+        "Environment Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/environments")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/environments", uses_pagination=True
+    )
 
     for r in responses:
         for v in r.get("value", []):
@@ -117,16 +121,15 @@ def delete_environment(environment: str, workspace: Optional[str | UUID] = None)
         environment=environment, workspace=workspace_id
     )
 
-    client = fabric.FabricRestClient()
-    response = client.delete(
-        f"/v1/workspaces/{workspace_id}/environments/{environment_id}"
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/environments/{environment_id}",
+        method="delete",
    )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{environment}' environment within the '{workspace_name}' workspace has been deleted."
+    _print_success(
+        item_name=environment,
+        item_type="environment",
+        workspace_name=workspace_name,
+        action="deleted",
    )
 
 
@@ -153,13 +156,13 @@ def publish_environment(environment: str, workspace: Optional[str | UUID] = None
         environment=environment, workspace=workspace_id
     )
 
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/environments/{environment_id}/staging/publish"
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/environments/{environment_id}/staging/publish",
+        method="post",
+        lro_return_status_code=True,
+        status_codes=None,
    )
 
-    lro(client, response)
-
     print(
         f"{icons.green_dot} The '{environment}' environment within the '{workspace_name}' workspace has been published."
    )
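`_print_success` centralizes the success messages that were previously inlined f-strings with slightly different word orders ("has been created within..." versus "within ... has been deleted"). Its body is not in this diff; a sketch consistent with its call sites, with the exact wording assumed:

    import sempy_labs._icons as icons


    def _print_success(item_name, item_type, workspace_name, action):
        # One message shape for created/deleted/published, replacing the old
        # hand-written variants.
        print(
            f"{icons.green_dot} The '{item_name}' {item_type} has been "
            f"{action} within the '{workspace_name}' workspace."
        )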
sempy_labs/_eventhouses.py

@@ -1,13 +1,13 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
 
 
@@ -33,20 +33,23 @@ def create_eventhouse(
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    request_body = {"displayName": name}
+    payload = {"displayName": name}
 
     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/eventhouses", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventhouses",
+        method="post",
+        status_codes=[201, 202],
+        payload=payload,
+        lro_return_status_code=True,
    )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' eventhouse has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="eventhouse",
+        workspace_name=workspace_name,
+        action="created",
    )
 
 
@@ -69,16 +72,18 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing the eventhouses within a workspace.
     """
 
-    df = pd.DataFrame(columns=["Eventhouse Name", "Eventhouse Id", "Description"])
+    columns = {
+        "Eventhouse Name": "string",
+        "Eventhouse Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/eventhouses")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventhouses", uses_pagination=True
+    )
 
     for r in responses:
         for v in r.get("value", []):
@@ -109,17 +114,12 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="Eventhouse", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/eventhouses/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' eventhouse within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="Eventhouse", workspace=workspace)
+
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="eventhouse",
+        workspace_name=workspace_name,
+        action="deleted",
    )
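From the caller's perspective nothing changes here except that deletion is now delegated to `fabric.delete_item`. Typical usage, assuming these functions are re-exported at the package top level as most sempy_labs functions are (the workspace and item names below are made up):

    import sempy_labs as labs

    labs.create_eventhouse(
        name="Telemetry",
        description="Streaming telemetry store",
        workspace="Analytics",
    )
    print(labs.list_eventhouses(workspace="Analytics"))
    labs.delete_eventhouse(name="Telemetry", workspace="Analytics")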
sempy_labs/_eventstreams.py

@@ -1,13 +1,13 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
 
 
@@ -30,16 +30,17 @@ def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing the eventstreams within a workspace.
     """
 
-    df = pd.DataFrame(columns=["Eventstream Name", "Eventstream Id", "Description"])
+    columns = {
+        "Eventstream Name": "string",
+        "Eventstream Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams", uses_pagination=True
+    )
 
     for r in responses:
         for v in r.get("value", []):
@@ -75,24 +76,27 @@ def create_eventstream(
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    request_body = {"displayName": name}
+    payload = {"displayName": name}
 
     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/eventstreams", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams",
+        method="post",
+        payload=payload,
+        status_codes=[201, 202],
+        lro_return_status_code=True,
    )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="eventstream",
+        workspace_name=workspace_name,
+        action="created",
    )
 
 
-def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
+def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric eventstream.
 
@@ -100,8 +104,8 @@ def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
 
     Parameters
     ----------
-    name: str
-        Name of the eventstream.
+    name: str | uuid.UUID
+        Name or ID of the eventstream.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -109,17 +113,12 @@ def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="Eventstream", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/eventstreams/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' eventstream within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="Eventstream", workspace=workspace)
+
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="eventstream",
+        workspace_name=workspace_name,
+        action="deleted",
    )
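Because `resolve_item_id` handles both forms, `delete_eventstream` now accepts the item's UUID as well as its display name, which spares callers a name lookup when they already hold the id. For example (all values below are placeholders):

    import sempy_labs as labs

    # By display name, as before:
    labs.delete_eventstream(name="ClickEvents", workspace="Analytics")

    # Or directly by item id:
    labs.delete_eventstream(
        name="1a2b3c4d-0000-0000-0000-000000000000",
        workspace="Analytics",
    )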
sempy_labs/_external_data_shares.py

@@ -5,9 +5,9 @@ from typing import Optional, List
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    pagination,
+    _base_api,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_external_data_share(
@@ -48,15 +48,12 @@ def create_external_data_share(
 
     payload = {"paths": paths, "recipient": {"userPrincipalName": recipient}}
 
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
-        json=payload,
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        method="post",
+        status_codes=201,
+        payload=payload,
    )
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
    )
@@ -92,14 +89,10 @@ def revoke_external_data_share(
         item_name=item_name, type=item_type, workspace=workspace_id
    )
 
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke"
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
+        method="post",
    )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
    )
@@ -135,32 +128,26 @@ def list_external_data_shares_in_item(
         item_name=item_name, type=item_type, workspace=workspace_id
    )
 
-    client = fabric.FabricRestClient()
-    response = client.get(
-        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares"
+    columns = {
+        "External Data Share Id": "string",
+        "Paths": "string",
+        "Creator Principal Id": "string",
+        "Creator Principal Type": "string",
+        "Recipient User Principal Name": "string",
+        "Status": "string",
+        "Expiration Time UTC": "string",
+        "Workspace Id": "string",
+        "Item Id": "string",
+        "Item Name": "string",
+        "Item Type": "string",
+        "Invitation URL": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        uses_pagination=True,
    )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    df = pd.DataFrame(
-        columns=[
-            "External Data Share Id",
-            "Paths",
-            "Creator Principal Id",
-            "Creater Principal Type",
-            "Recipient User Principal Name",
-            "Status",
-            "Expiration Time UTC",
-            "Workspace Id",
-            "Item Id",
-            "Item Name",
-            "Item Type",
-            "Invitation URL",
-        ]
-    )
-
-    responses = pagination(client, response)
     dfs = []
 
     for r in responses:
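The listing cuts off here, but the new `dfs = []` accumulator shows where this function is headed: `list_external_data_shares_in_item` now collects one single-row frame per record and concatenates once at the end, rather than calling pd.concat on every iteration. A sketch of the idiom, with stand-in field names and data:

    import pandas as pd

    pages = [{"value": [{"id": "a"}, {"id": "b"}]}]  # stand-in for _base_api output
    dfs = []
    for r in pages:
        for v in r.get("value", []):
            dfs.append(pd.DataFrame([{"External Data Share Id": v.get("id")}]))

    # Concatenating once avoids the quadratic copying of concat inside the loop.
    df = pd.concat(dfs, ignore_index=True) if dfs else pd.DataFrame()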