semantic-link-labs 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl
This diff compares the published contents of the two package versions as they appear in their public registry.
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
- sempy_labs/__init__.py +14 -12
- sempy_labs/_authentication.py +0 -2
- sempy_labs/_capacities.py +120 -142
- sempy_labs/_capacity_migration.py +61 -94
- sempy_labs/_clear_cache.py +9 -8
- sempy_labs/_connections.py +72 -105
- sempy_labs/_data_pipelines.py +47 -49
- sempy_labs/_dataflows.py +45 -51
- sempy_labs/_dax.py +228 -6
- sempy_labs/_delta_analyzer.py +303 -0
- sempy_labs/_deployment_pipelines.py +72 -66
- sempy_labs/_environments.py +39 -36
- sempy_labs/_eventhouses.py +35 -35
- sempy_labs/_eventstreams.py +38 -39
- sempy_labs/_external_data_shares.py +29 -42
- sempy_labs/_gateways.py +57 -101
- sempy_labs/_generate_semantic_model.py +22 -30
- sempy_labs/_git.py +46 -66
- sempy_labs/_graphQL.py +95 -0
- sempy_labs/_helper_functions.py +175 -30
- sempy_labs/_job_scheduler.py +47 -59
- sempy_labs/_kql_databases.py +27 -34
- sempy_labs/_kql_querysets.py +23 -30
- sempy_labs/_list_functions.py +262 -164
- sempy_labs/_managed_private_endpoints.py +52 -47
- sempy_labs/_mirrored_databases.py +110 -134
- sempy_labs/_mirrored_warehouses.py +13 -13
- sempy_labs/_ml_experiments.py +36 -36
- sempy_labs/_ml_models.py +37 -38
- sempy_labs/_model_dependencies.py +2 -0
- sempy_labs/_notebooks.py +28 -29
- sempy_labs/_one_lake_integration.py +2 -0
- sempy_labs/_query_scale_out.py +63 -81
- sempy_labs/_refresh_semantic_model.py +12 -14
- sempy_labs/_spark.py +54 -79
- sempy_labs/_sql.py +7 -11
- sempy_labs/_vertipaq.py +8 -3
- sempy_labs/_warehouses.py +30 -33
- sempy_labs/_workloads.py +15 -20
- sempy_labs/_workspace_identity.py +13 -17
- sempy_labs/_workspaces.py +49 -48
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +244 -281
- sempy_labs/admin/_domains.py +188 -103
- sempy_labs/admin/_external_data_share.py +26 -31
- sempy_labs/admin/_git.py +17 -22
- sempy_labs/admin/_items.py +34 -48
- sempy_labs/admin/_scanner.py +20 -13
- sempy_labs/directlake/_directlake_schema_compare.py +2 -0
- sempy_labs/directlake/_dl_helper.py +10 -11
- sempy_labs/directlake/_generate_shared_expression.py +4 -5
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
- sempy_labs/directlake/_warm_cache.py +2 -0
- sempy_labs/graph/__init__.py +33 -0
- sempy_labs/graph/_groups.py +402 -0
- sempy_labs/graph/_teams.py +113 -0
- sempy_labs/graph/_users.py +191 -0
- sempy_labs/lakehouse/__init__.py +4 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
- sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
- sempy_labs/lakehouse/_lakehouse.py +101 -4
- sempy_labs/lakehouse/_shortcuts.py +42 -20
- sempy_labs/migration/__init__.py +4 -0
- sempy_labs/migration/_direct_lake_to_import.py +66 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
- sempy_labs/report/_download_report.py +8 -13
- sempy_labs/report/_generate_report.py +49 -46
- sempy_labs/report/_paginated.py +20 -26
- sempy_labs/report/_report_functions.py +50 -45
- sempy_labs/report/_report_list_functions.py +2 -0
- sempy_labs/report/_report_rebind.py +6 -10
- sempy_labs/report/_reportwrapper.py +187 -220
- sempy_labs/tom/_model.py +8 -5
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
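Across nearly every module in this list, the same refactor appears: the 0.9.0 pattern of creating a `fabric.FabricRestClient`, checking `response.status_code` by hand, and paging through results with `pagination()` (or polling long-running operations with `lro()`) is replaced by a single `_base_api` helper in `sempy_labs._helper_functions`, while hand-built `pd.DataFrame(columns=[...])` scaffolding gives way to typed column maps passed to `_create_dataframe`. The sketch below contrasts the two call shapes; the 0.9.0 side is reconstructed from the fragments that survive in the hunks below (such as `responses = pagination(client, response)` and `raise FabricHTTPException(response)`), so treat it as an approximation rather than the exact removed code.

```python
import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException

workspace_id = "00000000-0000-0000-0000-000000000000"  # placeholder value

# 0.9.0-style call site (approximate reconstruction from surviving fragments):
client = fabric.FabricRestClient()
response = client.get(f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints")
if response.status_code != 200:
    raise FabricHTTPException(response)

# 0.9.2-style call site, matching the keyword arguments visible in the hunks
# below; _base_api wraps client creation, the status-code check, pagination,
# and long-running-operation handling:
from sempy_labs._helper_functions import _base_api

responses = _base_api(
    request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
    status_codes=200,
    uses_pagination=True,
)
```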
sempy_labs/_managed_private_endpoints.py

@@ -1,13 +1,13 @@
-import sempy.fabric as fabric
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    …
-    …
+    _is_valid_uuid,
+    _base_api,
+    _print_success,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -54,15 +54,18 @@ def create_managed_private_endpoint(
     )
     request_body["requestMessage"] = request_message

-    …
-    …
-    …
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
+        method="post",
+        status_codes=[201, 202],
+        payload=request_body,
+        lro_return_status_code=True,
     )
-    …
-    …
-    …
-    …
-    …
+    _print_success(
+        item_name=name,
+        item_type="managed private endpoint",
+        workspace_name=workspace_name,
+        action="created",
     )


@@ -87,26 +90,24 @@ def list_managed_private_endpoints(
         A pandas dataframe showing the managed private endpoints within a workspace.
     """

-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    )
+    columns = {
+        "Managed Private Endpoint Name": "string",
+        "Managed Private Endpoint Id": "string",
+        "Target Private Link Resource Id": "string",
+        "Provisioning State": "string",
+        "Connection Status": "string",
+        "Connection Description": "string",
+        "Target Subresource Type": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    …
-    …
-    …
-    …
-    …
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
+        uses_pagination=True,
+        status_codes=200,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -126,7 +127,7 @@ def list_managed_private_endpoints(


 def delete_managed_private_endpoint(
-    managed_private_endpoint: str, workspace: Optional[str | UUID] = None
+    managed_private_endpoint: str | UUID, workspace: Optional[str | UUID] = None
 ):
     """
     Deletes a Fabric managed private endpoint.
@@ -135,8 +136,8 @@ def delete_managed_private_endpoint(

     Parameters
     ----------
-    managed_private_endpoint: str
-        Name of the managed private endpoint.
+    managed_private_endpoint: str | uuid.UUID
+        Name or ID of the managed private endpoint.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -145,24 +146,28 @@ def delete_managed_private_endpoint(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    …
-    …
+    if _is_valid_uuid(managed_private_endpoint):
+        item_id = managed_private_endpoint
+    else:
+        df = list_managed_private_endpoints(workspace=workspace)
+        df_filt = df[df["Managed Private Endpoint Name"] == managed_private_endpoint]

-    …
-    …
-    …
-    …
+        if df_filt.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the '{workspace_name}' workspace."
+            )

-    …
+        item_id = df_filt["Managed Private Endpoint Id"].iloc[0]

-    …
-    …
-    …
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}",
+        method="delete",
+        status_codes=200,
     )

-    …
-    …
-    …
-    …
-    …
+    _print_success(
+        item_name=managed_private_endpoint,
+        item_type="managed private endpoint",
+        workspace_name=workspace_name,
+        action="deleted",
     )
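A second recurring change is visible in `delete_managed_private_endpoint` above and repeated throughout the release: item parameters widen from `str` to `str | UUID`, with `_is_valid_uuid` short-circuiting the name lookup when an ID is passed. A usage sketch, assuming the function is re-exported at the package level as in prior releases; the endpoint and workspace names are placeholders, not values taken from this diff:

```python
import sempy_labs as labs

# By name: resolved via list_managed_private_endpoints; raises ValueError
# if no endpoint with this display name exists in the workspace.
labs.delete_managed_private_endpoint("my_endpoint", workspace="My Workspace")

# By ID: _is_valid_uuid() recognizes the UUID string and skips the lookup.
labs.delete_managed_private_endpoint(
    "00000000-0000-0000-0000-000000000000", workspace="My Workspace"
)
```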
sempy_labs/_mirrored_databases.py

@@ -3,11 +3,13 @@ import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    pagination,
-    lro,
     _decode_b64,
+    _update_dataframe_datatypes,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 import sempy_labs._icons as icons
 import base64
 from uuid import UUID
@@ -32,26 +34,24 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFr
         A pandas dataframe showing the mirrored databases within a workspace.
     """

-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    )
+    columns = {
+        "Mirrored Database Name": "string",
+        "Mirrored Database Id": "string",
+        "Description": "string",
+        "OneLake Tables Path": "string",
+        "SQL Endpoint Connection String": "string",
+        "SQL Endpoint Id": "string",
+        "Provisioning Status": "string",
+        "Default Schema": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-    …
-    …
-    …
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases",
+        status_codes=200,
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -94,21 +94,19 @@ def create_mirrored_database(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    …
+    payload = {"displayName": name}

     if description:
-    …
+        payload["description"] = description

-    …
-    …
-    …
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases",
+        status_codes=201,
+        method="post",
+        payload=payload,
     )
-    …
-    …
-        raise FabricHTTPException(response)
-    …
-    print(
-        f"{icons.green_dot} The '{name}' mirrored database has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name, item_type="mirrored database", workspace_name=workspace_name
     )


@@ -130,27 +128,19 @@ def delete_mirrored_database(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    …
-    …
-    item_id = fabric.resolve_item_id(
-        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
-    …
-    …
-    …
-    …
-    …
-    …
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-    …
-    print(
-        f"{icons.green_dot} The '{mirrored_database}' mirrored database within the '{workspace_name}' workspace has been deleted."
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=mirrored_database,
+        item_type="mirrored database",
+        workspace_name=workspace,
     )


 def get_mirroring_status(
-    mirrored_database: str, workspace: Optional[str | UUID] = None
+    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
 ) -> str:
     """
     Get the status of the mirrored database.
@@ -159,8 +149,8 @@ def get_mirroring_status(

     Parameters
     ----------
-    mirrored_database: str
-        Name of the mirrored database.
+    mirrored_database: str | uuid.UUID
+        Name or ID of the mirrored database.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -173,24 +163,19 @@ def get_mirroring_status(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
-    …
-    …
-    …
-        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getMirroringStatus"
+    response = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getMirroringStatus",
+        status_codes=200,
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-    …
     return response.json().get("status", {})


 def get_tables_mirroring_status(
-    mirrored_database: str, workspace: Optional[str | UUID] = None
+    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Gets the mirroring status of the tables.
@@ -199,8 +184,8 @@ def get_tables_mirroring_status(

     Parameters
     ----------
-    mirrored_database: str
-        Name of the mirrored database.
+    mirrored_database: str | uuid.UUID
+        Name or ID of the mirrored database.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -213,31 +198,25 @@ def get_tables_mirroring_status(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
-    …
-    …
-    …
-    …
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getTablesMirroringStatus",
+        method="post",
+        status_codes=200,
+        uses_pagination=True,
     )

-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-        "Status",
-        "Processed Bytes",
-        "Processed Rows",
-        "Last Sync Date",
-    ]
-    )
+    columns = {
+        "Source Schema Name": "string",
+        "Source Table Name": "string",
+        "Status": "string",
+        "Processed Bytes": "int",
+        "Processed Rows": "int",
+        "Last Sync Date": "datetime",
+    }
+    df = _create_dataframe(columns=columns)

     for r in responses:
         for v in r.get("data", []):
@@ -253,14 +232,14 @@ def get_tables_mirroring_status(

         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-    …
-    df[int_cols] = df[int_cols].astype(int)
-    df["Last Sync Date"] = pd.to_datetime(df["Last Sync Date"])
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


-def start_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = None):
+def start_mirroring(
+    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
+):
     """
     Starts the mirroring for a database.

@@ -268,8 +247,8 @@ def start_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = No

     Parameters
     ----------
-    mirrored_database: str
-        Name of the mirrored database.
+    mirrored_database: str | uuid.UUID
+        Name or ID of the mirrored database.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -277,25 +256,23 @@ def start_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = No
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
-    …
-    …
-    …
-    …
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/startMirroring",
+        method="post",
+        status_codes=200,
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-    …
     print(
         f"{icons.green_dot} Mirroring has started for the '{mirrored_database}' database within the '{workspace_name}' workspace."
     )


-def stop_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = None):
+def stop_mirroring(
+    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
+):
     """
     Stops the mirroring for a database.

@@ -303,8 +280,8 @@ def stop_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = Non

     Parameters
     ----------
-    mirrored_database: str
-        Name of the mirrored database.
+    mirrored_database: str | uuid.UUID
+        Name or ID of the mirrored database.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -312,26 +289,24 @@ def stop_mirroring(mirrored_database: str, workspace: Optional[str | UUID] = Non
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
-    …
-    …
-    …
-    …
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/stopMirroring",
+        method="post",
+        status_codes=200,
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-    …
     print(
         f"{icons.green_dot} Mirroring has stopped for the '{mirrored_database}' database within the '{workspace_name}' workspace."
     )


 def get_mirrored_database_definition(
-    mirrored_database: str
+    mirrored_database: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    decode: bool = True,
 ) -> str:
     """
     Obtains the mirrored database definition.
@@ -340,8 +315,8 @@ def get_mirrored_database_definition(

     Parameters
     ----------
-    mirrored_database : str
-        The name of the mirrored database.
+    mirrored_database : str | uuid.UUID
+        The name or ID of the mirrored database.
     workspace : str | uuid.UUID, default=None
         The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -357,15 +332,16 @@ def get_mirrored_database_definition(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = …
-    …
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
-    …
-    …
-    …
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getDefinition",
+        method="post",
+        status_codes=200,
+        lro_return_json=True,
     )

-    result = lro(client, response).json()
     df_items = pd.json_normalize(result["definition"]["parts"])
     df_items_filt = df_items[df_items["path"] == "mirroredDatabase.json"]
     payload = df_items_filt["payload"].iloc[0]
@@ -379,7 +355,7 @@ def get_mirrored_database_definition(


 def update_mirrored_database_definition(
-    mirrored_database: str,
+    mirrored_database: str | UUID,
     mirrored_database_content: dict,
     workspace: Optional[str | UUID] = None,
 ):
@@ -388,8 +364,8 @@ def update_mirrored_database_definition(

     Parameters
     ----------
-    mirrored_database : str
-        The name of the mirrored database to be…
+    mirrored_database : str | uuid.UUID
+        The name or ID of the mirrored database to be updated.
     mirrored_database_content : dict
         The mirrored database definition (not in Base64 format).
     workspace : str | uuid.UUID, default=None
@@ -399,11 +375,10 @@ def update_mirrored_database_definition(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-    item_id = fabric.resolve_item_id(
-        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace_id
+    item_id = resolve_item_id(
+        item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
+    payload = base64.b64encode(mirrored_database_content)

     request_body = {
         "displayName": mirrored_database,
@@ -419,13 +394,14 @@ def update_mirrored_database_definition(
         },
     }

-    …
-        f"v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/updateDefinition",
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/updateDefinition",
+        method="post",
         json=request_body,
+        status_codes=None,
+        lro_return_status_code=True,
     )

-    lro(client, response, return_status_code=True)
-    …
     print(
         f"{icons.green_dot} The '{mirrored_database}' mirrored database was updated within the '{workspace_name}' workspace."
     )
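The `columns` maps introduced throughout `_mirrored_databases.py` pair each column name with a simple type label (`"string"`, `"int"`, `"datetime"`), which `_create_dataframe` and `_update_dataframe_datatypes` consume in place of the per-function `astype(int)` / `pd.to_datetime` calls that were removed. The helpers' actual implementations live in `sempy_labs._helper_functions` and are not part of this diff; the sketch below only illustrates the behavior implied by the call sites, with an assumed label-to-dtype mapping:

```python
import pandas as pd

# Assumed mapping from the diff's type labels to pandas dtypes.
_DTYPES = {"string": "string", "int": "Int64", "datetime": "datetime64[ns]"}

def create_dataframe_sketch(columns: dict) -> pd.DataFrame:
    # Build an empty frame with one typed (empty) Series per declared column.
    return pd.DataFrame(
        {name: pd.Series(dtype=_DTYPES[label]) for name, label in columns.items()}
    )

def update_dataframe_datatypes_sketch(dataframe: pd.DataFrame, column_map: dict) -> None:
    # Coerce populated columns in place, mirroring the removed inline
    # astype(int) and pd.to_datetime(...) calls.
    for name, label in column_map.items():
        if name not in dataframe.columns:
            continue
        if label == "int":
            dataframe[name] = dataframe[name].astype("Int64")
        elif label == "datetime":
            dataframe[name] = pd.to_datetime(dataframe[name])

columns = {"Status": "string", "Processed Rows": "int", "Last Sync Date": "datetime"}
df = create_dataframe_sketch(columns)
print(df.dtypes)
```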
sempy_labs/_mirrored_warehouses.py

@@ -1,11 +1,10 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    …
+    _base_api,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -28,21 +27,22 @@ def list_mirrored_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataF
         A pandas dataframe showing the mirrored warehouses within a workspace.
     """

-    …
-    …
-    …
+    columns = {
+        "Mirrored Warehouse Name": "string",
+        "Mirrored Warehouse Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    …
-    …
-    …
-    …
-    …
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mirroredWarehouses",
+        status_codes=200,
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
-            …
             new_data = {
                 "Mirrored Warehouse Name": v.get("displayName"),
                 "Mirrored Warehouse Id": v.get("id"),