semantic-link-labs 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
- sempy_labs/__init__.py +14 -12
- sempy_labs/_authentication.py +0 -2
- sempy_labs/_capacities.py +120 -142
- sempy_labs/_capacity_migration.py +61 -94
- sempy_labs/_clear_cache.py +9 -8
- sempy_labs/_connections.py +72 -105
- sempy_labs/_data_pipelines.py +47 -49
- sempy_labs/_dataflows.py +45 -51
- sempy_labs/_dax.py +228 -6
- sempy_labs/_delta_analyzer.py +303 -0
- sempy_labs/_deployment_pipelines.py +72 -66
- sempy_labs/_environments.py +39 -36
- sempy_labs/_eventhouses.py +35 -35
- sempy_labs/_eventstreams.py +38 -39
- sempy_labs/_external_data_shares.py +29 -42
- sempy_labs/_gateways.py +57 -101
- sempy_labs/_generate_semantic_model.py +22 -30
- sempy_labs/_git.py +46 -66
- sempy_labs/_graphQL.py +95 -0
- sempy_labs/_helper_functions.py +175 -30
- sempy_labs/_job_scheduler.py +47 -59
- sempy_labs/_kql_databases.py +27 -34
- sempy_labs/_kql_querysets.py +23 -30
- sempy_labs/_list_functions.py +262 -164
- sempy_labs/_managed_private_endpoints.py +52 -47
- sempy_labs/_mirrored_databases.py +110 -134
- sempy_labs/_mirrored_warehouses.py +13 -13
- sempy_labs/_ml_experiments.py +36 -36
- sempy_labs/_ml_models.py +37 -38
- sempy_labs/_model_dependencies.py +2 -0
- sempy_labs/_notebooks.py +28 -29
- sempy_labs/_one_lake_integration.py +2 -0
- sempy_labs/_query_scale_out.py +63 -81
- sempy_labs/_refresh_semantic_model.py +12 -14
- sempy_labs/_spark.py +54 -79
- sempy_labs/_sql.py +7 -11
- sempy_labs/_vertipaq.py +8 -3
- sempy_labs/_warehouses.py +30 -33
- sempy_labs/_workloads.py +15 -20
- sempy_labs/_workspace_identity.py +13 -17
- sempy_labs/_workspaces.py +49 -48
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +244 -281
- sempy_labs/admin/_domains.py +188 -103
- sempy_labs/admin/_external_data_share.py +26 -31
- sempy_labs/admin/_git.py +17 -22
- sempy_labs/admin/_items.py +34 -48
- sempy_labs/admin/_scanner.py +20 -13
- sempy_labs/directlake/_directlake_schema_compare.py +2 -0
- sempy_labs/directlake/_dl_helper.py +10 -11
- sempy_labs/directlake/_generate_shared_expression.py +4 -5
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
- sempy_labs/directlake/_warm_cache.py +2 -0
- sempy_labs/graph/__init__.py +33 -0
- sempy_labs/graph/_groups.py +402 -0
- sempy_labs/graph/_teams.py +113 -0
- sempy_labs/graph/_users.py +191 -0
- sempy_labs/lakehouse/__init__.py +4 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
- sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
- sempy_labs/lakehouse/_lakehouse.py +101 -4
- sempy_labs/lakehouse/_shortcuts.py +42 -20
- sempy_labs/migration/__init__.py +4 -0
- sempy_labs/migration/_direct_lake_to_import.py +66 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
- sempy_labs/report/_download_report.py +8 -13
- sempy_labs/report/_generate_report.py +49 -46
- sempy_labs/report/_paginated.py +20 -26
- sempy_labs/report/_report_functions.py +50 -45
- sempy_labs/report/_report_list_functions.py +2 -0
- sempy_labs/report/_report_rebind.py +6 -10
- sempy_labs/report/_reportwrapper.py +187 -220
- sempy_labs/tom/_model.py +8 -5
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
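Nearly every module in this release follows the same refactor: local `fabric.FabricRestClient()` construction and hand-rolled `if response.status_code != 200: raise FabricHTTPException(response)` checks are replaced by a shared `_base_api` helper from `sempy_labs/_helper_functions.py`, and ad-hoc dataframe setup in the `list_*` functions moves to `_create_dataframe` / `_update_dataframe_datatypes`. The helper implementations are not part of this diff; the sketch below only illustrates the calling contract implied by the call sites in the three files that follow (request path, HTTP method, optional JSON payload, accepted status codes, optional pagination) — it is not the library's actual code.

    import sempy.fabric as fabric
    from sempy.fabric.exceptions import FabricHTTPException

    def _base_api(request, method="get", payload=None, status_codes=200,
                  uses_pagination=False):
        # Illustrative sketch only -- the real helper lives in
        # sempy_labs/_helper_functions.py and also handles LRO polling.
        client = fabric.FabricRestClient()
        if isinstance(status_codes, int):
            status_codes = [status_codes]
        if method == "get":
            response = client.get(request)
        else:
            response = getattr(client, method)(request, json=payload)
        if status_codes is not None and response.status_code not in status_codes:
            raise FabricHTTPException(response)
        # Call sites with uses_pagination=True iterate over page dicts, so a
        # paginated variant would follow continuation tokens and return a list.
        return [response.json()] if uses_pagination else response

Centralizing the client is also why each file's import block shrinks below: the per-module `FabricHTTPException` and `pagination` imports disappear along with the boilerplate.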
sempy_labs/_gateways.py
CHANGED
@@ -1,14 +1,14 @@
-import sempy.fabric as fabric
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional
-from sempy.fabric.exceptions import FabricHTTPException
 from sempy_labs._helper_functions import (
-    pagination,
     _is_valid_uuid,
     resolve_capacity_id,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -27,28 +27,21 @@ def list_gateways() -> pd.DataFrame:
     A pandas dataframe showing a list of all gateways the user has permission for, including on-premises, on-premises (personal mode), and virtual network gateways.
     """

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        "Version",
-        "Number Of Member Gateways",
-        "Load Balancing Setting",
-        "Allow Cloud Connection Refresh",
-        "Allow Custom Connectors",
-    ]
-    )
+    columns = {
+        "Gateway Name": "string",
+        "Gateway Id": "string",
+        "Type": "string",
+        "Public Key Exponent": "string",
+        "Public Key Modulus": "string",
+        "Version": "string",
+        "Number Of Member Gateways": "int",
+        "Load Balancing Setting": "string",
+        "Allow Cloud Connection Refresh": "bool",
+        "Allow Custom Connectors": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(request="/v1/gateways", uses_pagination=True)

     for r in responses:
         for v in r.get("value", []):
@@ -67,10 +60,7 @@ def list_gateways() -> pd.DataFrame:

         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-
-    bool_cols = ["Allow Cloud Connection Refresh", "Allow Custom Connectors"]
-    df[bool_cols] = df[bool_cols].astype(bool)
-    df[int_cols] = df[int_cols].astype(int)
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -102,12 +92,7 @@ def delete_gateway(gateway: str | UUID):
     """

     gateway_id = _resolve_gateway_id(gateway)
-
-    response = client.delete(f"/v1/gateways/{gateway_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(request=f"/v1/gateways/{gateway_id}", method="delete")
     print(f"{icons.green_dot} The '{gateway}' gateway has been deleted.")


@@ -128,16 +113,17 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
     A pandas dataframe showing a list of gateway role assignments.
     """

+    columns = {
+        "Gateway Role Assignment Id": "string",
+        "Principal Id": "string",
+        "Principal Type": "string",
+        "Role": "string",
+    }
+    df = _create_dataframe(columns=columns)
     gateway_id = _resolve_gateway_id(gateway)
-
-
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    df = pd.DataFrame(columns=[])
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/gateways/{gateway_id}/roleAssignments", uses_pagination=True
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -153,7 +139,7 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
     return df


-def delete_gateway_role_assignment(gateway: str | UUID,
+def delete_gateway_role_assignment(gateway: str | UUID, role_assignment_id: UUID):
     """
     Delete the specified role assignment for the gateway.

@@ -163,21 +149,18 @@ def delete_gateway_role_assignment(gateway: str | UUID, role_assignement_id: UUID):
     ----------
     gateway : str | uuid.UUID
         The gateway name or ID.
-
+    role_assignment_id : uuid.UUID
         The role assignment ID.
     """

     gateway_id = _resolve_gateway_id(gateway)
-
-
-
+    _base_api(
+        request=f"/v1/gateways/{gateway_id}/roleAssignments/{role_assignment_id}",
+        method="delete",
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
-        f"{icons.green_dot} The '{
+        f"{icons.green_dot} The '{role_assignment_id}' role assignment for the '{gateway}' gateway has been deleted."
     )


@@ -217,12 +200,7 @@ def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):
         gateway=gateway_id, gateway_member=gateway_member
     )

-
-    response = client.delete(f"/v1/gateways/{gateway_id}/members/{member_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(request=f"/v1/gateways/{gateway_id}/members/{member_id}", method="delete")
     print(
         f"{icons.green_dot} The '{member_id}' member for the '{gateway}' gateway has been deleted."
     )
@@ -246,22 +224,18 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
     """

     gateway_id = _resolve_gateway_id(gateway)
-
-
-
-
-
-
-
-
-
-
-
-
-        "Version",
-        "Enabled",
-    ]
-    )
+
+    columns = {
+        "Member Id": "string",
+        "Member Name": "string",
+        "Public Key Exponent": "string",
+        "Public Key Modulus": "string",
+        "Version": "string",
+        "Enabled": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(request=f"/v1/gateways/{gateway_id}/members")

     for v in response.json().get("value", []):
         new_data = {
@@ -275,8 +249,7 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:

         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-
-    df[bool_cols] = df[bool_cols].astype(bool)
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -316,8 +289,6 @@ def create_vnet_gateway(
         The name of the subnet.
     """

-    client = fabric.FabricRestClient()
-
     capacity_id = resolve_capacity_id(capacity)
     payload = {
         "type": "VirtualNetwork",
@@ -332,10 +303,8 @@ def create_vnet_gateway(
         "inactivityMinutesBeforeSleep": inactivity_minutes_before_sleep,
         "numberOfMemberGateways": number_of_member_gateways,
     }
-    response = client.post("/v1/gateways", json=payload)

-
-        raise FabricHTTPException(response)
+    _base_api(request="/v1/gateways", method="post", payload=payload, status_codes=201)

     print(
         f"{icons.green_dot} The '{name}' gateway was created within the '{capacity}' capacity."
@@ -383,11 +352,7 @@ def update_on_premises_gateway(

     payload["type"] = "OnPremises"

-
-    response = client.patch(f"/v1/gateways/{gateway_id}", json=payload)
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    _base_api(request=f"/v1/gateways/{gateway_id}", method="patch", payload=payload)

     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")

@@ -434,12 +399,7 @@ def update_vnet_gateway(

     payload["type"] = "VirtualNetwork"

-
-    response = client.patch(f"/v1/gateways/{gateway_id}", json=payload)
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(request=f"/v1/gateways/{gateway_id}", method="patch", payload=payload)
     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")


@@ -473,15 +433,11 @@ def bind_semantic_model_to_gateway(
         "gatewayObjectId": gateway_id,
     }

-
-
-
-
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
+        method="post",
+        payload=payload,
     )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been binded to the '{gateway_id}' gateway."
     )
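A second recurring pattern shows up in the list_* functions above: instead of hand-writing one column list when creating the frame and then re-listing bool/int columns for casting at the end, each function now declares a single column-to-dtype map and reuses it for both steps. That also retires a latent bug visible in the first hunk: 0.9.0's `list_gateways` cast `df[int_cols]` without `int_cols` being defined anywhere in the removed lines shown, which would raise a NameError at runtime. The real helpers live in `_helper_functions.py` and are not included in this diff; the sketch below is a minimal illustration of the contract the call sites imply, and the label-to-pandas-dtype mapping is an assumption.

    import pandas as pd

    _DTYPES = {"string": "string", "int": "int64", "bool": "bool"}  # assumed mapping

    def _create_dataframe(columns: dict) -> pd.DataFrame:
        # Empty frame with one column per key; dtypes are applied later, once
        # rows exist, because an all-NaN object column casts poorly up front.
        return pd.DataFrame(columns=list(columns.keys()))

    def _update_dataframe_datatypes(dataframe: pd.DataFrame, column_map: dict) -> None:
        # Cast each declared column in place after all rows have been appended.
        for name, kind in column_map.items():
            if name in dataframe.columns:
                dataframe[name] = dataframe[name].astype(_DTYPES.get(kind, kind))

With the map declared up front, adding a column means touching one dict instead of three scattered lists.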
sempy_labs/_generate_semantic_model.py
CHANGED

@@ -10,7 +10,7 @@ from sempy_labs._helper_functions import (
     resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
-
+    _base_api,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -145,18 +145,16 @@ def create_semantic_model_from_bim(
     dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]

-    if
+    if not dfI_filt.empty:
         raise ValueError(
             f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace_name}' workspace."
         )

-    client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
-
     payloadPBIDefinition = _conv_b64(defPBIDataset)
     payloadBim = _conv_b64(bim_file)

-
+    payload = {
         "displayName": dataset,
         "definition": {
             "parts": [
@@ -174,13 +172,14 @@ def create_semantic_model_from_bim(
         },
     }

-
-    f"
-
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}/semanticModels",
+        payload=payload,
+        method="post",
+        lro_return_status_code=True,
+        status_codes=[201, 202],
     )

-    lro(client, response, status_codes=[201, 202])
-
     print(
         f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
     )
@@ -210,13 +209,11 @@ def update_semantic_model_from_bim(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
-
     payloadPBIDefinition = _conv_b64(defPBIDataset)
     payloadBim = _conv_b64(bim_file)

-
+    payload = {
         "displayName": dataset_name,
         "definition": {
             "parts": [
@@ -234,13 +231,14 @@ def update_semantic_model_from_bim(
         },
     }

-
-    f"
-
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/updateDefinition",
+        payload=payload,
+        method="post",
+        lro_return_status_code=True,
+        status_codes=None,
     )

-    lro(client, response, status_codes=[200, 202], return_status_code=True)
-
     print(
         f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
     )
@@ -333,7 +331,6 @@ def get_semantic_model_bim(
     dataset: str | UUID,
     workspace: Optional[str | UUID] = None,
     save_to_file_name: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
 ) -> dict:
     """
     Extracts the Model.bim file for a given semantic model.
@@ -348,10 +345,6 @@ def get_semantic_model_bim(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
         If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace name in which the lakehouse attached to the workspace resides.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
@@ -375,9 +368,7 @@ def get_semantic_model_bim(
             f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
         )

-
-        lake_workspace = fabric.resolve_workspace_name()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
+        lakehouse = resolve_lakehouse_name()
         folderPath = "/lakehouse/default/Files"
         fileExt = ".bim"
         if not save_to_file_name.endswith(fileExt):
@@ -437,11 +428,12 @@ def get_semantic_model_definition(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-
-
-
+    result = _base_api(
+        request=f"v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
+        method="post",
+        lro_return_json=True,
+        status_codes=None,
     )
-    result = lro(client, response).json()

     files = result["definition"]["parts"]

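The semantic-model endpoints above are long-running operations: the service may answer 202 Accepted and finish the work asynchronously. In 0.9.0 each module called a separate `lro(client, response, ...)` helper after every request; 0.9.2 folds that into `_base_api` behind the `lro_return_json` / `lro_return_status_code` flags. Below is a minimal sketch of the polling such a flag implies, assuming the operation URL arrives in the `Location` header and keeps answering 202 until the work completes; the real logic in `_helper_functions.py` is not shown in this diff.

    import time

    def _poll_lro(client, response, lro_return_json=False):
        # A 202 response means the request became a long-running operation;
        # poll the URL from its Location header until a terminal answer arrives.
        while response.status_code == 202:
            time.sleep(int(response.headers.get("Retry-After", "1")))
            response = client.get(response.headers["Location"])
        return response.json() if lro_return_json else response.status_code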
sempy_labs/_git.py
CHANGED
@@ -1,12 +1,10 @@
-import sempy.fabric as fabric
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-
+    _base_api,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -43,7 +41,7 @@ def connect_workspace_to_azure_dev_ops(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-
+    payload = {
         "gitProviderDetails": {
             "organizationName": organization_name,
             "projectName": project_name,
@@ -54,12 +52,11 @@ def connect_workspace_to_azure_dev_ops(
         }
     }

-
-
-
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/connect",
+        payload=payload,
+        method="post",
     )
-    if response.status_code != 200:
-        raise FabricHTTPException(response)

     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been connected to the '{project_name}' Git project in Azure DevOps within the '{repository_name}' repository."
@@ -102,7 +99,7 @@ def connect_workspace_to_github(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-
+    payload = {
         "gitProviderDetails": {
             "ownerName": owner_name,
             "gitProviderType": "GitHub",
@@ -116,12 +113,11 @@ def connect_workspace_to_github(
         },
     }

-
-
-
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/connect",
+        payload=payload,
+        method="post",
     )
-    if response.status_code != 200:
-        raise FabricHTTPException(response)

     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been connected to the '{repository_name}' GitHub repository."
@@ -144,10 +140,7 @@ def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-
-    response = client.post(f"/v1/workspaces/{workspace_id}/git/disconnect")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    _base_api(request=f"/v1/workspaces/{workspace_id}/git/disconnect", method="post")

     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been disconnected from Git."
@@ -189,13 +182,11 @@ def get_git_status(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         ]
     )

-
-
-
-
-
-
-    result = lro(client, response).json()
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/status",
+        lro_return_json=True,
+        status_codes=None,
+    )

     for changes in result.get("changes", []):
         item_metadata = changes.get("itemMetadata", {})
@@ -252,11 +243,7 @@ def get_git_connection(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         ]
     )

-
-    response = client.get(f"/v1/workspaces/{workspace_id}/git/connection")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    response = _base_api(request=f"/v1/workspaces/{workspace_id}/git/connection")

     r = response.json()
     provider_details = r.get("gitProviderDetails", {})
@@ -298,19 +285,18 @@ def initialize_git_connection(workspace: Optional[str | UUID] = None) -> str:

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-
-
-
-
-
-
-    lro(client, response)
+    response_json = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/initializeConnection",
+        method="post",
+        lro_return_json=True,
+        status_codes=None,
+    )

     print(
         f"{icons.green_dot} The '{workspace_name}' workspace git connection has been initialized."
     )

-    return
+    return response_json.get("remoteCommitHash")


 def commit_to_git(
@@ -350,26 +336,23 @@ def commit_to_git(
     if isinstance(item_ids, str):
         item_ids = [item_ids]

-
+    payload = {
         "mode": commit_mode,
         "workspaceHead": workspace_head,
         "comment": comment,
     }

     if item_ids is not None:
-
-
-
-
-
-
+        payload["items"] = [{"objectId": item_id} for item_id in item_ids]
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/commitToGit",
+        method="post",
+        payload=payload,
+        lro_return_status_code=True,
+        status_codes=None,
     )

-    if response.status_code not in [200, 202]:
-        raise FabricHTTPException(response)
-
-    lro(client=client, response=response, return_status_code=True)
-
     if commit_mode == "All":
         print(
             f"{icons.green_dot} All items within the '{workspace_name}' workspace have been committed to Git."
@@ -426,29 +409,26 @@ def update_from_git(
             f"{icons.red_dot} Invalid conflict resolution policy. Valid options: {conflict_resolution_policies}."
         )

-
-
-
+    payload = {}
+    payload["remoteCommitHash"] = remote_commit_hash
+    payload["conflictResolution"] = {
         "conflictResolutionType": "Workspace",
         "conflictResolutionPolicy": conflict_resolution_policy,
     }

     if workspace_head is not None:
-
+        payload["workspaceHead"] = workspace_head
     if allow_override is not None:
-
-
-
-
-
-
+        payload["options"] = {"allowOverrideItems": allow_override}
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/updateFromGit",
+        method="post",
+        payload=payload,
+        lro_return_status_code=True,
+        status_codes=None,
     )

-    if response.status_code not in [200, 202]:
-        raise FabricHTTPException(response)
-
-    lro(client, response, return_status_code=True)
-
     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been updated with commits pushed to the connected branch."
     )