semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic. Click here for more details.
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +18 -2
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +49 -43
- sempy_labs/__init__.py +18 -3
- sempy_labs/_capacities.py +22 -127
- sempy_labs/_capacity_migration.py +8 -7
- sempy_labs/_dashboards.py +60 -0
- sempy_labs/_data_pipelines.py +5 -31
- sempy_labs/_environments.py +20 -48
- sempy_labs/_eventhouses.py +22 -52
- sempy_labs/_eventstreams.py +16 -34
- sempy_labs/_gateways.py +4 -4
- sempy_labs/_generate_semantic_model.py +0 -1
- sempy_labs/_git.py +90 -1
- sempy_labs/_graphQL.py +3 -20
- sempy_labs/_helper_functions.py +171 -43
- sempy_labs/_kql_databases.py +19 -34
- sempy_labs/_kql_querysets.py +15 -32
- sempy_labs/_list_functions.py +12 -155
- sempy_labs/_mirrored_databases.py +14 -48
- sempy_labs/_ml_experiments.py +5 -30
- sempy_labs/_ml_models.py +4 -28
- sempy_labs/_model_bpa.py +2 -0
- sempy_labs/_mounted_data_factories.py +119 -0
- sempy_labs/_notebooks.py +16 -26
- sempy_labs/_sql.py +7 -6
- sempy_labs/_utils.py +42 -0
- sempy_labs/_vertipaq.py +17 -2
- sempy_labs/_warehouses.py +5 -17
- sempy_labs/_workloads.py +23 -9
- sempy_labs/_workspaces.py +13 -5
- sempy_labs/admin/__init__.py +21 -1
- sempy_labs/admin/_apps.py +1 -1
- sempy_labs/admin/_artifacts.py +62 -0
- sempy_labs/admin/_basic_functions.py +0 -52
- sempy_labs/admin/_capacities.py +61 -0
- sempy_labs/admin/_reports.py +74 -0
- sempy_labs/admin/_shared.py +4 -2
- sempy_labs/admin/_users.py +133 -0
- sempy_labs/admin/_workspaces.py +148 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +9 -1
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +6 -7
- sempy_labs/lakehouse/_shortcuts.py +192 -53
- sempy_labs/report/_generate_report.py +9 -17
- sempy_labs/report/_report_bpa.py +12 -19
- sempy_labs/tom/_model.py +34 -16
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_helper_functions.py
CHANGED
|
@@ -30,6 +30,11 @@ def _build_url(url: str, params: dict) -> str:
|
|
|
30
30
|
return url
|
|
31
31
|
|
|
32
32
|
|
|
33
|
+
def _encode_user(user: str) -> str:
|
|
34
|
+
|
|
35
|
+
return urllib.parse.quote(user, safe="@")
|
|
36
|
+
|
|
37
|
+
|
|
33
38
|
def create_abfss_path(
|
|
34
39
|
lakehouse_id: UUID,
|
|
35
40
|
lakehouse_workspace_id: UUID,
|
|
@@ -130,14 +135,16 @@ def create_relationship_name(
|
|
|
130
135
|
)
|
|
131
136
|
|
|
132
137
|
|
|
133
|
-
def resolve_report_id(
|
|
138
|
+
def resolve_report_id(
|
|
139
|
+
report: str | UUID, workspace: Optional[str | UUID] = None
|
|
140
|
+
) -> UUID:
|
|
134
141
|
"""
|
|
135
142
|
Obtains the ID of the Power BI report.
|
|
136
143
|
|
|
137
144
|
Parameters
|
|
138
145
|
----------
|
|
139
|
-
report : str
|
|
140
|
-
The name of the Power BI report.
|
|
146
|
+
report : str | uuid.UUID
|
|
147
|
+
The name or ID of the Power BI report.
|
|
141
148
|
workspace : str | uuid.UUID, default=None
|
|
142
149
|
The Fabric workspace name or ID.
|
|
143
150
|
Defaults to None which resolves to the workspace of the attached lakehouse
|
|
@@ -145,11 +152,11 @@ def resolve_report_id(report: str, workspace: Optional[str | UUID] = None) -> UU
|
|
|
145
152
|
|
|
146
153
|
Returns
|
|
147
154
|
-------
|
|
148
|
-
UUID
|
|
155
|
+
uuid.UUID
|
|
149
156
|
The ID of the Power BI report.
|
|
150
157
|
"""
|
|
151
158
|
|
|
152
|
-
return
|
|
159
|
+
return resolve_item_id(item=report, type="Report", workspace=workspace)
|
|
153
160
|
|
|
154
161
|
|
|
155
162
|
def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None) -> str:
|
|
@@ -176,6 +183,127 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
|
|
|
176
183
|
)
|
|
177
184
|
|
|
178
185
|
|
|
186
|
+
def delete_item(
    item: str | UUID, type: str, workspace: Optional[str | UUID] = None
) -> None:
    """
    Deletes an item from a Fabric workspace.

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item to be deleted.
    type : str
        The type of the item to be deleted.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    from sempy_labs._utils import item_types

    # Resolve display names and IDs up front so the confirmation message
    # can show friendly names even when callers pass UUIDs.
    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    item_name, item_id = resolve_item_name_and_id(item, type, workspace_id)
    friendly_type = item_types.get(type)[0].lower()

    fabric.delete_item(item_id=item_id, workspace=workspace_id)

    confirmation = (
        f"{icons.green_dot} The '{item_name}' {friendly_type} has been successfully "
        f"deleted from the '{workspace_name}' workspace."
    )
    print(confirmation)
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def create_item(
    name: str,
    type: str,
    description: Optional[str] = None,
    definition: Optional[dict] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Creates an item in a Fabric workspace.

    Parameters
    ----------
    name : str
        The name of the item to be created.
    type : str
        The type of the item to be created.
    description : str, default=None
        A description of the item to be created.
    definition : dict, default=None
        The definition of the item to be created.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """
    from sempy_labs._utils import item_types

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    # item_types maps a type key to (friendly name, REST URL segment, ...).
    item_type = item_types.get(type)[0].lower()
    item_type_url = item_types.get(type)[1]

    payload = {
        "displayName": name,
    }
    # Only include optional fields when provided; the API rejects nulls.
    if description:
        payload["description"] = description
    if definition:
        payload["definition"] = definition

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
        method="post",
        payload=payload,
        status_codes=[201, 202],
        lro_return_status_code=True,
    )
    print(
        # Fixed duplicated words in the confirmation message ("within the in the").
        f"{icons.green_dot} The '{name}' {item_type} has been successfully created within the '{workspace_name}' workspace."
    )
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def get_item_definition(
    item: str | UUID,
    type: str,
    workspace: Optional[str | UUID] = None,
    format: Optional[str] = None,
    return_dataframe: bool = True,
    decode: bool = True,
):
    """
    Obtains the definition of a Fabric item.

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item.
    type : str
        The type of the item.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    format : str, default=None
        Optional definition format, passed to the getDefinition API as a
        query parameter.
    return_dataframe : bool, default=True
        If True, returns a dataframe of all definition parts.
    decode : bool, default=True
        If True, base64-decodes the payload of the part matching the item
        type's canonical path and parses it as JSON; if False, returns the
        raw base64 payload.
    """
    from sempy_labs._utils import item_types

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(item, type, workspace_id)
    # item_types maps a type key to (friendly name, REST URL segment, definition part path).
    item_type_url = item_types.get(type)[1]
    path = item_types.get(type)[2]

    url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
    if format:
        url += f"?format={format}"

    result = _base_api(
        request=url,
        method="post",
        status_codes=None,
        lro_return_json=True,
    )

    if return_dataframe:
        return pd.json_normalize(result["definition"]["parts"])

    value = next(
        p.get("payload") for p in result["definition"]["parts"] if p.get("path") == path
    )
    if decode:
        # Bug fix: the decoded JSON was previously computed but never
        # returned, so decode=True always yielded None.
        return json.loads(_decode_b64(value))
    else:
        return value
|
|
305
|
+
|
|
306
|
+
|
|
179
307
|
def resolve_item_id(
|
|
180
308
|
item: str | UUID, type: str, workspace: Optional[str] = None
|
|
181
309
|
) -> UUID:
|
|
@@ -268,14 +396,7 @@ def resolve_dataset_id(
|
|
|
268
396
|
The ID of the semantic model.
|
|
269
397
|
"""
|
|
270
398
|
|
|
271
|
-
|
|
272
|
-
dataset_id = dataset
|
|
273
|
-
else:
|
|
274
|
-
dataset_id = fabric.resolve_item_id(
|
|
275
|
-
item_name=dataset, type="SemanticModel", workspace=workspace
|
|
276
|
-
)
|
|
277
|
-
|
|
278
|
-
return dataset_id
|
|
399
|
+
return resolve_item_id(item=dataset, type="SemanticModel", workspace=workspace)
|
|
279
400
|
|
|
280
401
|
|
|
281
402
|
def resolve_dataset_name(
|
|
@@ -543,8 +664,6 @@ def save_as_delta_table(
|
|
|
543
664
|
f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
|
|
544
665
|
)
|
|
545
666
|
|
|
546
|
-
dataframe.columns = [col.replace(" ", "_") for col in dataframe.columns]
|
|
547
|
-
|
|
548
667
|
spark = _create_spark_session()
|
|
549
668
|
|
|
550
669
|
type_mapping = {
|
|
@@ -562,6 +681,7 @@ def save_as_delta_table(
|
|
|
562
681
|
}
|
|
563
682
|
|
|
564
683
|
if isinstance(dataframe, pd.DataFrame):
|
|
684
|
+
dataframe.columns = [col.replace(" ", "_") for col in dataframe.columns]
|
|
565
685
|
if schema is None:
|
|
566
686
|
spark_df = spark.createDataFrame(dataframe)
|
|
567
687
|
else:
|
|
@@ -573,6 +693,9 @@ def save_as_delta_table(
|
|
|
573
693
|
)
|
|
574
694
|
spark_df = spark.createDataFrame(dataframe, schema_map)
|
|
575
695
|
else:
|
|
696
|
+
for col_name in dataframe.columns:
|
|
697
|
+
new_name = col_name.replace(" ", "_")
|
|
698
|
+
dataframe = dataframe.withColumnRenamed(col_name, new_name)
|
|
576
699
|
spark_df = dataframe
|
|
577
700
|
|
|
578
701
|
filePath = create_abfss_path(
|
|
@@ -643,7 +766,7 @@ def resolve_workspace_name_and_id(
|
|
|
643
766
|
|
|
644
767
|
Returns
|
|
645
768
|
-------
|
|
646
|
-
str,
|
|
769
|
+
str, uuid.UUID
|
|
647
770
|
The name and ID of the Fabric workspace.
|
|
648
771
|
"""
|
|
649
772
|
|
|
@@ -908,14 +1031,14 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
|
|
|
908
1031
|
return dfC_filt["Display Name"].iloc[0]
|
|
909
1032
|
|
|
910
1033
|
|
|
911
|
-
def resolve_capacity_id(
|
|
1034
|
+
def resolve_capacity_id(capacity: Optional[str | UUID] = None, **kwargs) -> UUID:
|
|
912
1035
|
"""
|
|
913
1036
|
Obtains the capacity Id for a given capacity name.
|
|
914
1037
|
|
|
915
1038
|
Parameters
|
|
916
1039
|
----------
|
|
917
|
-
|
|
918
|
-
The capacity name.
|
|
1040
|
+
capacity : str | uuid.UUID, default=None
|
|
1041
|
+
The capacity name or ID.
|
|
919
1042
|
Defaults to None which resolves to the capacity id of the workspace of the attached lakehouse
|
|
920
1043
|
or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook.
|
|
921
1044
|
|
|
@@ -925,16 +1048,22 @@ def resolve_capacity_id(capacity_name: Optional[str] = None) -> UUID:
|
|
|
925
1048
|
The capacity Id.
|
|
926
1049
|
"""
|
|
927
1050
|
|
|
928
|
-
if capacity_name
|
|
1051
|
+
if "capacity_name" in kwargs:
|
|
1052
|
+
capacity = kwargs["capacity_name"]
|
|
1053
|
+
print(
|
|
1054
|
+
f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
|
|
1055
|
+
)
|
|
1056
|
+
|
|
1057
|
+
if capacity is None:
|
|
929
1058
|
return get_capacity_id()
|
|
1059
|
+
if _is_valid_uuid(capacity):
|
|
1060
|
+
return capacity
|
|
930
1061
|
|
|
931
1062
|
dfC = fabric.list_capacities()
|
|
932
|
-
dfC_filt = dfC[dfC["Display Name"] ==
|
|
1063
|
+
dfC_filt = dfC[dfC["Display Name"] == capacity]
|
|
933
1064
|
|
|
934
1065
|
if dfC_filt.empty:
|
|
935
|
-
raise ValueError(
|
|
936
|
-
f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
|
|
937
|
-
)
|
|
1066
|
+
raise ValueError(f"{icons.red_dot} The '{capacity}' capacity does not exist.")
|
|
938
1067
|
|
|
939
1068
|
return dfC_filt["Id"].iloc[0]
|
|
940
1069
|
|
|
@@ -1097,12 +1226,7 @@ def resolve_warehouse_id(
|
|
|
1097
1226
|
The warehouse Id.
|
|
1098
1227
|
"""
|
|
1099
1228
|
|
|
1100
|
-
|
|
1101
|
-
return warehouse
|
|
1102
|
-
else:
|
|
1103
|
-
return fabric.resolve_item_id(
|
|
1104
|
-
item_name=warehouse, type="Warehouse", workspace=workspace
|
|
1105
|
-
)
|
|
1229
|
+
return resolve_item_id(item=warehouse, type="Warehouse", workspace=workspace)
|
|
1106
1230
|
|
|
1107
1231
|
|
|
1108
1232
|
def get_language_codes(languages: str | List[str]):
|
|
@@ -1162,14 +1286,14 @@ def convert_to_alphanumeric_lowercase(input_string):
|
|
|
1162
1286
|
|
|
1163
1287
|
|
|
1164
1288
|
def resolve_environment_id(
|
|
1165
|
-
environment: str, workspace: Optional[str | UUID] = None
|
|
1289
|
+
environment: str | UUID, workspace: Optional[str | UUID] = None
|
|
1166
1290
|
) -> UUID:
|
|
1167
1291
|
"""
|
|
1168
1292
|
Obtains the environment Id for a given environment.
|
|
1169
1293
|
|
|
1170
1294
|
Parameters
|
|
1171
1295
|
----------
|
|
1172
|
-
environment: str
|
|
1296
|
+
environment: str | uuid.UUID
|
|
1173
1297
|
Name of the environment.
|
|
1174
1298
|
workspace : str | uuid.UUID, default=None
|
|
1175
1299
|
The Fabric workspace name or ID in which the semantic model resides.
|
|
@@ -1178,13 +1302,11 @@ def resolve_environment_id(
|
|
|
1178
1302
|
|
|
1179
1303
|
Returns
|
|
1180
1304
|
-------
|
|
1181
|
-
UUID
|
|
1305
|
+
uuid.UUID
|
|
1182
1306
|
The environment Id.
|
|
1183
1307
|
"""
|
|
1184
1308
|
|
|
1185
|
-
return
|
|
1186
|
-
item_name=environment, type="Environment", workspace=workspace
|
|
1187
|
-
)
|
|
1309
|
+
return resolve_item_id(item=environment, type="Environment", workspace=workspace)
|
|
1188
1310
|
|
|
1189
1311
|
|
|
1190
1312
|
def _make_clickable(val):
|
|
@@ -1216,14 +1338,16 @@ def convert_to_friendly_case(text: str) -> str:
|
|
|
1216
1338
|
return text
|
|
1217
1339
|
|
|
1218
1340
|
|
|
1219
|
-
def resolve_notebook_id(
|
|
1341
|
+
def resolve_notebook_id(
|
|
1342
|
+
notebook: str | UUID, workspace: Optional[str | UUID] = None
|
|
1343
|
+
) -> UUID:
|
|
1220
1344
|
"""
|
|
1221
1345
|
Obtains the notebook Id for a given notebook.
|
|
1222
1346
|
|
|
1223
1347
|
Parameters
|
|
1224
1348
|
----------
|
|
1225
|
-
notebook: str
|
|
1226
|
-
Name of the notebook.
|
|
1349
|
+
notebook: str | uuid.UUID
|
|
1350
|
+
Name or ID of the notebook.
|
|
1227
1351
|
workspace : str | uuid.UUID, default=None
|
|
1228
1352
|
The Fabric workspace name or ID in which the semantic model resides.
|
|
1229
1353
|
Defaults to None which resolves to the workspace of the attached lakehouse
|
|
@@ -1235,9 +1359,7 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -
|
|
|
1235
1359
|
The notebook Id.
|
|
1236
1360
|
"""
|
|
1237
1361
|
|
|
1238
|
-
return
|
|
1239
|
-
item_name=notebook, type="Notebook", workspace=workspace
|
|
1240
|
-
)
|
|
1362
|
+
return resolve_item_id(item=notebook, type="Notebook", workspace=workspace)
|
|
1241
1363
|
|
|
1242
1364
|
|
|
1243
1365
|
def generate_guid():
|
|
@@ -1523,9 +1645,15 @@ def _base_api(
|
|
|
1523
1645
|
raise NotImplementedError
|
|
1524
1646
|
else:
|
|
1525
1647
|
headers = _get_headers(auth.token_provider.get(), audience=client)
|
|
1648
|
+
if client == "graph":
|
|
1649
|
+
url = f"https://graph.microsoft.com/v1.0/{request}"
|
|
1650
|
+
elif client == "azure":
|
|
1651
|
+
url = request
|
|
1652
|
+
else:
|
|
1653
|
+
raise NotImplementedError
|
|
1526
1654
|
response = requests.request(
|
|
1527
1655
|
method.upper(),
|
|
1528
|
-
|
|
1656
|
+
url,
|
|
1529
1657
|
headers=headers,
|
|
1530
1658
|
json=payload,
|
|
1531
1659
|
)
|
sempy_labs/_kql_databases.py
CHANGED
|
@@ -1,13 +1,14 @@
|
|
|
1
|
-
import sempy.fabric as fabric
|
|
2
1
|
import pandas as pd
|
|
3
|
-
import sempy_labs._icons as icons
|
|
4
2
|
from typing import Optional
|
|
5
3
|
from sempy_labs._helper_functions import (
|
|
6
4
|
resolve_workspace_name_and_id,
|
|
7
5
|
_base_api,
|
|
8
6
|
_create_dataframe,
|
|
7
|
+
delete_item,
|
|
8
|
+
create_item,
|
|
9
9
|
)
|
|
10
10
|
from uuid import UUID
|
|
11
|
+
import sempy_labs._icons as icons
|
|
11
12
|
|
|
12
13
|
|
|
13
14
|
def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
|
|
@@ -64,7 +65,7 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
|
|
|
64
65
|
return df
|
|
65
66
|
|
|
66
67
|
|
|
67
|
-
def
|
|
68
|
+
def _create_kql_database(
|
|
68
69
|
name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
|
|
69
70
|
):
|
|
70
71
|
"""
|
|
@@ -84,27 +85,16 @@ def create_kql_database(
|
|
|
84
85
|
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
85
86
|
"""
|
|
86
87
|
|
|
87
|
-
(
|
|
88
|
-
|
|
89
|
-
payload = {"displayName": name}
|
|
90
|
-
|
|
91
|
-
if description:
|
|
92
|
-
payload["description"] = description
|
|
93
|
-
|
|
94
|
-
_base_api(
|
|
95
|
-
request=f"v1/workspaces/{workspace_id}/kqlDatabases",
|
|
96
|
-
method="post",
|
|
97
|
-
payload=payload,
|
|
98
|
-
status_codes=[201, 202],
|
|
99
|
-
lro_return_status_code=True,
|
|
100
|
-
)
|
|
101
|
-
|
|
102
|
-
print(
|
|
103
|
-
f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace_name}' workspace."
|
|
88
|
+
create_item(
|
|
89
|
+
name=name, description=description, type="KQLDatabase", workspace=workspace
|
|
104
90
|
)
|
|
105
91
|
|
|
106
92
|
|
|
107
|
-
def delete_kql_database(
|
|
93
|
+
def delete_kql_database(
    kql_database: str | UUID,
    workspace: Optional[str | UUID] = None,
    **kwargs,
):
    """
    Deletes a KQL database.

    Parameters
    ----------
    kql_database: str | uuid.UUID
        Name or ID of the KQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Accept the legacy 'name' keyword from callers that have not migrated,
    # warning them about the deprecation.
    if "name" in kwargs:
        kql_database = kwargs["name"]
        print(
            f"{icons.warning} The 'name' parameter is deprecated. Please use 'kql_database' instead."
        )

    delete_item(item=kql_database, type="KQLDatabase", workspace=workspace)
|
sempy_labs/_kql_querysets.py
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import sempy.fabric as fabric
|
|
2
1
|
import pandas as pd
|
|
3
2
|
import sempy_labs._icons as icons
|
|
4
3
|
from typing import Optional
|
|
@@ -6,6 +5,8 @@ from sempy_labs._helper_functions import (
|
|
|
6
5
|
resolve_workspace_name_and_id,
|
|
7
6
|
_base_api,
|
|
8
7
|
_create_dataframe,
|
|
8
|
+
delete_item,
|
|
9
|
+
create_item,
|
|
9
10
|
)
|
|
10
11
|
from uuid import UUID
|
|
11
12
|
|
|
@@ -74,27 +75,14 @@ def create_kql_queryset(
|
|
|
74
75
|
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
75
76
|
"""
|
|
76
77
|
|
|
77
|
-
(
|
|
78
|
-
|
|
79
|
-
payload = {"displayName": name}
|
|
80
|
-
|
|
81
|
-
if description:
|
|
82
|
-
payload["description"] = description
|
|
83
|
-
|
|
84
|
-
_base_api(
|
|
85
|
-
request=f"v1/workspaces/{workspace_id}/kqlQuerysets",
|
|
86
|
-
method="post",
|
|
87
|
-
payload=payload,
|
|
88
|
-
status_codes=[201, 202],
|
|
89
|
-
lro_return_status_code=True,
|
|
90
|
-
)
|
|
91
|
-
|
|
92
|
-
print(
|
|
93
|
-
f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace_name}' workspace."
|
|
78
|
+
create_item(
|
|
79
|
+
name=name, description=description, type="KQLQueryset", workspace=workspace
|
|
94
80
|
)
|
|
95
81
|
|
|
96
82
|
|
|
97
|
-
def delete_kql_queryset(
|
|
83
|
+
def delete_kql_queryset(
    kql_queryset: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
):
    """
    Deletes a KQL queryset.

    Parameters
    ----------
    kql_queryset: str | uuid.UUID
        Name or ID of the KQL queryset.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Accept the legacy 'name' keyword from callers that have not migrated,
    # warning them about the deprecation.
    if "name" in kwargs:
        kql_queryset = kwargs["name"]
        print(
            f"{icons.warning} The 'name' parameter is deprecated. Please use 'kql_queryset' instead."
        )

    delete_item(item=kql_queryset, type="KQLQueryset", workspace=workspace)
|