semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.1__py3-none-any.whl
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/METADATA +43 -7
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/RECORD +59 -40
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +116 -58
- sempy_labs/_ai.py +0 -2
- sempy_labs/_capacities.py +39 -3
- sempy_labs/_capacity_migration.py +623 -0
- sempy_labs/_clear_cache.py +8 -8
- sempy_labs/_connections.py +15 -13
- sempy_labs/_data_pipelines.py +118 -0
- sempy_labs/_documentation.py +144 -0
- sempy_labs/_eventhouses.py +118 -0
- sempy_labs/_eventstreams.py +118 -0
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +23 -24
- sempy_labs/_helper_functions.py +140 -47
- sempy_labs/_icons.py +40 -0
- sempy_labs/_kql_databases.py +134 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_list_functions.py +218 -421
- sempy_labs/_mirrored_warehouses.py +50 -0
- sempy_labs/_ml_experiments.py +122 -0
- sempy_labs/_ml_models.py +120 -0
- sempy_labs/_model_auto_build.py +0 -4
- sempy_labs/_model_bpa.py +10 -12
- sempy_labs/_model_bpa_bulk.py +8 -7
- sempy_labs/_model_dependencies.py +26 -18
- sempy_labs/_notebooks.py +5 -16
- sempy_labs/_query_scale_out.py +6 -5
- sempy_labs/_refresh_semantic_model.py +7 -19
- sempy_labs/_spark.py +40 -45
- sempy_labs/_sql.py +60 -15
- sempy_labs/_vertipaq.py +25 -25
- sempy_labs/_warehouses.py +132 -0
- sempy_labs/_workspaces.py +0 -3
- sempy_labs/admin/__init__.py +53 -0
- sempy_labs/admin/_basic_functions.py +888 -0
- sempy_labs/admin/_domains.py +411 -0
- sempy_labs/directlake/_directlake_schema_sync.py +1 -1
- sempy_labs/directlake/_dl_helper.py +32 -16
- sempy_labs/directlake/_generate_shared_expression.py +11 -14
- sempy_labs/directlake/_guardrails.py +7 -7
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
- sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
- sempy_labs/directlake/_warm_cache.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
- sempy_labs/lakehouse/_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/report/__init__.py +9 -6
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_report_bpa.py +369 -0
- sempy_labs/report/_report_bpa_rules.py +113 -0
- sempy_labs/report/_report_helper.py +254 -0
- sempy_labs/report/_report_list_functions.py +95 -0
- sempy_labs/report/_report_rebind.py +0 -4
- sempy_labs/report/_reportwrapper.py +2037 -0
- sempy_labs/tom/_model.py +333 -22
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/top_level.txt +0 -0
sempy_labs/_helper_functions.py
CHANGED
```diff
@@ -2,21 +2,23 @@ import sempy.fabric as fabric
 import re
 import json
 import base64
+import time
+import uuid
+from sempy.fabric.exceptions import FabricHTTPException
 import pandas as pd
 from functools import wraps
 import datetime
-import time
 from typing import Optional, Tuple, List
 from uuid import UUID
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 import urllib.parse
 from azure.core.credentials import TokenCredential, AccessToken
+import deltalake


 def create_abfss_path(
     lakehouse_id: UUID, lakehouse_workspace_id: UUID, delta_table_name: str
-):
+) -> str:
     """
     Creates an abfss path for a delta table in a Fabric lakehouse.

@@ -38,7 +40,7 @@ def create_abfss_path(
     return f"abfss://{lakehouse_workspace_id}@onelake.dfs.fabric.microsoft.com/{lakehouse_id}/Tables/{delta_table_name}"


-def format_dax_object_name(table: str, column: str):
+def format_dax_object_name(table: str, column: str) -> str:
     """
     Formats a table/column combination to the 'Table Name'[Column Name] format.

@@ -60,7 +62,7 @@ def format_dax_object_name(table: str, column: str):

 def create_relationship_name(
     from_table: str, from_column: str, to_table: str, to_column: str
-):
+) -> str:
     """
     Formats a relationship's table/columns into a fully qualified name.

@@ -88,7 +90,7 @@ def create_relationship_name(
     )


-def resolve_report_id(report: str, workspace: Optional[str] = None):
+def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the Power BI report.

@@ -116,7 +118,7 @@ def resolve_report_id(report: str, workspace: Optional[str] = None):
     return obj


-def resolve_report_name(report_id: UUID, workspace: Optional[str] = None):
+def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str:
     """
     Obtains the name of the Power BI report.

@@ -146,7 +148,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None):
     return obj


-def resolve_dataset_id(dataset: str, workspace: Optional[str] = None):
+def resolve_dataset_id(dataset: str, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the semantic model.

@@ -176,7 +178,7 @@ def resolve_dataset_id(dataset: str, workspace: Optional[str] = None):
     return obj


-def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
+def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> str:
     """
     Obtains the name of the semantic model.

@@ -208,7 +210,7 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):

 def resolve_lakehouse_name(
     lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None
-):
+) -> str:
     """
     Obtains the name of the Fabric lakehouse.

@@ -242,7 +244,7 @@ def resolve_lakehouse_name(
     return obj


-def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None):
+def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the Fabric lakehouse.

@@ -321,7 +323,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
     return sqlEndpointId


-def generate_embedded_filter(filter: str):
+def generate_embedded_filter(filter: str) -> str:
     """
     Converts the filter expression to a filter expression which can be used by a Power BI embedded URL.
```
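The hunks above all make implicit return types explicit (`-> str`, `-> UUID`) without touching behavior. A brief sketch of two of the annotated helpers; the GUIDs and names are placeholders, not values from the diff:

```python
from sempy_labs._helper_functions import create_abfss_path, resolve_dataset_id

# Placeholder IDs/names for illustration only.
path = create_abfss_path(
    lakehouse_id="11111111-1111-1111-1111-111111111111",
    lakehouse_workspace_id="22222222-2222-2222-2222-222222222222",
    delta_table_name="sales",
)
# 'abfss://22222222-...@onelake.dfs.fabric.microsoft.com/11111111-.../Tables/sales'

dataset_id = resolve_dataset_id(dataset="Sales Model", workspace="My Workspace")
# UUID of the 'Sales Model' semantic model, per the new annotation
```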
```diff
@@ -390,7 +392,7 @@ def save_as_delta_table(
     dataframe,
     delta_table_name: str,
     write_mode: str,
-    merge_schema:
+    merge_schema: bool = False,
     schema: Optional[dict] = None,
     lakehouse: Optional[str] = None,
     workspace: Optional[str] = None,
@@ -869,7 +871,7 @@ def lro(
     response,
     status_codes: Optional[List[str]] = [200, 202],
     sleep_time: Optional[int] = 1,
-    return_status_code:
+    return_status_code: bool = False,
 ):

     if response.status_code not in status_codes:
```
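`save_as_delta_table` and `lro` now declare explicit `bool = False` defaults for their flag parameters. `lro` is the shared long-running-operation helper: it takes the initial REST response plus the status codes treated as success, then polls until the operation completes. A minimal sketch of the call pattern, mirroring how the new KQL modules later in this diff use it (the workspace id and payload are placeholders):

```python
import sempy.fabric as fabric
from sempy_labs._helper_functions import lro

workspace_id = "33333333-3333-3333-3333-333333333333"  # placeholder
request_body = {"displayName": "Sales KQL DB"}         # placeholder payload

client = fabric.FabricRestClient()
response = client.post(f"/v1/workspaces/{workspace_id}/kqlDatabases", json=request_body)

# Accept 201 (created) or 202 (accepted, still provisioning) and poll to completion.
lro(client, response, status_codes=[201, 202])
```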
```diff
@@ -922,6 +924,19 @@ def pagination(client, response):


 def resolve_deployment_pipeline_id(deployment_pipeline: str) -> UUID:
+    """
+    Obtains the Id for a given deployment pipeline.
+
+    Parameters
+    ----------
+    deployment_pipeline : str
+        The deployment pipeline name
+
+    Returns
+    -------
+    UUID
+        The deployment pipeline Id.
+    """

     from sempy_labs._deployment_pipelines import list_deployment_pipelines

@@ -943,7 +958,7 @@ class FabricTokenCredential(TokenCredential):
     scopes: str,
     claims: Optional[str] = None,
     tenant_id: Optional[str] = None,
-    enable_cae:
+    enable_cae: bool = False,
     **kwargs: any,
 ) -> AccessToken:

```
```diff
@@ -968,15 +983,26 @@ def get_adls_client(account_name):
     return service_client


-def resolve_warehouse_id(warehouse: str, workspace: Optional[str]):
+def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
+    """
+    Obtains the Id for a given warehouse.
+
+    Parameters
+    ----------
+    warehouse : str
+        The warehouse name
+
+    Returns
+    -------
+    UUID
+        The warehouse Id.
+    """

     workspace = fabric.resolve_workspace_name(workspace)
-    warehouse_id = fabric.resolve_item_id(
+    return fabric.resolve_item_id(
         item_name=warehouse, type="Warehouse", workspace=workspace
     )

-    return warehouse_id
-

 def get_language_codes(languages: str | List[str]):

@@ -992,34 +1018,6 @@ def get_language_codes(languages: str | List[str]):
     return languages


-def resolve_environment_id(environment: str, workspace: Optional[str] = None) -> UUID:
-    """
-    Obtains the environment Id for a given environment.
-
-    Parameters
-    ----------
-    environment: str
-        Name of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-    """
-
-    from sempy_labs._environments import list_environments
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    dfE = list_environments(workspace=workspace)
-    dfE_filt = dfE[dfE["Environment Name"] == environment]
-    if len(dfE_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{environment}' environment does not exist within the '{workspace}' workspace."
-        )
-
-    return dfE_filt["Environment Id"].iloc[0]
-
-
 def get_azure_token_credentials(
     key_vault_uri: str,
     key_vault_tenant_id: str,
```
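`resolve_warehouse_id` now delegates to `fabric.resolve_item_id` instead of building and returning a local id, and the next hunk re-implements `resolve_environment_id` the same way, so a missing item surfaces through sempy's own lookup error rather than a hand-rolled `ValueError`. The shared pattern, sketched as a generic wrapper (the wrapper itself is illustrative, not part of the library):

```python
import sempy.fabric as fabric
from typing import Optional

def resolve_item_id_by_name(item_name: str, item_type: str, workspace: Optional[str] = None):
    # Generic form of resolve_warehouse_id / resolve_environment_id /
    # resolve_notebook_id in this release; illustrative only.
    workspace = fabric.resolve_workspace_name(workspace)
    return fabric.resolve_item_id(item_name=item_name, type=item_type, workspace=workspace)
```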
```diff
@@ -1048,3 +1046,98 @@ def get_azure_token_credentials(
     }

     return token, credential, headers
+
+
+def convert_to_alphanumeric_lowercase(input_string):
+
+    # Removes non-alphanumeric characters
+    cleaned_string = re.sub(r"[^a-zA-Z0-9]", "", input_string)
+    cleaned_string = cleaned_string.lower()
+
+    return cleaned_string
+
+
+def resolve_environment_id(environment: str, workspace: Optional[str] = None) -> UUID:
+    """
+    Obtains the environment Id for a given environment.
+
+    Parameters
+    ----------
+    environment: str
+        Name of the environment.
+
+    Returns
+    -------
+    UUID
+        The environment Id.
+    """
+
+    workspace = fabric.resolve_workspace_name(workspace)
+    return fabric.resolve_item_id(
+        item_name=environment, type="Environment", workspace=workspace
+    )
+
+
+def make_clickable(val):
+
+    return f'<a target="_blank" href="{val}">{val}</a>'
+
+
+def convert_to_friendly_case(text: str) -> str:
+    """
+    Converts a string of pascal/camel/snake case to business-friendly case.
+
+    Parameters
+    ----------
+    text : str
+        The text to convert.
+
+    Returns
+    -------
+    str
+        Text converted into a business-friendly text.
+    """
+    if text is not None:
+        text = text.replace("_", " ")
+        # Insert space before each capital letter, avoiding double spaces
+        text = re.sub(r"(?<!\s)(?=[A-Z])", " ", text)
+        # Strip leading/trailing whitespace and capitalize the first letter of each word
+        text = text.strip().title()
+
+    return text
+
+
+def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
+    """
+    Obtains the notebook Id for a given notebook.
+
+    Parameters
+    ----------
+    notebook: str
+        Name of the notebook.
+
+    Returns
+    -------
+    UUID
+        The notebook Id.
+    """
+
+    workspace = fabric.resolve_workspace_name(workspace)
+    return fabric.resolve_item_id(
+        item_name=notebook, type="Notebook", workspace=workspace
+    )
+
+
+def generate_guid():
+
+    return str(uuid.uuid4())
+
+
+def get_max_run_id(table_name: str) -> int:
+
+    table_path = f"/lakehouse/default/Tables/{table_name}/"
+    delta_table = deltalake.DeltaTable(table_path)
+    data = delta_table.to_pandas()
+    max_run_id = data["RunId"].max()
+
+    return max_run_id
```
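A quick check of the new string helpers; the commented outputs follow directly from the regexes above:

```python
from sempy_labs._helper_functions import (
    convert_to_alphanumeric_lowercase,
    convert_to_friendly_case,
    generate_guid,
)

convert_to_alphanumeric_lowercase("My-Lakehouse_01!")  # 'mylakehouse01'
convert_to_friendly_case("customerLifetimeValue")      # 'Customer Lifetime Value'
convert_to_friendly_case("run_id")                     # 'Run Id'
generate_guid()                                        # e.g. 'a1b2c3d4-...' (random uuid4 string)
```

Note that `get_max_run_id` reads the entire delta table into pandas just to take `max()` of the `RunId` column, so it assumes a lakehouse is attached at `/lakehouse/default` and that the table actually has a `RunId` column.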
sempy_labs/_icons.py
CHANGED
```diff
@@ -74,3 +74,43 @@ language_map = {
 }
 workspace_roles = ["Admin", "Member", "Viewer", "Contributor"]
 principal_types = ["App", "Group", "None", "User"]
+azure_api_version = "2023-11-01"
+migrate_capacity_suffix = "fsku"
+sku_mapping = {
+    "A1": "F8",
+    "EM1": "F8",
+    "A2": "F16",
+    "EM2": "F16",
+    "A3": "F32",
+    "EM3": "F32",
+    "A4": "F64",
+    "P1": "F64",
+    "A5": "F128",
+    "P2": "F128",
+    "A6": "F256",
+    "P3": "F256",
+    "A7": "F512",
+    "P4": "F512",
+    "P5": "F1024",
+}
+refreshTypes = [
+    "full",
+    "automatic",
+    "dataOnly",
+    "calculate",
+    "clearValues",
+    "defragment",
+]
+
+itemTypes = {
+    "DataPipeline": "dataPipelines",
+    "Eventstream": "eventstreams",
+    "KQLDatabase": "kqlDatabases",
+    "KQLQueryset": "kqlQuerysets",
+    "Lakehouse": "lakehouses",
+    "MLExperiment": "mlExperiments",
+    "MLModel": "mlModels",
+    "Notebook": "notebooks",
+    "Warehouse": "warehouses",
+}
+default_schema = "dbo"
```
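The new `sku_mapping` table supports the capacity-migration work in this release (see `_capacity_migration.py` in the file list): it maps legacy A/EM/P capacity SKUs to their Fabric F-SKU equivalents, and `migrate_capacity_suffix` supplies the default name suffix for migrated capacities. A hedged sketch of how such a lookup would be applied (the helper itself is illustrative, not part of the package):

```python
import sempy_labs._icons as icons

def to_fabric_sku(legacy_sku: str) -> str:
    # Illustrative only: translate a legacy capacity SKU to its Fabric F-SKU.
    fabric_sku = icons.sku_mapping.get(legacy_sku)
    if fabric_sku is None:
        raise ValueError(f"No Fabric SKU mapping defined for '{legacy_sku}'")
    return fabric_sku

to_fabric_sku("P1")   # 'F64'
to_fabric_sku("EM2")  # 'F16'
```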
sempy_labs/_kql_databases.py
ADDED

```python
import sempy.fabric as fabric
import pandas as pd
import sempy_labs._icons as icons
from typing import Optional
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
    lro,
    pagination,
)
from sempy.fabric.exceptions import FabricHTTPException


def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the KQL databases within a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the KQL databases within a workspace.
    """

    df = pd.DataFrame(
        columns=[
            "KQL Database Name",
            "KQL Database Id",
            "Description",
            "Parent Eventhouse Item Id",
            "Query Service URI",
            "Ingestion Service URI",
            "Database Type",
        ]
    )

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    responses = pagination(client, response)

    for r in responses:
        for v in r.get("value", []):
            prop = v.get("properties", {})

            new_data = {
                "KQL Database Name": v.get("displayName"),
                "KQL Database Id": v.get("id"),
                "Description": v.get("description"),
                "Parent Eventhouse Item Id": prop.get("parentEventhouseItemId"),
                "Query Service URI": prop.get("queryServiceUri"),
                "Ingestion Service URI": prop.get("ingestionServiceUri"),
                "Database Type": prop.get("databaseType"),
            }
            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    return df


def create_kql_database(
    name: str, description: Optional[str] = None, workspace: Optional[str] = None
):
    """
    Creates a KQL database.

    Parameters
    ----------
    name: str
        Name of the KQL database.
    description : str, default=None
        A description of the KQL database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    request_body = {"displayName": name}

    if description:
        request_body["description"] = description

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/kqlDatabases", json=request_body
    )

    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace}' workspace."
    )


def delete_kql_database(name: str, workspace: Optional[str] = None):
    """
    Deletes a KQL database.

    Parameters
    ----------
    name: str
        Name of the KQL database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    kql_database_id = fabric.resolve_item_id(
        item_name=name, type="KQLDatabase", workspace=workspace
    )

    client = fabric.FabricRestClient()
    response = client.delete(
        f"/v1/workspaces/{workspace_id}/kqlDatabases/{kql_database_id}"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{name}' KQL database within the '{workspace}' workspace has been deleted."
    )
```
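An end-to-end sketch of the new module. Whether these functions are also re-exported from the top-level `sempy_labs` namespace depends on the expanded `__init__.py` in this release, so the import below targets the module directly; the workspace and database names are placeholders:

```python
from sempy_labs._kql_databases import (
    list_kql_databases,
    create_kql_database,
    delete_kql_database,
)

create_kql_database(name="Sales KQL DB", workspace="My Workspace")
df = list_kql_databases(workspace="My Workspace")  # one row per KQL database
delete_kql_database(name="Sales KQL DB", workspace="My Workspace")
```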
sempy_labs/_kql_querysets.py
ADDED

```python
import sempy.fabric as fabric
import pandas as pd
import sempy_labs._icons as icons
from typing import Optional
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
    lro,
    pagination,
)
from sempy.fabric.exceptions import FabricHTTPException


def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the KQL querysets within a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the KQL querysets within a workspace.
    """

    df = pd.DataFrame(
        columns=[
            "KQL Queryset Name",
            "KQL Queryset Id",
            "Description",
        ]
    )

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    responses = pagination(client, response)

    for r in responses:
        for v in r.get("value", []):
            new_data = {
                "KQL Queryset Name": v.get("displayName"),
                "KQL Queryset Id": v.get("id"),
                "Description": v.get("description"),
            }
            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    return df


def create_kql_queryset(
    name: str, description: Optional[str] = None, workspace: Optional[str] = None
):
    """
    Creates a KQL queryset.

    Parameters
    ----------
    name: str
        Name of the KQL queryset.
    description : str, default=None
        A description of the KQL queryset.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    request_body = {"displayName": name}

    if description:
        request_body["description"] = description

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/kqlQuerysets", json=request_body
    )

    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace}' workspace."
    )


def delete_kql_queryset(name: str, workspace: Optional[str] = None):
    """
    Deletes a KQL queryset.

    Parameters
    ----------
    name: str
        Name of the KQL queryset.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    kql_queryset_id = fabric.resolve_item_id(
        item_name=name, type="KQLQueryset", workspace=workspace
    )

    client = fabric.FabricRestClient()
    response = client.delete(
        f"/v1/workspaces/{workspace_id}/kqlQuerysets/{kql_queryset_id}"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{name}' KQL queryset within the '{workspace}' workspace has been deleted."
    )
```
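`_kql_querysets.py` follows the same scaffold as `_kql_databases.py`: `resolve_workspace_name_and_id` for scoping, `pagination` for the list call, and `lro` for creation. Usage mirrors the database module (placeholder names again):

```python
from sempy_labs._kql_querysets import list_kql_querysets, create_kql_queryset

create_kql_queryset(name="Sales Queries", workspace="My Workspace")
df = list_kql_querysets(workspace="My Workspace")  # columns: Name / Id / Description
```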