semantic-link-labs 0.7.3-py3-none-any.whl → 0.7.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/METADATA +14 -3
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/RECORD +60 -44
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +63 -24
- sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
- sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
- sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
- sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
- sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
- sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
- sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
- sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
- sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
- sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
- sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
- sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
- sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
- sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
- sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
- sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
- sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
- sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
- sempy_labs/_capacities.py +541 -0
- sempy_labs/_connections.py +138 -0
- sempy_labs/_environments.py +156 -0
- sempy_labs/_helper_functions.py +146 -8
- sempy_labs/_icons.py +43 -0
- sempy_labs/_list_functions.py +35 -900
- sempy_labs/_model_bpa.py +8 -32
- sempy_labs/_notebooks.py +143 -0
- sempy_labs/_query_scale_out.py +28 -7
- sempy_labs/_spark.py +465 -0
- sempy_labs/_sql.py +35 -11
- sempy_labs/_translations.py +3 -0
- sempy_labs/_vertipaq.py +160 -99
- sempy_labs/_workspaces.py +294 -0
- sempy_labs/directlake/_directlake_schema_sync.py +1 -2
- sempy_labs/tom/_model.py +5 -1
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/top_level.txt +0 -0
- /sempy_labs/_bpa_translation/{_translations_sv-SE.po → _model/_translations_sv-SE.po} +0 -0
sempy_labs/_spark.py
ADDED
@@ -0,0 +1,465 @@
import sempy.fabric as fabric
import pandas as pd
import sempy_labs._icons as icons
from typing import Optional
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
)
from sempy.fabric.exceptions import FabricHTTPException


def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Lists all `custom pools <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The name of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing all the custom pools within the Fabric workspace.
    """

    # https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools
    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    df = pd.DataFrame(
        columns=[
            "Custom Pool ID",
            "Custom Pool Name",
            "Type",
            "Node Family",
            "Node Size",
            "Auto Scale Enabled",
            "Auto Scale Min Node Count",
            "Auto Scale Max Node Count",
            "Dynamic Executor Allocation Enabled",
            "Dynamic Executor Allocation Min Executors",
            "Dynamic Executor Allocation Max Executors",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/spark/pools")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    for i in response.json()["value"]:

        aScale = i.get("autoScale", {})
        d = i.get("dynamicExecutorAllocation", {})

        new_data = {
            "Custom Pool ID": i.get("id"),
            "Custom Pool Name": i.get("name"),
            "Type": i.get("type"),
            "Node Family": i.get("nodeFamily"),
            "Node Size": i.get("nodeSize"),
            "Auto Scale Enabled": aScale.get("enabled"),
            "Auto Scale Min Node Count": aScale.get("minNodeCount"),
            "Auto Scale Max Node Count": aScale.get("maxNodeCount"),
            "Dynamic Executor Allocation Enabled": d.get("enabled"),
            "Dynamic Executor Allocation Min Executors": d.get("minExecutors"),
            "Dynamic Executor Allocation Max Executors": d.get("maxExecutors"),
        }
        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    bool_cols = ["Auto Scale Enabled", "Dynamic Executor Allocation Enabled"]
    int_cols = [
        "Auto Scale Min Node Count",
        "Auto Scale Max Node Count",
        "Dynamic Executor Allocation Min Executors",
        "Dynamic Executor Allocation Max Executors",
        # Note: the enabled flags are handled by bool_cols above.
    ]

    df[bool_cols] = df[bool_cols].astype(bool)
    df[int_cols] = df[int_cols].astype(int)

    return df


def create_custom_pool(
    pool_name: str,
    node_size: str,
    min_node_count: int,
    max_node_count: int,
    min_executors: int,
    max_executors: int,
    node_family: Optional[str] = "MemoryOptimized",
    auto_scale_enabled: Optional[bool] = True,
    dynamic_executor_allocation_enabled: Optional[bool] = True,
    workspace: Optional[str] = None,
):
    """
    Creates a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

    Parameters
    ----------
    pool_name : str
        The custom pool name.
    node_size : str
        The `node size <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize>`_.
    min_node_count : int
        The `minimum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
    max_node_count : int
        The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
    min_executors : int
        The `minimum executors <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
    max_executors : int
        The `maximum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
    node_family : str, default='MemoryOptimized'
        The `node family <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodefamily>`_.
    auto_scale_enabled : bool, default=True
        The status of `auto scale <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
    dynamic_executor_allocation_enabled : bool, default=True
        The status of the `dynamic executor allocation <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
    workspace : str, default=None
        The name of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool
    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    request_body = {
        "name": pool_name,
        "nodeFamily": node_family,
        "nodeSize": node_size,
        "autoScale": {
            "enabled": auto_scale_enabled,
            "minNodeCount": min_node_count,
            "maxNodeCount": max_node_count,
        },
        "dynamicExecutorAllocation": {
            "enabled": dynamic_executor_allocation_enabled,
            "minExecutors": min_executors,
            "maxExecutors": max_executors,
        },
    }

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
    )

    if response.status_code != 201:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
    )


def update_custom_pool(
    pool_name: str,
    node_size: Optional[str] = None,
    min_node_count: Optional[int] = None,
    max_node_count: Optional[int] = None,
    min_executors: Optional[int] = None,
    max_executors: Optional[int] = None,
    node_family: Optional[str] = None,
    auto_scale_enabled: Optional[bool] = None,
    dynamic_executor_allocation_enabled: Optional[bool] = None,
    workspace: Optional[str] = None,
):
    """
    Updates the properties of a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

    Parameters
    ----------
    pool_name : str
        The custom pool name.
    node_size : str, default=None
        The `node size <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize>`_.
        Defaults to None which keeps the existing property setting.
    min_node_count : int, default=None
        The `minimum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
        Defaults to None which keeps the existing property setting.
    max_node_count : int, default=None
        The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
        Defaults to None which keeps the existing property setting.
    min_executors : int, default=None
        The `minimum executors <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
        Defaults to None which keeps the existing property setting.
    max_executors : int, default=None
        The `maximum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
        Defaults to None which keeps the existing property setting.
    node_family : str, default=None
        The `node family <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodefamily>`_.
        Defaults to None which keeps the existing property setting.
    auto_scale_enabled : bool, default=None
        The status of `auto scale <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
        Defaults to None which keeps the existing property setting.
    dynamic_executor_allocation_enabled : bool, default=None
        The status of the `dynamic executor allocation <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
        Defaults to None which keeps the existing property setting.
    workspace : str, default=None
        The name of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool?tabs=HTTP
    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    df = list_custom_pools(workspace=workspace)
    df_pool = df[df["Custom Pool Name"] == pool_name]

    if len(df_pool) == 0:
        raise ValueError(
            f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace}' workspace. Please choose a valid custom pool."
        )

    # Fall back to the current pool settings (column names as returned by list_custom_pools)
    if node_family is None:
        node_family = df_pool["Node Family"].iloc[0]
    if node_size is None:
        node_size = df_pool["Node Size"].iloc[0]
    if auto_scale_enabled is None:
        auto_scale_enabled = bool(df_pool["Auto Scale Enabled"].iloc[0])
    if min_node_count is None:
        min_node_count = int(df_pool["Auto Scale Min Node Count"].iloc[0])
    if max_node_count is None:
        max_node_count = int(df_pool["Auto Scale Max Node Count"].iloc[0])
    if dynamic_executor_allocation_enabled is None:
        dynamic_executor_allocation_enabled = bool(
            df_pool["Dynamic Executor Allocation Enabled"].iloc[0]
        )
    if min_executors is None:
        min_executors = int(df_pool["Dynamic Executor Allocation Min Executors"].iloc[0])
    if max_executors is None:
        max_executors = int(df_pool["Dynamic Executor Allocation Max Executors"].iloc[0])

    request_body = {
        "name": pool_name,
        "nodeFamily": node_family,
        "nodeSize": node_size,
        "autoScale": {
            "enabled": auto_scale_enabled,
            "minNodeCount": min_node_count,
            "maxNodeCount": max_node_count,
        },
        "dynamicExecutorAllocation": {
            "enabled": dynamic_executor_allocation_enabled,
            "minExecutors": min_executors,
            "maxExecutors": max_executors,
        },
    }

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
    )


def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
    """
    Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

    Parameters
    ----------
    pool_name : str
        The custom pool name.
    workspace : str, default=None
        The name of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    dfL = list_custom_pools(workspace=workspace)
    dfL_filt = dfL[dfL["Custom Pool Name"] == pool_name]

    if len(dfL_filt) == 0:
        raise ValueError(
            f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace}' workspace."
        )
    poolId = dfL_filt["Custom Pool ID"].iloc[0]

    client = fabric.FabricRestClient()
    response = client.delete(f"/v1/workspaces/{workspace_id}/spark/pools/{poolId}")

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
    )


def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the spark settings for a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The name of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the spark settings for a workspace.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/get-spark-settings?tabs=HTTP
    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    df = pd.DataFrame(
        columns=[
            "Automatic Log Enabled",
            "High Concurrency Enabled",
            "Customize Compute Enabled",
            "Default Pool Name",
            "Default Pool Type",
            "Max Node Count",
            "Max Executors",
            "Environment Name",
            "Runtime Version",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/spark/settings")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    i = response.json()
    p = i.get("pool")
    dp = i.get("pool", {}).get("defaultPool", {})
    sp = i.get("pool", {}).get("starterPool", {})
    e = i.get("environment", {})

    new_data = {
        "Automatic Log Enabled": i.get("automaticLog").get("enabled"),
        "High Concurrency Enabled": i.get("highConcurrency").get(
            "notebookInteractiveRunEnabled"
        ),
        "Customize Compute Enabled": p.get("customizeComputeEnabled"),
        "Default Pool Name": dp.get("name"),
        "Default Pool Type": dp.get("type"),
        "Max Node Count": sp.get("maxNodeCount"),
        "Max Executors": sp.get("maxExecutors"),
        "Environment Name": e.get("name"),
        "Runtime Version": e.get("runtimeVersion"),
    }
    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    bool_cols = [
        "Automatic Log Enabled",
        "High Concurrency Enabled",
        "Customize Compute Enabled",
    ]
    int_cols = ["Max Node Count", "Max Executors"]

    df[bool_cols] = df[bool_cols].astype(bool)
    df[int_cols] = df[int_cols].astype(int)

    return df


def update_spark_settings(
    automatic_log_enabled: Optional[bool] = None,
    high_concurrency_enabled: Optional[bool] = None,
    customize_compute_enabled: Optional[bool] = None,
    default_pool_name: Optional[str] = None,
    max_node_count: Optional[int] = None,
    max_executors: Optional[int] = None,
    environment_name: Optional[str] = None,
    runtime_version: Optional[str] = None,
    workspace: Optional[str] = None,
):
    """
    Updates the spark settings for a workspace.

    Parameters
    ----------
    automatic_log_enabled : bool, default=None
        The status of the `automatic log <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#automaticlogproperties>`_.
        Defaults to None which keeps the existing property setting.
    high_concurrency_enabled : bool, default=None
        The status of the `high concurrency <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#highconcurrencyproperties>`_ for notebook interactive run.
        Defaults to None which keeps the existing property setting.
    customize_compute_enabled : bool, default=None
        `Customize compute <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties>`_ configurations for items.
        Defaults to None which keeps the existing property setting.
    default_pool_name : str, default=None
        `Default pool <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties>`_ for the workspace.
        Defaults to None which keeps the existing property setting.
    max_node_count : int, default=None
        The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
        Defaults to None which keeps the existing property setting.
    max_executors : int, default=None
        The `maximum executors <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
        Defaults to None which keeps the existing property setting.
    environment_name : str, default=None
        The name of the `default environment <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties>`_. An empty string indicates there is no workspace default environment.
        Defaults to None which keeps the existing property setting.
    runtime_version : str, default=None
        The `runtime version <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties>`_.
        Defaults to None which keeps the existing property setting.
    workspace : str, default=None
        The name of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP
    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    dfS = get_spark_settings(workspace=workspace)

    # Fall back to the current workspace settings for any parameter left as None
    if automatic_log_enabled is None:
        automatic_log_enabled = bool(dfS["Automatic Log Enabled"].iloc[0])
    if high_concurrency_enabled is None:
        high_concurrency_enabled = bool(dfS["High Concurrency Enabled"].iloc[0])
    if customize_compute_enabled is None:
        customize_compute_enabled = bool(dfS["Customize Compute Enabled"].iloc[0])
    if default_pool_name is None:
        default_pool_name = dfS["Default Pool Name"].iloc[0]
    if max_node_count is None:
        max_node_count = int(dfS["Max Node Count"].iloc[0])
    if max_executors is None:
        max_executors = int(dfS["Max Executors"].iloc[0])
    if environment_name is None:
        environment_name = dfS["Environment Name"].iloc[0]
    if runtime_version is None:
        runtime_version = dfS["Runtime Version"].iloc[0]

    request_body = {
        "automaticLog": {"enabled": automatic_log_enabled},
        "highConcurrency": {"notebookInteractiveRunEnabled": high_concurrency_enabled},
        "pool": {
            "customizeComputeEnabled": customize_compute_enabled,
            "defaultPool": {"name": default_pool_name, "type": "Workspace"},
            "starterPool": {
                "maxNodeCount": max_node_count,
                "maxExecutors": max_executors,
            },
        },
        "environment": {"name": environment_name, "runtimeVersion": runtime_version},
    }

    client = fabric.FabricRestClient()
    response = client.patch(
        f"/v1/workspaces/{workspace_id}/spark/settings", json=request_body
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The spark settings within the '{workspace}' workspace have been updated accordingly."
    )
sempy_labs/_sql.py
CHANGED
@@ -1,6 +1,6 @@
 import sempy.fabric as fabric
 import pandas as pd
-from typing import Optional, Union
+from typing import Optional, Union, List
 from sempy._utils._log import log
 import struct
 import uuid
@@ -59,29 +59,53 @@ class ConnectWarehouse:
         self.connection = pyodbc.connect(conn_str, attrs_before={1256: tokenstruct})

     @log
-    def query(
+    def query(
+        self, sql: Union[str, List[str]]
+    ) -> Union[List[pd.DataFrame], pd.DataFrame, None]:
         """
-        Runs a SQL query against a Fabric Warehouse.
+        Runs a SQL or T-SQL query (or multiple queries) against a Fabric Warehouse.

         Parameters
         ----------
-        sql : str
-
+        sql : str or List[str]
+            A single SQL or T-SQL query, or a list of queries to be executed.

         Returns
         -------
-        pandas.DataFrame
-            A pandas
+        Union[List[pandas.DataFrame], pandas.DataFrame, None]
+            A list of pandas DataFrames if multiple SQL queries return results,
+            a single DataFrame if one query is executed and returns results, or None.
         """
         cursor = None
+        results = []  # To store results from multiple queries if needed
+
+        # If the input is a single string, convert it to a list for consistency
+        if isinstance(sql, str):
+            sql = [sql]

         try:
             cursor = self.connection.cursor()
-            cursor.execute(sql)

-
-            cursor.
-
+            for sql_query in sql:
+                cursor.execute(sql_query)
+
+                # Commit for non-select queries (like CREATE, INSERT, etc.)
+                if not cursor.description:
+                    self.connection.commit()
+                else:
+                    # Fetch and append results for queries that return a result set
+                    result = pd.DataFrame.from_records(
+                        cursor.fetchall(),
+                        columns=[col[0] for col in cursor.description],
+                    )
+                    results.append(result)
+
+            # Return results if any queries returned a result set
+            if results:
+                return results if len(results) > 1 else results[0]
+            else:
+                return None
+
         finally:
             if cursor:
                 cursor.close()
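The reworked query method accepts either a single statement or a list of statements, commits any statement that produces no result set, and collects a DataFrame for each one that does. A usage sketch follows; the warehouse and table names are hypothetical, and the ConnectWarehouse constructor arguments are assumed rather than shown in this diff.

from sempy_labs import ConnectWarehouse

conn = ConnectWarehouse(warehouse="MyWarehouse")  # hypothetical warehouse name

# A single SELECT returns one DataFrame
df = conn.query("SELECT TOP 10 * FROM dbo.SalesOrders")

# A list mixing DDL/DML and SELECT: the non-SELECT statements are committed;
# with only one statement returning rows, a single DataFrame comes back
# (a list is returned only when several statements return result sets)
staging = conn.query(
    [
        "CREATE TABLE dbo.Staging (Id INT)",
        "INSERT INTO dbo.Staging VALUES (1)",
        "SELECT * FROM dbo.Staging",
    ]
)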
sempy_labs/_translations.py
CHANGED
@@ -3,6 +3,7 @@ import pandas as pd
 from typing import List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from sempy_labs._helper_functions import get_language_codes


 @log
@@ -48,6 +49,8 @@ def translate_semantic_model(
     if isinstance(languages, str):
         languages = [languages]

+    languages = get_language_codes(languages)
+
     df_prep = pd.DataFrame(
         columns=["Object Type", "Name", "Description", "Display Folder"]
     )
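With the languages list now passed through get_language_codes, translate_semantic_model can presumably accept friendly language names as well as locale codes and normalize them before translating. A hedged sketch (the dataset name is hypothetical, the normalization behavior is assumed since the helper itself is not shown in this diff, and any parameters beyond dataset and languages are omitted):

from sempy_labs import translate_semantic_model

# Mixed input: a friendly name and a locale code; get_language_codes is assumed
# to normalize both before the translations are written to the semantic model
translate_semantic_model(
    dataset="Sales Model",
    languages=["French", "de-DE"],
)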