semantic-link-labs 0.8.2-py3-none-any.whl → 0.8.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +38 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
- sempy_labs/_capacities.py +138 -25
- sempy_labs/_capacity_migration.py +161 -60
- sempy_labs/_clear_cache.py +3 -3
- sempy_labs/_data_pipelines.py +54 -0
- sempy_labs/_dataflows.py +4 -0
- sempy_labs/_deployment_pipelines.py +13 -7
- sempy_labs/_environments.py +6 -0
- sempy_labs/_eventhouses.py +6 -0
- sempy_labs/_eventstreams.py +6 -0
- sempy_labs/_external_data_shares.py +190 -0
- sempy_labs/_generate_semantic_model.py +26 -4
- sempy_labs/_git.py +15 -15
- sempy_labs/_helper_functions.py +186 -11
- sempy_labs/_icons.py +55 -22
- sempy_labs/_kql_databases.py +6 -0
- sempy_labs/_kql_querysets.py +6 -0
- sempy_labs/_list_functions.py +6 -3
- sempy_labs/_managed_private_endpoints.py +166 -0
- sempy_labs/_mirrored_warehouses.py +2 -0
- sempy_labs/_ml_experiments.py +6 -0
- sempy_labs/_ml_models.py +6 -0
- sempy_labs/_model_bpa.py +11 -6
- sempy_labs/_model_bpa_bulk.py +14 -30
- sempy_labs/_model_bpa_rules.py +8 -3
- sempy_labs/_notebooks.py +111 -15
- sempy_labs/_query_scale_out.py +8 -6
- sempy_labs/_refresh_semantic_model.py +299 -49
- sempy_labs/_spark.py +12 -5
- sempy_labs/_sql.py +2 -2
- sempy_labs/_translations.py +16 -14
- sempy_labs/_vertipaq.py +127 -116
- sempy_labs/_warehouses.py +90 -1
- sempy_labs/_workloads.py +128 -0
- sempy_labs/_workspace_identity.py +4 -4
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +203 -58
- sempy_labs/admin/_domains.py +18 -18
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_sync.py +2 -6
- sempy_labs/directlake/_dl_helper.py +4 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_shared_expression.py +7 -1
- sempy_labs/directlake/_guardrails.py +3 -2
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
- sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
- sempy_labs/lakehouse/_shortcuts.py +4 -0
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
- sempy_labs/migration/_migration_validation.py +2 -0
- sempy_labs/migration/_refresh_calc_tables.py +1 -0
- sempy_labs/report/__init__.py +4 -1
- sempy_labs/report/_generate_report.py +16 -14
- sempy_labs/report/_paginated.py +74 -0
- sempy_labs/report/_report_bpa.py +8 -10
- sempy_labs/report/_report_functions.py +19 -19
- sempy_labs/report/_report_rebind.py +6 -1
- sempy_labs/report/_reportwrapper.py +3 -3
- sempy_labs/tom/_model.py +173 -67
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
sempy_labs/_refresh_semantic_model.py
CHANGED

@@ -1,11 +1,19 @@
 import sempy.fabric as fabric
 import time
-from sempy_labs._helper_functions import
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_workspace_name_and_id,
+    _get_partition_map,
+    _process_and_display_chart,
+)
 from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
-from sempy_labs._helper_functions import resolve_workspace_name_and_id
 from sempy.fabric.exceptions import FabricHTTPException
+import pandas as pd
+import warnings
+import ipywidgets as widgets
+import json


 @log
@@ -18,7 +26,8 @@ def refresh_semantic_model(
     apply_refresh_policy: bool = True,
     max_parallelism: int = 10,
     workspace: Optional[str] = None,
-):
+    visualize: bool = False,
+) -> pd.DataFrame | None:
     """
     Refreshes a semantic model.

@@ -44,6 +53,13 @@ def refresh_semantic_model(
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    visualize : bool, default=False
+        If True, displays a Gantt chart showing the refresh statistics for each table/partition.
+
+    Returns
+    -------
+    pandas.DataFrame | None
+        If 'visualize' is set to True, returns a pandas dataframe showing the SSAS trace output used to generate the visualization.
     """

     workspace = fabric.resolve_workspace_name(workspace)
@@ -67,65 +83,186 @@ def refresh_semantic_model(

     objects = objects + [extract_names(partition) for partition in partitions]

-    refresh_type = (
-
-
+    refresh_type = refresh_type.lower()
+    for prefix, mapped_value in icons.refresh_type_mapping.items():
+        if refresh_type.startswith(prefix):
+            refresh_type = mapped_value
+            break

-
+    valid_refresh_types = list(icons.refresh_type_mapping.values())
+    if refresh_type not in valid_refresh_types:
         raise ValueError(
-            f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {
+            f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {valid_refresh_types}."
         )

-
-
-
-
-
-
-
-
+    def refresh_and_trace_dataset(
+        dataset,
+        workspace,
+        refresh_type,
+        retry_count,
+        apply_refresh_policy,
+        max_parallelism,
+        objects,
+        visualize,
+    ):
+        # Ignore specific warnings
+        warnings.filterwarnings(
+            "ignore",
+            message="No trace logs have been recorded. Try starting the trace with a larger 'delay'",
         )
-    else:
-        requestID = fabric.refresh_dataset(
-            dataset=dataset,
-            workspace=workspace,
-            refresh_type=refresh_type,
-            retry_count=retry_count,
-            apply_refresh_policy=apply_refresh_policy,
-            max_parallelism=max_parallelism,
-            objects=objects,
-        )
-        print(
-            f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
-        )
-        if len(objects) != 0:
-            print(objects)

-
-
-
-
-
+        def extract_failure_error():
+            error_messages = []
+            combined_messages = ""
+            final_message = f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+            for _, r in fabric.get_refresh_execution_details(
+                refresh_request_id=request_id,
+                dataset=dataset,
+                workspace=workspace,
+            ).messages.iterrows():
+                error_messages.append(f"{r['Type']}: {r['Message']}")

-
-
-
-
-
-
+            if error_messages:
+                combined_messages = "\n".join(error_messages)
+                final_message += f"'\n' {combined_messages}"
+
+            return final_message
+
+        # Function to perform dataset refresh
+        def refresh_dataset():
+            return fabric.refresh_dataset(
+                dataset=dataset,
+                workspace=workspace,
+                refresh_type=refresh_type,
+                retry_count=retry_count,
+                apply_refresh_policy=apply_refresh_policy,
+                max_parallelism=max_parallelism,
+                objects=objects if objects else None,
             )
-
-
-
+
+        def check_refresh_status(request_id):
+            request_details = fabric.get_refresh_execution_details(
+                dataset=dataset, refresh_request_id=request_id, workspace=workspace
             )
-    return
+            return request_details.status

-
+        def display_trace_logs(trace, partition_map, widget, title, stop=False):
+            if stop:
+                df = trace.stop()
+            else:
+                df = trace.get_trace_logs()
+            if not df.empty:
+                df = df[
+                    df["Event Subclass"].isin(["ExecuteSql", "Process"])
+                ].reset_index(drop=True)
+                df = pd.merge(
+                    df,
+                    partition_map[
+                        ["PartitionID", "Object Name", "TableName", "PartitionName"]
+                    ],
+                    left_on="Object ID",
+                    right_on="PartitionID",
+                    how="left",
+                )
+                _process_and_display_chart(df, title=title, widget=widget)
+            if stop:
+                df.drop(["Object Name", "PartitionID"], axis=1, inplace=True)
+                df.rename(columns={"TableName": "Table Name"}, inplace=True)
+                df.rename(columns={"PartitionName": "Partition Name"}, inplace=True)
+                return df

-
-
+        # Start the refresh process
+        if not visualize:
+            request_id = refresh_dataset()
+            print(
+                f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
+            )
+
+        # Monitor refresh progress and handle tracing if visualize is enabled
+        if visualize:
+            partition_map = _get_partition_map(dataset, workspace)
+            widget = widgets.Output()
+
+            with fabric.create_trace_connection(
+                dataset=dataset, workspace=workspace
+            ) as trace_connection:
+                with trace_connection.create_trace(icons.refresh_event_schema) as trace:
+                    trace.start()
+                    request_id = refresh_dataset()
+
+                    while True:
+                        status = check_refresh_status(request_id)
+                        # Check if the refresh has completed
+                        if status == "Completed":
+                            break
+                        elif status == "Failed":
+                            raise ValueError(extract_failure_error())
+                        elif status == "Cancelled":
+                            print(
+                                f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                            )
+                            return
+
+                        # Capture and display logs in real-time
+                        display_trace_logs(
+                            trace,
+                            partition_map,
+                            widget,
+                            title="Refresh in progress...",
+                        )
+
+                        time.sleep(3)  # Wait before the next check
+
+                    # Final log display after completion
+                    time.sleep(5)
+
+                    # Stop trace and display final chart
+                    final_df = display_trace_logs(
+                        trace,
+                        partition_map,
+                        widget,
+                        title="Refresh Completed",
+                        stop=True,
+                    )
+
+                    print(
+                        f"{icons.green_dot} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+                    )
+                    return final_df
+
+        # For non-visualize case, only check refresh status
+        else:
+            while True:
+                status = check_refresh_status(request_id)
+                if status == "Completed":
+                    break
+                elif status == "Failed":
+                    raise ValueError(extract_failure_error())
+                elif status == "Cancelled":
+                    print(
+                        f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                    )
+                    return
+
+                time.sleep(3)
+
+            print(
+                f"{icons.green_dot} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+            )
+
+    final_output = refresh_and_trace_dataset(
+        dataset=dataset,
+        workspace=workspace,
+        refresh_type=refresh_type,
+        retry_count=retry_count,
+        apply_refresh_policy=apply_refresh_policy,
+        max_parallelism=max_parallelism,
+        objects=objects,
+        visualize=visualize,
     )

+    return final_output
+

 @log
 def cancel_dataset_refresh(
@@ -173,3 +310,116 @@ def cancel_dataset_refresh(
     print(
         f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
     )
+
+
+def get_semantic_model_refresh_history(
+    dataset: str, request_id: Optional[str] = None, workspace: Optional[str] = None
+) -> pd.DataFrame:
+    """
+    Obtains the semantic model refresh history (refreshes executed via the Enhanced Refresh API).
+
+    This is a wrapper function for the following API: `Datasets - Get Refresh History In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-refresh-history-in-group>`_.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    request_id : str, default=None
+        The request id of a semantic model refresh.
+        Defaults to None which resolves to showing all refresh requests for the given semantic model.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the semantic model refresh history.
+    """
+
+    workspace_name = fabric.resolve_workspace_name(workspace)
+    workspace_id = fabric.resolve_workspace_id(workspace_name)
+    df = pd.DataFrame(
+        columns=[
+            "Request Id",
+            "Refresh Type",
+            "Start Time",
+            "End Time",
+            "Status",
+            "Extended Status",
+        ]
+    )
+
+    dataset_id = fabric.resolve_item_id(
+        item_name=dataset, workspace=workspace_id, type="SemanticModel"
+    )
+    client = fabric.PowerBIRestClient()
+    response = client.get(
+        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes"
+    )
+    data = []
+
+    for i in response.json().get("value", []):
+        error = i.get("serviceExceptionJson")
+        if error:
+            error_json = json.loads(error)
+        if request_id is None:
+            new_data = {
+                "Request Id": i.get("requestId"),
+                "Refresh Type": i.get("refreshType"),
+                "Start Time": i.get("startTime"),
+                "End Time": i.get("endTime"),
+                "Error Code": error_json.get("errorCode") if error else None,
+                "Error Description": (
+                    error_json.get("errorDescription") if error else None
+                ),
+                "Status": i.get("status"),
+                "Extended Status": i.get("extendedStatus"),
+                "Attempts": i.get("refreshAttempts"),
+            }
+            data.append(new_data)
+
+        elif request_id == i.get("requestId"):
+            for attempt in i.get("refreshAttempts", []):
+                attempt_error = attempt.get("serviceExceptionJson")
+                if attempt_error:
+                    attempt_error_json = json.loads(attempt_error)
+                new_data = {
+                    "Request Id": i.get("requestId"),
+                    "Refresh Type": i.get("refreshType"),
+                    "Start Time": i.get("startTime"),
+                    "End Time": i.get("endTime"),
+                    "Error Code": error_json.get("errorCode") if error else None,
+                    "Error Description": (
+                        error_json.get("errorDescription") if error else None
+                    ),
+                    "Status": i.get("status"),
+                    "Extended Status": i.get("extendedStatus"),
+                    "Attempt Id": attempt.get("attemptId"),
+                    "Attempt Start Time": attempt.get("startTime"),
+                    "Attempt End Time": attempt.get("endTime"),
+                    "Attempt Error Code": (
+                        attempt_error_json.get("errorCode") if attempt_error else None
+                    ),
+                    "Attempt Error Description": (
+                        attempt_error_json.get("errorDescription")
+                        if attempt_error
+                        else None
+                    ),
+                    "Type": attempt.get("type"),
+                }
+                data.append(new_data)
+
+    if data:
+        df = pd.DataFrame(data)
+
+        # date_cols = ["Start Time", "End Time"]
+        # df[date_cols] = df[date_cols].apply(pd.to_datetime)
+
+        if "Attempt Id" in df.columns:
+            df["Attempt Id"] = df["Attempt Id"].astype(int)
+            # date_cols = ["Attempt Start Time", "Attempt End Time"]
+            # df[date_cols] = df[date_cols].apply(pd.to_datetime)
+
+    return df
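For orientation, the new `visualize` option and the refresh-history helper shown above can be exercised roughly as follows from a Fabric notebook. This is a minimal sketch, assuming `refresh_semantic_model` and `get_semantic_model_refresh_history` are exposed at the `sempy_labs` top level (the `__init__.py` change in this release suggests new exports, but they are not shown in this diff); the dataset and workspace names are placeholders, and `refresh_type="full"` assumes "full" passes the new refresh-type mapping.

import sempy_labs as labs

# Hypothetical dataset and workspace names, for illustration only.
dataset = "Sales Model"
workspace = "Analytics Workspace"

# Plain refresh: kicks off an Enhanced Refresh request and waits for it to finish.
labs.refresh_semantic_model(dataset=dataset, workspace=workspace, refresh_type="full")

# With visualize=True the refresh is traced and, per the new docstring,
# the SSAS trace output used to build the Gantt chart is returned.
trace_df = labs.refresh_semantic_model(
    dataset=dataset, workspace=workspace, refresh_type="full", visualize=True
)

# Inspect prior Enhanced Refresh API requests for the same model.
history_df = labs.get_semantic_model_refresh_history(dataset=dataset, workspace=workspace)
print(history_df[["Request Id", "Status", "Start Time", "End Time"]].head())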
sempy_labs/_spark.py
CHANGED
@@ -12,6 +12,8 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Lists all `custom pools <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+    This is a wrapper function for the following API: `Custom Pools - List Workspace Custom Pools <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools`_.
+
     Parameters
     ----------
     workspace : str, default=None
@@ -25,7 +27,6 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing all the custom pools within the Fabric workspace.
     """

-    # https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(
@@ -99,6 +100,8 @@ def create_custom_pool(
     """
     Creates a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+    This is a wrapper function for the following API: `Custom Pools - Create Workspace Custom Pool <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool`_.
+
     Parameters
     ----------
     pool_name : str
@@ -125,7 +128,6 @@ def create_custom_pool(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     request_body = {
@@ -171,6 +173,8 @@ def update_custom_pool(
     """
     Updates the properties of a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+    This is a wrapper function for the following API: `Custom Pools - Update Workspace Custom Pool <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool`_.
+
     Parameters
     ----------
     pool_name : str
@@ -205,7 +209,6 @@ def update_custom_pool(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = list_custom_pools(workspace=workspace)
@@ -267,6 +270,8 @@ def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
     """
     Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+    This is a wrapper function for the following API: `Custom Pools - Delete Workspace Custom Pool <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/delete-workspace-custom-pool`_.
+
     Parameters
     ----------
     pool_name : str
@@ -304,6 +309,8 @@ def get_spark_settings(
     """
     Shows the spark settings for a workspace.

+    This is a wrapper function for the following API: `Workspace Settings - Get Spark Settings <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/get-spark-settings`_.
+
     Parameters
     ----------
     workspace : str, default=None
@@ -319,7 +326,6 @@ def get_spark_settings(
         A pandas dataframe showing the spark settings for a workspace.
     """

-    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/get-spark-settings?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = pd.DataFrame(
@@ -392,6 +398,8 @@ def update_spark_settings(
     """
     Updates the spark settings for a workspace.

+    This is a wrapper function for the following API: `Workspace Settings - Update Spark Settings <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings`_.
+
     Parameters
     ----------
     automatic_log_enabled : bool, default=None
@@ -424,7 +432,6 @@ def update_spark_settings(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     request_body = get_spark_settings(workspace=workspace, return_dataframe=False)
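The docstring-only changes above document which Fabric REST endpoints these Spark helpers wrap. For context, two of them are typically called like this; a rough sketch, assuming the functions are exported from the `sempy_labs` top level, with a placeholder workspace name.

import sempy_labs as labs

# Placeholder workspace name.
workspace = "Analytics Workspace"

# List the Spark custom pools in the workspace
# (wraps the "List Workspace Custom Pools" API referenced in the new docstring).
pools = labs.list_custom_pools(workspace=workspace)
print(pools)

# Show the workspace-level Spark settings (wraps "Get Spark Settings").
settings = labs.get_spark_settings(workspace=workspace)
print(settings)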
sempy_labs/_sql.py
CHANGED
@@ -9,7 +9,7 @@ from sempy.fabric.exceptions import FabricHTTPException
 from sempy_labs._helper_functions import resolve_warehouse_id, resolve_lakehouse_id


-def
+def _bytes2mswin_bstr(value: bytes) -> bytes:
     """Convert a sequence of bytes into a (MS-Windows) BSTR (as bytes).

     See https://github.com/mkleehammer/pyodbc/issues/228#issuecomment-319190980
@@ -68,7 +68,7 @@ class ConnectBase:

         # Set up the connection string
         access_token = SynapseTokenProvider()()
-        tokenstruct =
+        tokenstruct = _bytes2mswin_bstr(access_token.encode())
         conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={name};Encrypt=Yes;"

         if timeout is not None:
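The renamed `_bytes2mswin_bstr` helper packs the access token into the structure the SQL Server ODBC driver expects for token-based authentication. Its body is not shown in this diff; the following is a rough, self-contained sketch of that conversion pattern, based on the pyodbc issue linked in the docstring rather than the package's actual implementation, with a hypothetical function name and a fake token.

import struct

def bytes_to_mswin_bstr(value: bytes) -> bytes:
    # Widen each byte to two bytes (little-endian, UTF-16 style), since the
    # driver expects a wide-character token buffer.
    wide = bytes(b for ch in value for b in (ch, 0))
    # Prefix with the buffer length as a 4-byte little-endian integer.
    return struct.pack("<i", len(wide)) + wide

# Example: pack a (fake) token string the way the connection setup code does.
token_struct = bytes_to_mswin_bstr("fake-access-token".encode())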
sempy_labs/_translations.py
CHANGED
@@ -40,7 +40,7 @@ def translate_semantic_model(
     from pyspark.sql import SparkSession
     from sempy_labs.tom import connect_semantic_model

-    def
+    def _clean_text(text, exclude_chars):
         if exclude_chars:
             for char in exclude_chars:
                 text = text.replace(char, " ")
@@ -55,13 +55,15 @@ def translate_semantic_model(
         columns=["Object Type", "Name", "Description", "Display Folder"]
     )

+    icons.sll_tags.append("TranslateSemanticModel")
+
     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:

         for o in tom.model.Tables:
-            oName =
-            oDescription =
+            oName = _clean_text(o.Name, exclude_characters)
+            oDescription = _clean_text(o.Description, exclude_characters)
             new_data = {
                 "Object Type": "Table",
                 "Name": o.Name,
@@ -75,9 +77,9 @@ def translate_semantic_model(
                 [df_prep, pd.DataFrame(new_data, index=[0])], ignore_index=True
             )
         for o in tom.all_columns():
-            oName =
-            oDescription =
-            oDisplayFolder =
+            oName = _clean_text(o.Name, exclude_characters)
+            oDescription = _clean_text(o.Description, exclude_characters)
+            oDisplayFolder = _clean_text(o.DisplayFolder, exclude_characters)
             new_data = {
                 "Object Type": "Column",
                 "Name": o.Name,
@@ -91,9 +93,9 @@ def translate_semantic_model(
                 [df_prep, pd.DataFrame(new_data, index=[0])], ignore_index=True
             )
         for o in tom.all_measures():
-            oName =
-            oDescription =
-            oDisplayFolder =
+            oName = _clean_text(o.Name, exclude_characters)
+            oDescription = _clean_text(o.Description, exclude_characters)
+            oDisplayFolder = _clean_text(o.DisplayFolder, exclude_characters)
             new_data = {
                 "Object Type": "Measure",
                 "Name": o.Name,
@@ -107,9 +109,9 @@ def translate_semantic_model(
                 [df_prep, pd.DataFrame(new_data, index=[0])], ignore_index=True
             )
         for o in tom.all_hierarchies():
-            oName =
-            oDescription =
-            oDisplayFolder =
+            oName = _clean_text(o.Name, exclude_characters)
+            oDescription = _clean_text(o.Description, exclude_characters)
+            oDisplayFolder = _clean_text(o.DisplayFolder, exclude_characters)
             new_data = {
                 "Object Type": "Hierarchy",
                 "Name": o.Name,
@@ -123,8 +125,8 @@ def translate_semantic_model(
                 [df_prep, pd.DataFrame(new_data, index=[0])], ignore_index=True
             )
         for o in tom.all_levels():
-            oName =
-            oDescription =
+            oName = _clean_text(o.Name, exclude_characters)
+            oDescription = _clean_text(o.Description, exclude_characters)
             new_data = {
                 "Object Type": "Level",
                 "Name": o.Name,