semantic-link-labs 0.4.2__py3-none-any.whl → 0.6.0__py3-none-any.whl
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
- semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +44 -14
- sempy_labs/_ai.py +31 -32
- sempy_labs/_clear_cache.py +5 -8
- sempy_labs/_connections.py +80 -72
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +60 -54
- sempy_labs/_helper_functions.py +8 -10
- sempy_labs/_icons.py +15 -0
- sempy_labs/_list_functions.py +1139 -428
- sempy_labs/_model_auto_build.py +5 -6
- sempy_labs/_model_bpa.py +134 -1125
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +21 -25
- sempy_labs/_one_lake_integration.py +10 -7
- sempy_labs/_query_scale_out.py +83 -93
- sempy_labs/_refresh_semantic_model.py +12 -16
- sempy_labs/_translations.py +214 -288
- sempy_labs/_vertipaq.py +51 -42
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_compare.py +12 -11
- sempy_labs/directlake/_directlake_schema_sync.py +13 -23
- sempy_labs/directlake/_fallback.py +5 -7
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_get_shared_expression.py +4 -8
- sempy_labs/directlake/_guardrails.py +6 -8
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +18 -12
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +9 -8
- sempy_labs/directlake/_update_directlake_partition_entity.py +129 -12
- sempy_labs/directlake/_warm_cache.py +5 -5
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +4 -4
- sempy_labs/lakehouse/_lakehouse.py +3 -4
- sempy_labs/lakehouse/_shortcuts.py +17 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +21 -24
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +45 -46
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
- sempy_labs/migration/_migration_validation.py +6 -2
- sempy_labs/migration/_refresh_calc_tables.py +10 -5
- sempy_labs/report/__init__.py +2 -2
- sempy_labs/report/_generate_report.py +8 -7
- sempy_labs/report/_report_functions.py +47 -52
- sempy_labs/report/_report_rebind.py +38 -37
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +541 -180
- semantic_link_labs-0.4.2.dist-info/RECORD +0 -53
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
sempy_labs/_model_dependencies.py
CHANGED

@@ -1,10 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import format_dax_object_name
+import sempy_labs._icons as icons
 from typing import Any, Dict, Optional
 from anytree import Node, RenderTree
 from sempy._utils._log import log
 
+
 @log
 def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
     """
@@ -25,9 +27,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
         Shows all dependencies for all measures in the semantic model.
     """
 
-
-    workspace_id = fabric.get_workspace_id()
-    workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
     dep = fabric.evaluate_dax(
         dataset=dataset,
@@ -63,11 +63,11 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
         axis=1,
     )
 
-    while any(df["Done"]
+    while any(df["Done"] == False):
         for i, r in df.iterrows():
             rObjFull = r["Referenced Full Object Name"]
             rObj = r["Referenced Object"]
-            if r["Done"]
+            if r["Done"] == False:
                 dep_filt = dep[dep["Full Object Name"] == rObjFull]
 
                 for index, dependency in dep_filt.iterrows():
@@ -108,9 +108,9 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
                         "Table Name": r["Table Name"],
                         "Object Name": r["Object Name"],
                         "Object Type": r["Object Type"],
-                        "Referenced Object": dependency[
-                        "Referenced Table": dependency[
-                        "Referenced Object Type": dependency[
+                        "Referenced Object": dependency[4],
+                        "Referenced Table": dependency[3],
+                        "Referenced Object Type": dependency[5],
                         "Done": d,
                         "Full Object Name": r["Full Object Name"],
                         "Referenced Full Object Name": dependency[
@@ -130,6 +130,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
 
     return df
 
+
 @log
 def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
     """
@@ -150,9 +151,7 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
        Shows all dependencies for all objects in the semantic model.
    """
 
-
-    workspace_id = fabric.get_workspace_id()
-    workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
    dep = fabric.evaluate_dax(
        dataset=dataset,
@@ -166,7 +165,7 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
         ,[REFERENCED_TABLE] AS [Referenced Table]
         ,[REFERENCED_OBJECT] AS [Referenced Object]
         ,[REFERENCED_OBJECT_TYPE] AS [Referenced Object Type]
-        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
+        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
         """,
     )
 
@@ -191,11 +190,11 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
         lambda row: False if row["Referenced Object Type"] in objs else True, axis=1
     )
 
-    while any(df["Done"]
+    while any(df["Done"] == False):
         for i, r in df.iterrows():
             rObjFull = r["Referenced Full Object Name"]
             rObj = r["Referenced Object"]
-            if r["Done"]
+            if r["Done"] == False:
                 dep_filt = dep[dep["Full Object Name"] == rObjFull]
 
                 for index, dependency in dep_filt.iterrows():
@@ -282,9 +281,7 @@ def measure_dependency_tree(
 
     """
 
-
-    workspace_id = fabric.get_workspace_id()
-    workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
     dfM_filt = dfM[dfM["Measure Name"] == measure_name]
@@ -300,9 +297,6 @@ def measure_dependency_tree(
 
     # Create a dictionary to hold references to nodes
     node_dict: Dict[str, Any] = {}
-    measureIcon = "\u2211"
-    tableIcon = "\u229E"
-    columnIcon = "\u229F"
 
     # Populate the tree
     for _, row in df_filt.iterrows():
@@ -317,24 +311,26 @@ def measure_dependency_tree(
         if parent_node is None:
             parent_node = Node(parent_node_name)
             node_dict[parent_node_name] = parent_node
-            parent_node.custom_property =
+            parent_node.custom_property = icons.measure_icon + " "
 
         # Create the child node
         child_node_name = ref_obj_name
         child_node = Node(child_node_name, parent=parent_node)
         if ref_obj_type == "Column":
-            child_node.custom_property =
+            child_node.custom_property = (
+                icons.column_icon + " '" + ref_obj_table_name + "'"
+            )
         elif ref_obj_type == "Table":
-            child_node.custom_property =
+            child_node.custom_property = icons.table_icon + " "
         elif ref_obj_type == "Measure":
-            child_node.custom_property =
+            child_node.custom_property = icons.measure_icon + " "
 
         # Update the dictionary with the child node
         node_dict[child_node_name] = child_node
 
     # Visualize the tree structure using RenderTree
     for pre, _, node in RenderTree(node_dict[measure_name]):
-        if
+        if icons.table_icon in node.custom_property:
             print(f"{pre}{node.custom_property}'{node.name}'")
         else:
             print(f"{pre}{node.custom_property}[{node.name}]")
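The pattern above repeats throughout this file: workspace resolution now passes the caller's `workspace` argument straight to `fabric.resolve_workspace_name`, and the hard-coded Unicode icon constants (∑, ⊞, ⊟) move into the shared `sempy_labs._icons` module. A minimal usage sketch follows; the dataset, workspace and measure names are placeholders, and the top-level re-exports are assumed from the `sempy_labs/__init__.py` entry in the file list above rather than shown in this diff.

```python
import sempy_labs as labs  # assumes the functions above are re-exported at the package root

# With workspace=None, fabric.resolve_workspace_name(None) resolves to the
# current workspace instead of being forced to the notebook's workspace id.
deps = labs.get_measure_dependencies(dataset="Sales Model")

# An explicit workspace name is now honoured rather than overwritten.
deps = labs.get_measure_dependencies(dataset="Sales Model", workspace="Analytics WS")

# Renders the measure's dependency tree, prefixed with the glyphs that are
# now sourced from sempy_labs._icons instead of local constants.
labs.measure_dependency_tree(dataset="Sales Model", measure_name="Total Sales")
```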
sempy_labs/_one_lake_integration.py
CHANGED

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
@@ -6,6 +5,7 @@ from sempy._utils._log import log
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
 import sempy_labs._icons as icons
 
+
 @log
 def export_model_to_onelake(
     dataset: str,
@@ -42,10 +42,9 @@ def export_model_to_onelake(
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]
 
     if len(dfD_filt) == 0:
-
+        raise ValueError(
             f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
         )
-        return
 
     tmsl = f"""
     {{
@@ -68,8 +67,10 @@ def export_model_to_onelake(
             f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
         )
     except Exception as e:
-        raise ValueError(
-
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
+        ) from e
+
     # Create shortcuts if destination lakehouse is specified
     if destination_lakehouse is not None:
         # Destination...
@@ -104,7 +105,7 @@ def export_model_to_onelake(
     dfP_filt = dfP[
         (dfP["Mode"] == "Import")
         & (dfP["Source Type"] != "CalculationGroup")
-        & (dfP["Parent System Managed"]
+        & (dfP["Parent System Managed"] == False)
     ]
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
@@ -145,4 +146,6 @@ def export_model_to_onelake(
                 else:
                     print(response.status_code)
             except Exception as e:
-                raise ValueError(
+                raise ValueError(
+                    f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table."
+                ) from e
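The visible change in `export_model_to_onelake` is the error-handling style: paths that previously printed a message and fell through now raise `ValueError`, chaining the original exception with `from e`. A hedged caller sketch, with the model, workspace and lakehouse names as placeholders and the top-level re-export assumed:

```python
from sempy_labs import export_model_to_onelake  # assumed top-level re-export

try:
    export_model_to_onelake(
        dataset="Sales Model",            # placeholder semantic model
        workspace="Analytics WS",         # placeholder workspace
        destination_lakehouse="Gold LH",  # placeholder lakehouse for the shortcuts
    )
except ValueError as err:
    # In 0.6.0 the underlying failure (e.g. OneLake integration not enabled)
    # is chained via `raise ... from e` and available on err.__cause__.
    print(f"Export failed: {err}")
    raise
```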
sempy_labs/_query_scale_out.py
CHANGED

@@ -1,9 +1,9 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import resolve_dataset_id
 from typing import Optional
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def qso_sync(dataset: str, workspace: Optional[str] = None):
@@ -39,14 +39,11 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync"
     )
 
-    if response.status_code
-
-
-
-
-        print(
-            f"{icons.red_dot} QSO sync failed for the '{dataset}' semantic model within the '{workspace}' workspace."
-        )
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
+    )
 
 
 def qso_sync_status(dataset: str, workspace: Optional[str] = None):
@@ -100,54 +97,54 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus"
     )
 
-    if response.status_code
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    o = response.json()
+    sos = o["scaleOutStatus"]
+
+    if sos == "Enabled":
+        new_data = {
+            "Scale Out Status": o["scaleOutStatus"],
+            "Sync Start Time": o["syncStartTime"],
+            "Sync End Time": o["syncEndTime"],
+            "Commit Version": o["commitVersion"],
+            "Commit Timestamp": o["commitTimestamp"],
+            "Target Sync Version": o["targetSyncVersion"],
+            "Target Sync Timestamp": o["targetSyncTimestamp"],
+            "Trigger Reason": o["triggerReason"],
+            "Min Active Read Version": o["minActiveReadVersion"],
+            "Min Active Read Timestamp": o["minActiveReadTimestamp"],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-
+        for r in o["scaleOutReplicas"]:
             new_data = {
-                "
-                "
-                "
-                "
-                "Commit Timestamp": o["commitTimestamp"],
-                "Target Sync Version": o["targetSyncVersion"],
-                "Target Sync Timestamp": o["targetSyncTimestamp"],
-                "Trigger Reason": o["triggerReason"],
-                "Min Active Read Version": o["minActiveReadVersion"],
-                "Min Active Read Timestamp": o["minActiveReadTimestamp"],
+                "Replica ID": r["replicaId"],
+                "Replica Type": r["replicaType"],
+                "Replica Version": str(r["replicaVersion"]),
+                "Replica Timestamp": r["replicaTimestamp"],
             }
-
-
-        for r in o["scaleOutReplicas"]:
-            new_data = {
-                "Replica ID": r["replicaId"],
-                "Replica Type": r["replicaType"],
-                "Replica Version": str(r["replicaVersion"]),
-                "Replica Timestamp": r["replicaTimestamp"],
-            }
-            dfRep = pd.concat(
-                [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
-            )
-
-        df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
-        df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
-        df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
-        df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
-        df["Min Active Read Timestamp"] = pd.to_datetime(
-            df["Min Active Read Timestamp"]
+            dfRep = pd.concat(
+                [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
             )
-        dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
-        df["Commit Version"] = df["Commit Version"].astype("int")
-        df["Target Sync Version"] = df["Target Sync Version"].astype("int")
-        df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
 
-
-
-
-
+        df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
+        df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
+        df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
+        df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
+        df["Min Active Read Timestamp"] = pd.to_datetime(
+            df["Min Active Read Timestamp"]
+        )
+        dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
+        df["Commit Version"] = df["Commit Version"].astype("int")
+        df["Target Sync Version"] = df["Target Sync Version"].astype("int")
+        df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
+
+        return df, dfRep
     else:
-
+        print(f"{sos}\n\n")
+        return df, dfRep
 
 
 def disable_qso(dataset: str, workspace: Optional[str] = None):
@@ -182,14 +179,15 @@ def disable_qso(dataset: str, workspace: Optional[str] = None):
     response = client.patch(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
     )
-    if response.status_code
-
-
-
-
-
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    df = list_qso_settings(dataset=dataset, workspace=workspace)
+    print(
+        f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
+    )
+
+    return df
 
 
 def set_qso(
@@ -249,22 +247,18 @@ def set_qso(
             f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
             json=request_body,
         )
-        if response.status_code
-
-
-
-            )
-            return df
-        else:
-            return f"{icons.red_dot} {response.status_code}"
-    else:
+        if response.status_code != 200:
+            raise FabricHTTPException(response)
+
+        df = list_qso_settings(dataset=dataset, workspace=workspace)
         print(
-            f"{icons.
+            f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
        )
-
-
+        return df
+    else:
+        raise ValueError(
+            f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\""
        )
-        return
 
 
 def set_semantic_model_storage_format(
@@ -311,22 +305,19 @@ def set_semantic_model_storage_format(
     elif storage_format == "Small":
         request_body = {"targetStorageMode": "Abf"}
     else:
-
+        raise ValueError(
             f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
         )
-        return
 
     client = fabric.PowerBIRestClient()
     response = client.patch(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
     )
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(f"{icons.green_dot} Semantic model storage format set to '{storage_format}'.")
 
-
-    return print(
-        f"{icons.green_dot} Semantic model storage format set to '{storage_format}'."
-    )
-    else:
-        return f"{icons.red_dot} {response.status_code}"
+    return response.status_code
 
 
 def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
@@ -370,21 +361,21 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/datasets")
     for v in response.json()["value"]:
-        tsm = v
+        tsm = v.get("targetStorageMode")
         if tsm == "Abf":
             sm = "Small"
         else:
             sm = "Large"
         new_data = {
-            "Dataset Id": v
-            "Dataset Name": v
+            "Dataset Id": v.get("id"),
+            "Dataset Name": v.get("name"),
             "Storage Mode": sm,
-            "QSO Auto Sync Enabled": v
+            "QSO Auto Sync Enabled": v.get("queryScaleOutSettings", {}).get(
                 "autoSyncReadOnlyReplicas"
-
-            "QSO Max Read Only Replicas": v
+            ),
+            "QSO Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
                 "maxReadOnlyReplicas"
-
+            ),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
@@ -439,9 +430,8 @@ def set_workspace_default_storage_format(
     client = fabric.PowerBIRestClient()
     response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)
 
-    if response.status_code
-
-
-
-
-        print(f"{icons.red_dot} {response.status_code}")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
+    )
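Across this file the REST helpers converge on a single pattern: any non-200 response raises `FabricHTTPException(response)` instead of printing a red-dot message and returning, and JSON payloads are read defensively with `.get()`. The sketch below distils that pattern from `disable_qso`/`set_qso` above; the helper name is hypothetical, while the client and exception calls mirror the diff.

```python
import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException

def _patch_dataset(workspace_id: str, dataset_id: str, request_body: dict):
    # Hypothetical helper; mirrors the calls shown in the diff above.
    client = fabric.PowerBIRestClient()
    response = client.patch(
        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
    )
    if response.status_code != 200:
        # FabricHTTPException carries the full response, so callers see the
        # status code and body instead of a swallowed print statement.
        raise FabricHTTPException(response)
    return response
```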
sempy_labs/_refresh_semantic_model.py
CHANGED

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import resolve_dataset_id
@@ -6,6 +5,7 @@ from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 @log
@@ -41,9 +41,7 @@ def refresh_semantic_model(
     or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-
-    workspace_id = fabric.get_workspace_id()
-    workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
     if refresh_type is None:
         refresh_type = "full"
@@ -81,10 +79,9 @@ def refresh_semantic_model(
     ]
 
     if refresh_type not in refreshTypes:
-
+        raise ValueError(
             f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}."
         )
-        return
 
     if len(objects) == 0:
         requestID = fabric.refresh_dataset(
@@ -119,10 +116,9 @@ def refresh_semantic_model(
         if status == "Completed":
             break
         elif status == "Failed":
-
+            raise ValueError(
                 f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
             )
-            return
         elif status == "Cancelled":
             print(
                 f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
@@ -163,10 +159,10 @@ def cancel_dataset_refresh(
 
     if request_id is None:
         if len(rr_filt) == 0:
-
+            raise ValueError(
                 f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
             )
-
+
         request_id = rr_filt["Request Id"].iloc[0]
 
     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
@@ -176,9 +172,9 @@ def cancel_dataset_refresh(
     response = client.delete(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes/{request_id}"
     )
-
-
-
-
-
-
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+    )
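The same convention lands here: an invalid `refresh_type` or a failed refresh raises `ValueError`, and a failed cancellation call raises `FabricHTTPException`, so callers can branch on exceptions rather than parse printed output. A hedged caller sketch, with the dataset name as a placeholder and the top-level re-exports assumed:

```python
from sempy_labs import refresh_semantic_model, cancel_dataset_refresh  # assumed re-exports

try:
    refresh_semantic_model(dataset="Sales Model", refresh_type="full")
except ValueError as err:
    # Raised for an unknown refresh_type or when the service reports "Failed".
    print(f"Refresh did not complete: {err}")
    # Best-effort cleanup: cancels the most recent active Enhanced API refresh,
    # or itself raises ValueError if none is in flight.
    cancel_dataset_refresh(dataset="Sales Model")
```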