semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release: this version of semantic-link-labs has been flagged as potentially problematic.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/_model_dependencies.py
CHANGED

@@ -1,12 +1,16 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import format_dax_object_name
+import sempy_labs._icons as icons
 from typing import Any, Dict, Optional
 from anytree import Node, RenderTree
 from sempy._utils._log import log
 
+
 @log
-def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
+def get_measure_dependencies(
+    dataset: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows all dependencies for all measures in a semantic model.
 
@@ -31,7 +35,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
         dataset=dataset,
         workspace=workspace,
         dax_string="""
-        SELECT
+        SELECT
         [TABLE] AS [Table Name]
         ,[OBJECT] AS [Object Name]
         ,[OBJECT_TYPE] AS [Object Type]
@@ -106,9 +110,9 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
                     "Table Name": r["Table Name"],
                     "Object Name": r["Object Name"],
                     "Object Type": r["Object Type"],
-                    "Referenced Object": dependency[
-                    "Referenced Table": dependency[
-                    "Referenced Object Type": dependency[
+                    "Referenced Object": dependency[4],
+                    "Referenced Table": dependency[3],
+                    "Referenced Object Type": dependency[5],
                     "Done": d,
                     "Full Object Name": r["Full Object Name"],
                     "Referenced Full Object Name": dependency[
@@ -128,8 +132,11 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
 
     return df
 
+
 @log
-def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
+def get_model_calc_dependencies(
+    dataset: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows all dependencies for all objects in a semantic model.
 
@@ -162,7 +169,7 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
         ,[REFERENCED_TABLE] AS [Referenced Table]
         ,[REFERENCED_OBJECT] AS [Referenced Object]
         ,[REFERENCED_OBJECT_TYPE] AS [Referenced Object Type]
-        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
+        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
         """,
     )
 
@@ -285,7 +292,7 @@ def measure_dependency_tree(
 
     if len(dfM_filt) == 0:
         print(
-            f"The '{measure_name}' measure does not exist in the '{dataset}' semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset}' semantic model in the '{workspace}' workspace."
         )
         return
 
@@ -294,9 +301,6 @@ def measure_dependency_tree(
 
     # Create a dictionary to hold references to nodes
     node_dict: Dict[str, Any] = {}
-    measureIcon = "\u2211"
-    tableIcon = "\u229E"
-    columnIcon = "\u229F"
 
     # Populate the tree
     for _, row in df_filt.iterrows():
@@ -311,24 +315,24 @@
         if parent_node is None:
             parent_node = Node(parent_node_name)
             node_dict[parent_node_name] = parent_node
-            parent_node.custom_property =
+            parent_node.custom_property = f"{icons.measure_icon} "
 
         # Create the child node
         child_node_name = ref_obj_name
         child_node = Node(child_node_name, parent=parent_node)
         if ref_obj_type == "Column":
-            child_node.custom_property =
+            child_node.custom_property = f"{icons.column_icon} '{ref_obj_table_name}'"
         elif ref_obj_type == "Table":
-            child_node.custom_property =
+            child_node.custom_property = f"{icons.table_icon} "
         elif ref_obj_type == "Measure":
-            child_node.custom_property =
+            child_node.custom_property = f"{icons.measure_icon} "
 
         # Update the dictionary with the child node
         node_dict[child_node_name] = child_node
 
     # Visualize the tree structure using RenderTree
     for pre, _, node in RenderTree(node_dict[measure_name]):
-        if
+        if icons.table_icon in node.custom_property:
            print(f"{pre}{node.custom_property}'{node.name}'")
        else:
            print(f"{pre}{node.custom_property}[{node.name}]")
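The user-visible effect of these hunks: `get_measure_dependencies` and `get_model_calc_dependencies` now declare a `pandas.DataFrame` return type, and the hard-coded icon glyphs move into the shared `sempy_labs._icons` module. A minimal usage sketch, assuming a Microsoft Fabric notebook session and that the functions are re-exported from the package root (the dataset and workspace names are placeholders):

```python
# Minimal sketch; "AdventureWorks" and "Sales Analytics" are placeholder names.
import pandas as pd
from sempy_labs import get_measure_dependencies

deps: pd.DataFrame = get_measure_dependencies(
    dataset="AdventureWorks", workspace="Sales Analytics"
)
# Each row links a measure to one object it references.
print(deps[["Object Name", "Referenced Table", "Referenced Object"]].head())
```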
sempy_labs/_one_lake_integration.py
CHANGED

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
@@ -6,6 +5,7 @@ from sempy._utils._log import log
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
 import sempy_labs._icons as icons
 
+
 @log
 def export_model_to_onelake(
     dataset: str,
@@ -42,19 +42,21 @@ def export_model_to_onelake
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]
 
     if len(dfD_filt) == 0:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
+        )
 
     tmsl = f"""
     {{
         'export': {{
             'layout': 'delta',
-            'type': 'full',
-            'objects': [
-                {{
+            'type': 'full',
+            'objects': [
+                {{
                     'database': '{dataset}'
-                }}
-            ]
-        }}
+                }}
+            ]
+        }}
     }}
     """
 
@@ -65,8 +67,10 @@ def export_model_to_onelake
             f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
         )
     except Exception as e:
-        raise ValueError(
-
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace.\nMake sure you enable OneLake integration for the '{dataset}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
+        ) from e
+
     # Create shortcuts if destination lakehouse is specified
     if destination_lakehouse is not None:
         # Destination...
@@ -116,7 +120,7 @@ def export_model_to_onelake
 
         print(f"{icons.in_progress} Creating shortcuts...\n")
         for tableName in tables:
-            tablePath = "Tables/"
+            tablePath = f"Tables/{tableName}"
             shortcutName = tableName.replace(" ", "")
             request_body = {
                 "path": "Tables",
@@ -142,4 +146,6 @@ def export_model_to_onelake
             else:
                 print(response.status_code)
         except Exception as e:
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table."
+            ) from e
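The functional fix worth noting in this file is the shortcut path: 0.7.0 builds a per-table path, `tablePath = f"Tables/{tableName}"`, and the error paths now chain the original exception via `raise ... from e`. A standalone sketch of the path construction (the table names are placeholders, and this is illustration, not library code):

```python
# Standalone illustration of the per-table shortcut path in 0.7.0;
# the table names are placeholders.
tables = ["DimDate", "FactSales"]

table_paths = [f"Tables/{t}" for t in tables]
assert table_paths == ["Tables/DimDate", "Tables/FactSales"]
```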
sempy_labs/_query_scale_out.py
CHANGED

@@ -1,9 +1,12 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import
-
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_workspace_name_and_id,
+)
+from typing import Optional, Tuple
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def qso_sync(dataset: str, workspace: Optional[str] = None):
@@ -26,12 +29,7 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
 
     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     client = fabric.PowerBIRestClient()
@@ -39,14 +37,16 @@
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync"
     )
 
-    if response.status_code
-
-
-
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
+    )
+
 
-def qso_sync_status(dataset: str, workspace: Optional[str] = None):
+def qso_sync_status(
+    dataset: str, workspace: Optional[str] = None
+) -> Tuple[pd.DataFrame, pd.DataFrame]:
     """
     Returns the query scale-out sync status for the specified dataset from the specified workspace.
 
@@ -61,6 +61,8 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
 
     Returns
     -------
+    Tuple[pandas.DataFrame, pandas.DataFrame]
+        2 pandas dataframes showing the query scale-out sync status.
 
     """
 
@@ -84,12 +86,7 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
         columns=["Replica ID", "Replica Type", "Replica Version", "Replica Timestamp"]
     )
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     client = fabric.PowerBIRestClient()
@@ -97,57 +94,57 @@
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus"
     )
 
-    if response.status_code
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    o = response.json()
+    sos = o.get("scaleOutStatus")
+
+    if sos == "Enabled":
+        new_data = {
+            "Scale Out Status": o.get("scaleOutStatus"),
+            "Sync Start Time": o.get("syncStartTime"),
+            "Sync End Time": o.get("syncEndTime"),
+            "Commit Version": o.get("commitVersion"),
+            "Commit Timestamp": o.get("commitTimestamp"),
+            "Target Sync Version": o.get("targetSyncVersion"),
+            "Target Sync Timestamp": o.get("targetSyncTimestamp"),
+            "Trigger Reason": o.get("triggerReason"),
+            "Min Active Read Version": o.get("minActiveReadVersion"),
+            "Min Active Read Timestamp": o.get("minActiveReadTimestamp"),
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-
+        for r in o.get("scaleOutReplicas", []):
             new_data = {
-                "
-                "
-                "
-                "
-                "Commit Timestamp": o["commitTimestamp"],
-                "Target Sync Version": o["targetSyncVersion"],
-                "Target Sync Timestamp": o["targetSyncTimestamp"],
-                "Trigger Reason": o["triggerReason"],
-                "Min Active Read Version": o["minActiveReadVersion"],
-                "Min Active Read Timestamp": o["minActiveReadTimestamp"],
+                "Replica ID": r.get("replicaId"),
+                "Replica Type": r.get("replicaType"),
+                "Replica Version": str(r.get("replicaVersion")),
+                "Replica Timestamp": r.get("replicaTimestamp"),
             }
-
-
-        for r in o["scaleOutReplicas"]:
-            new_data = {
-                "Replica ID": r["replicaId"],
-                "Replica Type": r["replicaType"],
-                "Replica Version": str(r["replicaVersion"]),
-                "Replica Timestamp": r["replicaTimestamp"],
-            }
-            dfRep = pd.concat(
-                [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
-            )
-
-        df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
-        df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
-        df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
-        df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
-        df["Min Active Read Timestamp"] = pd.to_datetime(
-            df["Min Active Read Timestamp"]
+            dfRep = pd.concat(
+                [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
            )
-        dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
-        df["Commit Version"] = df["Commit Version"].astype("int")
-        df["Target Sync Version"] = df["Target Sync Version"].astype("int")
-        df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
 
-
-
-
-
+        df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
+        df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
+        df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
+        df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
+        df["Min Active Read Timestamp"] = pd.to_datetime(
+            df["Min Active Read Timestamp"]
+        )
+        dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
+        df["Commit Version"] = df["Commit Version"].astype("int")
+        df["Target Sync Version"] = df["Target Sync Version"].astype("int")
+        df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
+
+        return df, dfRep
     else:
-
+        print(f"{sos}\n\n")
+        return df, dfRep
 
 
-def disable_qso(dataset: str, workspace: Optional[str] = None):
+def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Sets the max read-only replicas to 0, disabling query scale out.
 
@@ -162,15 +159,12 @@ def disable_qso(dataset: str, workspace: Optional[str] = None):
 
     Returns
     -------
+    pandas.DataFrame
+        A pandas dataframe showing the current query scale out settings.
 
     """
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     request_body = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}
@@ -179,14 +173,15 @@ def disable_qso(dataset: str, workspace: Optional[str] = None):
     response = client.patch(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
     )
-    if response.status_code
-
-
-
-
-
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    df = list_qso_settings(dataset=dataset, workspace=workspace)
+    print(
+        f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
+    )
+
+    return df
 
 
 def set_qso(
@@ -194,7 +189,7 @@ def set_qso(
     auto_sync: Optional[bool] = True,
     max_read_only_replicas: Optional[int] = -1,
     workspace: Optional[str] = None,
-):
+) -> pd.DataFrame:
     """
     Sets the query scale out settings for a semantic model.
 
@@ -213,17 +208,14 @@ def set_qso(
 
     Returns
     -------
+    pandas.DataFrame
+        A pandas dataframe showing the current query scale-out settings.
 
     """
 
     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/update-dataset-in-group
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     if max_read_only_replicas == 0:
@@ -246,16 +238,19 @@
             f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
             json=request_body,
         )
-        if response.status_code
-
-
-
-
-
-
-
+        if response.status_code != 200:
+            raise FabricHTTPException(response)
+
+        df = list_qso_settings(dataset=dataset, workspace=workspace)
+        print(
+            f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
+        return df
     else:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\""
+        )
+
 
 def set_semantic_model_storage_format(
     dataset: str, storage_format: str, workspace: Optional[str] = None
@@ -279,12 +274,7 @@
 
     """
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     storage_format = storage_format.capitalize()
@@ -301,21 +291,22 @@
     elif storage_format == "Small":
         request_body = {"targetStorageMode": "Abf"}
     else:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
+        )
 
     client = fabric.PowerBIRestClient()
     response = client.patch(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
     )
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(f"{icons.green_dot} Semantic model storage format set to '{storage_format}'.")
 
-    if response.status_code == 200:
-        return print(
-            f"{icons.green_dot} Semantic model storage format set to '{storage_format}'."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
 
-def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
+def list_qso_settings(
+    dataset: Optional[str] = None, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows the query scale out settings for a semantic model (or all semantic models within a workspace).
 
@@ -334,11 +325,7 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
         A pandas dataframe showing the query scale out settings.
     """
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     if dataset is not None:
         dataset_id = resolve_dataset_id(dataset, workspace)
@@ -355,7 +342,8 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
     )
     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/datasets")
-
+
+    for v in response.json().get("value", []):
         tsm = v.get("targetStorageMode")
         if tsm == "Abf":
             sm = "Small"
@@ -365,8 +353,12 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
             "Dataset Id": v.get("id"),
             "Dataset Name": v.get("name"),
             "Storage Mode": sm,
-            "QSO Auto Sync Enabled": v.get("queryScaleOutSettings",{}).get(
-
+            "QSO Auto Sync Enabled": v.get("queryScaleOutSettings", {}).get(
+                "autoSyncReadOnlyReplicas"
+            ),
+            "QSO Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
+                "maxReadOnlyReplicas"
+            ),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -407,23 +399,18 @@ def set_workspace_default_storage_format(
 
     if storage_format not in storageFormats:
         print(
-            f"Invalid storage format. Please choose from these options: {storageFormats}."
+            f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
         )
 
-
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"name": workspace, "defaultDatasetStorageFormat": storage_format}
 
     client = fabric.PowerBIRestClient()
     response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)
 
-    if response.status_code
-
-
-
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
+    )
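Two patterns repeat throughout this file: the five-line workspace-resolution block collapses into a single `resolve_workspace_name_and_id` call, and non-200 REST responses now raise `FabricHTTPException` instead of falling through. A sketch of the combined pattern as 0.7.0 applies it; `_patch_dataset` is a hypothetical name for illustration, not a function in the package, and the sketch assumes a Fabric notebook session:

```python
# Sketch of 0.7.0's request pattern; _patch_dataset is a hypothetical helper.
from typing import Optional

import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException
from sempy_labs._helper_functions import (
    resolve_dataset_id,
    resolve_workspace_name_and_id,
)


def _patch_dataset(dataset: str, request_body: dict, workspace: Optional[str] = None):
    # One call replaces the old get_workspace_id/resolve_workspace_name branching.
    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    dataset_id = resolve_dataset_id(dataset, workspace)

    client = fabric.PowerBIRestClient()
    response = client.patch(
        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
    )
    if response.status_code != 200:
        # FabricHTTPException carries the full response for diagnosis.
        raise FabricHTTPException(response)
```

Note also that `qso_sync_status` now returns `Tuple[pd.DataFrame, pd.DataFrame]`, so callers unpack two frames, e.g. `df_status, df_replicas = qso_sync_status("AdventureWorks")`.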
sempy_labs/_refresh_semantic_model.py
CHANGED

@@ -1,4 +1,3 @@
-import sempy
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import resolve_dataset_id
@@ -6,6 +5,7 @@ from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 @log
@@ -16,6 +16,7 @@ def refresh_semantic_model(
     refresh_type: Optional[str] = None,
     retry_count: Optional[int] = 0,
     apply_refresh_policy: Optional[bool] = True,
+    max_parallelism: Optional[int] = 10,
     workspace: Optional[str] = None,
 ):
     """
@@ -35,6 +36,10 @@
         Number of times the operation retries before failing.
     apply_refresh_policy : bool, default=True
         If an incremental refresh policy is defined, determines whether to apply the policy. Modes are true or false. If the policy isn't applied, the full process leaves partition definitions unchanged, and fully refreshes all partitions in the table. If commitMode is transactional, applyRefreshPolicy can be true or false. If commitMode is partialBatch, applyRefreshPolicy of true isn't supported, and applyRefreshPolicy must be set to false.
+    max_parallelism : int, default=10
+        Determines the maximum number of threads that can run the processing commands in parallel.
+        This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods.
+        Defaults to 10.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -79,7 +84,9 @@
     ]
 
     if refresh_type not in refreshTypes:
-        raise ValueError(
+        raise ValueError(
+            f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}."
+        )
 
     if len(objects) == 0:
         requestID = fabric.refresh_dataset(
@@ -88,6 +95,7 @@
             refresh_type=refresh_type,
             retry_count=retry_count,
             apply_refresh_policy=apply_refresh_policy,
+            max_parallelism=max_parallelism,
         )
     else:
         requestID = fabric.refresh_dataset(
@@ -96,6 +104,7 @@
             refresh_type=refresh_type,
             retry_count=retry_count,
             apply_refresh_policy=apply_refresh_policy,
+            max_parallelism=max_parallelism,
             objects=objects,
         )
     print(
@@ -114,7 +123,9 @@
         if status == "Completed":
             break
         elif status == "Failed":
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+            )
         elif status == "Cancelled":
             print(
                 f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
@@ -155,7 +166,9 @@ def cancel_dataset_refresh(
 
     if request_id is None:
         if len(rr_filt) == 0:
-            raise ValueError(
+            raise ValueError(
+                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
+            )
 
        request_id = rr_filt["Request Id"].iloc[0]
 
@@ -166,9 +179,9 @@
     response = client.delete(
         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes/{request_id}"
     )
-
-
-
-
-
-
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+    )