semantic-link-labs 0.8.4 (py3-none-any.whl) → 0.8.6 (py3-none-any.whl)
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.6.dist-info}/METADATA +9 -3
- {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.6.dist-info}/RECORD +49 -47
- {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.6.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +29 -1
- sempy_labs/_data_pipelines.py +3 -3
- sempy_labs/_dataflows.py +116 -3
- sempy_labs/_dax.py +189 -3
- sempy_labs/_deployment_pipelines.py +3 -3
- sempy_labs/_environments.py +3 -3
- sempy_labs/_eventhouses.py +3 -3
- sempy_labs/_eventstreams.py +3 -3
- sempy_labs/_external_data_shares.py +1 -1
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +7 -7
- sempy_labs/_helper_functions.py +25 -1
- sempy_labs/_kql_databases.py +3 -3
- sempy_labs/_kql_querysets.py +3 -3
- sempy_labs/_mirrored_databases.py +428 -0
- sempy_labs/_mirrored_warehouses.py +1 -1
- sempy_labs/_ml_experiments.py +3 -3
- sempy_labs/_ml_models.py +4 -4
- sempy_labs/_model_bpa.py +209 -180
- sempy_labs/_model_bpa_bulk.py +48 -24
- sempy_labs/_model_dependencies.py +42 -86
- sempy_labs/_notebooks.py +2 -2
- sempy_labs/_query_scale_out.py +4 -4
- sempy_labs/_refresh_semantic_model.py +2 -2
- sempy_labs/_spark.py +6 -6
- sempy_labs/_vertipaq.py +31 -19
- sempy_labs/_warehouses.py +3 -3
- sempy_labs/_workspace_identity.py +2 -2
- sempy_labs/_workspaces.py +7 -7
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +54 -8
- sempy_labs/admin/_domains.py +1 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
- sempy_labs/directlake/_warm_cache.py +10 -9
- sempy_labs/lakehouse/_get_lakehouse_tables.py +1 -1
- sempy_labs/lakehouse/_shortcuts.py +2 -2
- sempy_labs/migration/_create_pqt_file.py +9 -4
- sempy_labs/report/__init__.py +2 -0
- sempy_labs/report/_download_report.py +75 -0
- sempy_labs/report/_generate_report.py +3 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +4 -2
- sempy_labs/tom/_model.py +71 -35
- {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.6.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.4.dist-info → semantic_link_labs-0.8.6.dist-info}/top_level.txt +0 -0
sempy_labs/directlake/_warm_cache.py
CHANGED

@@ -177,7 +177,7 @@ def warm_direct_lake_cache_isresident(
     dfC["DAX Object Name"] = format_dax_object_name(
         dfC["Table Name"], dfC["Column Name"]
     )
-    dfC_filtered = dfC[dfC["Is Resident"]]
+    dfC_filtered = dfC[dfC["Is Resident"] == True]

     if len(dfC_filtered) == 0:
         raise ValueError(
@@ -186,17 +186,18 @@ def warm_direct_lake_cache_isresident(

     # Refresh/frame dataset
     refresh_semantic_model(dataset=dataset, refresh_type="full", workspace=workspace)
-
     time.sleep(2)

-    tbls = dfC_filtered["Table Name"].unique()
-    column_values = dfC_filtered["DAX Object Name"].tolist()
-
     # Run basic query to get columns into memory; completed one table at a time (so as not to overload the capacity)
-
-
-
-
+    tbls = dfC_filtered["Table Name"].unique()
+    for table_name in (bar := tqdm(tbls)):
+        bar.set_description(f"Warming the '{table_name}' table...")
+        css = ", ".join(
+            dfC_filtered[dfC_filtered["Table Name"] == table_name]["DAX Object Name"]
+            .astype(str)
+            .tolist()
+        )
+        dax = f"""EVALUATE TOPN(1,SUMMARIZECOLUMNS({css}))"""
         fabric.evaluate_dax(dataset=dataset, dax_string=dax, workspace=workspace)

     print(
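For context, the rewritten loop warms one table at a time by issuing a small DAX query over only that table's resident columns. Below is a minimal standalone sketch of the string assembly from the new loop; the 'Sales' table and its columns are invented for illustration.

```python
# Illustration only: two made-up resident columns for a hypothetical 'Sales' table,
# already formatted as fully qualified DAX object names ('Table'[Column]).
resident_columns = ["'Sales'[Amount]", "'Sales'[Quantity]"]

# Same string assembly as the new loop body in the diff above.
css = ", ".join(resident_columns)
dax = f"""EVALUATE TOPN(1,SUMMARIZECOLUMNS({css}))"""
print(dax)
# EVALUATE TOPN(1,SUMMARIZECOLUMNS('Sales'[Amount], 'Sales'[Quantity]))
```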
sempy_labs/lakehouse/_get_lakehouse_tables.py
CHANGED

@@ -31,7 +31,7 @@ def get_lakehouse_tables(
     """
     Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.

-    This is a wrapper function for the following API: `Tables - List Tables <https://learn.microsoft.com/rest/api/fabric/lakehouse/tables/list-tables
+    This is a wrapper function for the following API: `Tables - List Tables <https://learn.microsoft.com/rest/api/fabric/lakehouse/tables/list-tables>`_ plus extended capabilities.

     Parameters
     ----------
sempy_labs/lakehouse/_shortcuts.py
CHANGED

@@ -20,7 +20,7 @@ def create_shortcut_onelake(
     """
     Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.

-    This is a wrapper function for the following API: `OneLake Shortcuts - Create Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/create-shortcut
+    This is a wrapper function for the following API: `OneLake Shortcuts - Create Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/create-shortcut>`_.

     Parameters
     ----------
@@ -171,7 +171,7 @@ def delete_shortcut(
     """
     Deletes a shortcut.

-    This is a wrapper function for the following API: `OneLake Shortcuts - Delete Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/delete-shortcut
+    This is a wrapper function for the following API: `OneLake Shortcuts - Delete Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/delete-shortcut>`_.

     Parameters
     ----------
sempy_labs/migration/_create_pqt_file.py
CHANGED

@@ -47,7 +47,6 @@ def create_pqt_file(

     folderPath = "/lakehouse/default/Files"
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
-    os.makedirs(subFolderPath, exist_ok=True)

     with connect_semantic_model(
         dataset=dataset, workspace=workspace, readonly=True
@@ -81,8 +80,10 @@ def create_pqt_file(
     for e in tom.model.Expressions:
         expr_map[e.Name] = [str(e.Kind), e.Expression]

+    expressions = tom.model.Expressions.Count
+
     # Dataflows Gen2 max table limit is 50.
-    max_length = 50
+    max_length = 50 - expressions
     table_chunks = [
         dict(list(table_map.items())[i : i + max_length])
         for i in range(0, len(table_map), max_length)
@@ -90,6 +91,8 @@ def create_pqt_file(

     def create_pqt(table_map: dict, expr_map: dict, file_name: str):

+        os.makedirs(subFolderPath, exist_ok=True)
+
         class QueryMetadata:
             def __init__(
                 self,
@@ -223,6 +226,8 @@ def create_pqt_file(
     a = 0
     for t_map in table_chunks:
         if a > 0:
-
+            save_file_name = f"{file_name}_{a}"
+        else:
+            save_file_name = file_name
         a += 1
-        create_pqt(t_map, expr_map, file_name=
+        create_pqt(t_map, expr_map, file_name=save_file_name)
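The `max_length = 50 - expressions` change means model expressions now count against the 50-query limit of a Power Query Template, so each generated .pqt file holds fewer tables when the model defines shared expressions. A standalone sketch of the chunking arithmetic, with invented table and expression counts:

```python
# Hypothetical counts: 120 tables and 5 shared expressions in the model.
table_map = {f"Table{i}": "let ... in ..." for i in range(120)}  # stand-in for the real table map
expressions = 5

# Same chunking as the diff above: expressions now reduce the per-file table budget.
max_length = 50 - expressions
table_chunks = [
    dict(list(table_map.items())[i : i + max_length])
    for i in range(0, len(table_map), max_length)
]

# Three files would be produced (named file, file_1, file_2 per the renaming logic above).
print([len(chunk) for chunk in table_chunks])  # [45, 45, 30]
```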
sempy_labs/report/__init__.py
CHANGED

@@ -10,6 +10,7 @@ from sempy_labs.report._generate_report import (
     update_report_from_reportjson,
     create_model_bpa_report,
 )
+from sempy_labs.report._download_report import download_report
 from sempy_labs.report._report_functions import (
     get_report_json,
     # report_dependency_tree,
@@ -42,4 +43,5 @@ __all__ = [
     "report_bpa_rules",
     "run_report_bpa",
     "get_report_datasources",
+    "download_report",
 ]
sempy_labs/report/_download_report.py
ADDED

@@ -0,0 +1,75 @@
+import sempy.fabric as fabric
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_lakehouse_name,
+)
+from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def download_report(
+    report: str,
+    file_name: Optional[str] = None,
+    download_type: str = "LiveConnect",
+    workspace: Optional[str] = None,
+):
+    """
+    Downloads the specified report from the specified workspace to a Power BI .pbix file.
+
+    This is a wrapper function for the following API: `Reports - Export Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-report-in-group>`.
+
+    Parameters
+    ----------
+    report: str
+        Name of the report.
+    file_name : str, default=None
+        Name of the .pbix file to be saved.
+        Defaults to None which resolves to the name of the report.
+    download_type : str, default="LiveConnect"
+        The type of download. Valid values are "LiveConnect" and "IncludeModel".
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if not lakehouse_attached():
+        raise ValueError(
+            f"{icons.red_dot} A lakehouse must be attached to the notebook."
+        )
+
+    lakehouse_id = fabric.get_lakehouse_id()
+    workspace_name = fabric.resolve_workspace_name()
+    lakehouse_name = resolve_lakehouse_name(
+        lakehouse_id=lakehouse_id, workspace=workspace_name
+    )
+
+    download_types = ["LiveConnect", "IncludeModel"]
+    if download_type not in download_types:
+        raise ValueError(
+            f"{icons.red_dot} Invalid download_type parameter. Valid options: {download_types}."
+        )
+
+    file_name = file_name or report
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    report_id = fabric.resolve_item_id(
+        item_name=report, type="Report", workspace=workspace
+    )
+
+    client = fabric.PowerBIRestClient()
+    response = client.get(
+        f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/Export?downloadType={download_type}"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    # Save file to the attached lakehouse
+    with open(f"/lakehouse/default/Files/{file_name}.pbix", "wb") as file:
+        file.write(response.content)
+
+    print(
+        f"{icons.green_dot} The '{report}' report within the '{workspace}' workspace has been exported as the '{file_name}' file in the '{lakehouse_name}' lakehouse within the '{workspace_name} workspace."
+    )
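A usage sketch for the new `download_report` function, based only on the signature and docstring above; the report, file, and workspace names are placeholders, and a lakehouse must be attached to the notebook for the call to succeed.

```python
from sempy_labs.report import download_report

# Placeholder names; requires a lakehouse attached to the running notebook.
download_report(
    report="Sales Report",
    file_name="Sales Report Backup",   # saved as 'Sales Report Backup.pbix' under the lakehouse Files area
    download_type="IncludeModel",      # or "LiveConnect" (the default)
    workspace="Analytics",
)
```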
sempy_labs/report/_generate_report.py
CHANGED

@@ -23,7 +23,7 @@ def create_report_from_reportjson(
     """
     Creates a report based on a report.json file (and an optional themes.json file).

-    This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report
+    This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report>`_.

     Parameters
     ----------
@@ -126,7 +126,7 @@ def update_report_from_reportjson(
     """
     Updates a report based on a report.json file.

-    This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition
+    This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition>`_.

     Parameters
     ----------
@@ -183,7 +183,7 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.Da
     """
     Gets the collection of definition files of a report.

-    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition
+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.

     Parameters
     ----------
sempy_labs/report/_report_functions.py
CHANGED

@@ -33,7 +33,7 @@ def get_report_json(
     """
     Gets the report.json file content of a Power BI report.

-    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition
+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.

     Parameters
     ----------
@@ -159,7 +159,7 @@ def export_report(
     """
     Exports a Power BI report to a file in your lakehouse.

-    This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group
+    This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.

     Parameters
     ----------
@@ -405,7 +405,7 @@ def clone_report(
     """
     Clones a Power BI report.

-    This is a wrapper function for the following API: `Reports - Clone Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group
+    This is a wrapper function for the following API: `Reports - Clone Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group>`_.

     Parameters
     ----------
sempy_labs/report/_report_rebind.py
CHANGED

@@ -19,7 +19,7 @@ def report_rebind(
     """
     Rebinds a report to a semantic model.

-    This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group
+    This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group>`_.

     Parameters
     ----------
sempy_labs/report/_reportwrapper.py
CHANGED

@@ -1332,6 +1332,7 @@ class ReportWrapper:

         theme_name = json_file["name"]
         theme_name_full = f"{theme_name}.json"
+        rd = self.rdef

         # Add theme.json file to request_body
         file_payload = _conv_b64(json_file)
@@ -1345,11 +1346,12 @@ class ReportWrapper:
             "type": "CustomTheme",
         }

-        rd = self.rdef
         for _, r in rd.iterrows():
             path = r["path"]
             payload = r["payload"]
-            if path
+            if path == filePath:
+                pass
+            elif path != report_path:
                 _add_part(request_body, path, payload)
             # Update the report.json file
             else:
sempy_labs/tom/_model.py
CHANGED

@@ -51,15 +51,19 @@ class TOMWrapper:

         self._table_map = {}
         self._column_map = {}
-
-            if len(t.LineageTag) == 0:
-                t.LineageTag = generate_guid()
-            self._table_map[t.LineageTag] = t.Name
+        self._compat_level = self.model.Model.Database.CompatibilityLevel

-        for
-
-
-
+        # Minimum campat level for lineage tags is 1540 (https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.table.lineagetag?view=analysisservices-dotnet#microsoft-analysisservices-tabular-table-lineagetag)
+        if self._compat_level >= 1540:
+            for t in self.model.Tables:
+                if len(t.LineageTag) == 0:
+                    t.LineageTag = generate_guid()
+                self._table_map[t.LineageTag] = t.Name
+
+            for c in self.all_columns():
+                if len(c.LineageTag) == 0:
+                    c.LineageTag = generate_guid()
+                self._column_map[c.LineageTag] = [c.Name, c.DataType]

     def all_columns(self):
         """
@@ -887,6 +891,7 @@ class TOMWrapper:
         precedence: int,
         description: Optional[str] = None,
         hidden: bool = False,
+        column_name: str = "Name",
     ):
         """
         Adds a `calculation group <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationgroup?view=analysisservices-dotnet>`_ to a semantic model.
@@ -901,9 +906,10 @@ class TOMWrapper:
             A description of the calculation group.
         hidden : bool, default=False
             Whether the calculation group is hidden/visible.
+        column_name : str, default="Name"
+            The name of the calculation group column.
         """
         import Microsoft.AnalysisServices.Tabular as TOM
-        import System

         tbl = TOM.Table()
         tbl.Name = name
@@ -925,14 +931,14 @@ class TOMWrapper:
         col1.Name = sortCol
         col1.SourceColumn = sortCol
         col1.IsHidden = True
-        col1.DataType =
+        col1.DataType = TOM.DataType.Int64

         tbl.Columns.Add(col1)

         col2 = TOM.DataColumn()
-        col2.Name =
-        col2.SourceColumn =
-        col2.DataType =
+        col2.Name = column_name
+        col2.SourceColumn = column_name
+        col2.DataType = TOM.DataType.String
         # col.SortByColumn = m.Tables[name].Columns[sortCol]
         tbl.Columns.Add(col2)

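The new `column_name` parameter makes the name of the calculation group's string column configurable (there was previously no parameter for it). A hedged sketch of how it might be called; the method name `add_calculation_group` and the `connect_semantic_model` import path are assumptions, not shown in this diff, and the dataset and workspace names are placeholders.

```python
from sempy_labs.tom import connect_semantic_model  # import path assumed

# Method name assumed from the docstring above ("Adds a calculation group ...").
with connect_semantic_model(dataset="My Model", workspace="My Workspace", readonly=False) as tom:
    tom.add_calculation_group(
        name="Time Intelligence",
        precedence=1,
        column_name="Time Calculation",  # new parameter in 0.8.6; defaults to "Name"
    )
```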
@@ -2272,17 +2278,21 @@
         t = self.model.Tables[table_name]

         return (
-
-
-
-            for p in t.Partitions
+            self.is_calculated_table(table_name=table_name)
+            and t.Columns.Count == 4
+            and any(
+                "NAMEOF(" in p.Source.Expression.replace(" ", "") for p in t.Partitions
             )
             and all(
                 "[Value" in c.SourceColumn
                 for c in t.Columns
-                if c.Type
+                if c.Type == TOM.ColumnType.Data
+            )
+            and any(
+                ep.Name == "ParameterMetadata"
+                for c in t.Columns
+                for ep in c.ExtendedProperties
             )
-            and t.Columns.Count == 4
         )

     def is_auto_date_table(self, table_name: str):
|
@@ -4481,32 +4491,58 @@ class TOMWrapper:
|
|
|
4481
4491
|
# with concurrent.futures.ThreadPoolExecutor() as executor:
|
|
4482
4492
|
# executor.map(process_measure, self.all_measures())
|
|
4483
4493
|
|
|
4494
|
+
def set_value_filter_behavior(self, value_filter_behavior: str = "Automatic"):
|
|
4495
|
+
"""
|
|
4496
|
+
Sets the `Value Filter Behavior <https://learn.microsoft.com/power-bi/transform-model/value-filter-behavior>`_ property for the semantic model.
|
|
4497
|
+
|
|
4498
|
+
Parameters
|
|
4499
|
+
----------
|
|
4500
|
+
value_filter_behavior : str , default="Automatic"
|
|
4501
|
+
Determines value filter behavior for SummarizeColumns. Valid options: 'Automatic', 'Independent', 'Coalesced'.
|
|
4502
|
+
"""
|
|
4503
|
+
|
|
4504
|
+
import Microsoft.AnalysisServices.Tabular as TOM
|
|
4505
|
+
import System
|
|
4506
|
+
|
|
4507
|
+
value_filter_behavior = value_filter_behavior.capitalize()
|
|
4508
|
+
min_compat = 1606
|
|
4509
|
+
|
|
4510
|
+
if self.model.Model.Database.CompatibilityLevel < min_compat:
|
|
4511
|
+
self.model.Model.Database.CompatibilityLevel = min_compat
|
|
4512
|
+
|
|
4513
|
+
self.model.ValueFilterBehavior = System.Enum.Parse(
|
|
4514
|
+
TOM.ValueFilterBehaviorType, value_filter_behavior
|
|
4515
|
+
)
|
|
4516
|
+
|
|
4484
4517
|
def close(self):
|
|
4485
4518
|
|
|
4486
4519
|
if not self._readonly and self.model is not None:
|
|
4487
4520
|
|
|
4488
4521
|
import Microsoft.AnalysisServices.Tabular as TOM
|
|
4489
4522
|
|
|
4490
|
-
# ChangedProperty logic
|
|
4491
|
-
|
|
4492
|
-
|
|
4493
|
-
p.SourceType == TOM.PartitionSourceType.Entity for p in t.Partitions
|
|
4494
|
-
):
|
|
4495
|
-
if t.LineageTag in list(self._table_map.keys()):
|
|
4496
|
-
if self._table_map.get(t.LineageTag) != t.Name:
|
|
4497
|
-
self.add_changed_property(object=t, property="Name")
|
|
4498
|
-
|
|
4499
|
-
for c in self.all_columns():
|
|
4500
|
-
if c.LineageTag in list(self._column_map.keys()):
|
|
4523
|
+
# ChangedProperty logic (min compat level is 1567) https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.changedproperty?view=analysisservices-dotnet
|
|
4524
|
+
if self.model.Model.Database.CompatibilityLevel >= 1567:
|
|
4525
|
+
for t in self.model.Tables:
|
|
4501
4526
|
if any(
|
|
4502
4527
|
p.SourceType == TOM.PartitionSourceType.Entity
|
|
4503
|
-
for p in
|
|
4528
|
+
for p in t.Partitions
|
|
4504
4529
|
):
|
|
4505
|
-
if self.
|
|
4506
|
-
self.
|
|
4507
|
-
|
|
4508
|
-
self.add_changed_property(object=c, property="DataType")
|
|
4530
|
+
if t.LineageTag in list(self._table_map.keys()):
|
|
4531
|
+
if self._table_map.get(t.LineageTag) != t.Name:
|
|
4532
|
+
self.add_changed_property(object=t, property="Name")
|
|
4509
4533
|
|
|
4534
|
+
for c in self.all_columns():
|
|
4535
|
+
if c.LineageTag in list(self._column_map.keys()):
|
|
4536
|
+
if any(
|
|
4537
|
+
p.SourceType == TOM.PartitionSourceType.Entity
|
|
4538
|
+
for p in c.Parent.Partitions
|
|
4539
|
+
):
|
|
4540
|
+
if self._column_map.get(c.LineageTag)[0] != c.Name:
|
|
4541
|
+
self.add_changed_property(object=c, property="Name")
|
|
4542
|
+
if self._column_map.get(c.LineageTag)[1] != c.DataType:
|
|
4543
|
+
self.add_changed_property(object=c, property="DataType")
|
|
4544
|
+
|
|
4545
|
+
# SLL Tags
|
|
4510
4546
|
tags = [f"{icons.sll_prefix}{a}" for a in icons.sll_tags]
|
|
4511
4547
|
tags.append("SLL")
|
|
4512
4548
|
|
|
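A usage sketch for the new `set_value_filter_behavior` method; it raises the model's compatibility level to 1606 when needed before applying the property. The `connect_semantic_model` import path is assumed, and the dataset and workspace names are placeholders.

```python
from sempy_labs.tom import connect_semantic_model  # import path assumed

# Valid values per the docstring above: 'Automatic' (default), 'Independent', 'Coalesced'.
with connect_semantic_model(dataset="My Model", workspace="My Workspace", readonly=False) as tom:
    tom.set_value_filter_behavior("Independent")
```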