semantic-link-labs 0.12.3__py3-none-any.whl → 0.12.5__py3-none-any.whl
This diff compares publicly released package versions as they appear in their public registry and is provided for informational purposes only.
- {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/METADATA +5 -3
- {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/RECORD +45 -37
- sempy_labs/__init__.py +20 -16
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_authentication.py +1 -1
- sempy_labs/_capacities.py +1 -1
- sempy_labs/_dataflows.py +98 -10
- sempy_labs/_git.py +1 -1
- sempy_labs/_helper_functions.py +32 -5
- sempy_labs/_list_functions.py +55 -5
- sempy_labs/_managed_private_endpoints.py +63 -1
- sempy_labs/_model_bpa.py +6 -0
- sempy_labs/_notebooks.py +4 -2
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_sql_audit_settings.py +208 -0
- sempy_labs/_sql_endpoints.py +18 -3
- sempy_labs/_utils.py +2 -0
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_basic_functions.py +17 -13
- sempy_labs/admin/_items.py +3 -3
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/deployment_pipeline/__init__.py +21 -0
- sempy_labs/deployment_pipeline/_items.py +486 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +73 -41
- sempy_labs/directlake/_warm_cache.py +3 -1
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +10 -0
- sempy_labs/graph/_groups.py +123 -53
- sempy_labs/graph/_sensitivity_labels.py +39 -0
- sempy_labs/graph/_teams.py +19 -18
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +69 -18
- sempy_labs/lakehouse/_get_lakehouse_tables.py +33 -1
- sempy_labs/lakehouse/_lakehouse.py +6 -2
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/report/_export_report.py +0 -22
- sempy_labs/report/_report_rebind.py +29 -43
- sempy_labs/report/_reportwrapper.py +80 -35
- sempy_labs/tom/_model.py +81 -4
- sempy_labs/_deployment_pipelines.py +0 -209
- sempy_labs/_eventstreams.py +0 -123
- {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/top_level.txt +0 -0
sempy_labs/lakehouse/_partitioning.py
ADDED

```diff
@@ -0,0 +1,165 @@
+from typing import Optional, List
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    _create_spark_session,
+    create_abfss_path,
+    resolve_workspace_id,
+    resolve_lakehouse_id,
+    _get_delta_table,
+)
+from sempy._utils._log import log
+
+
+@log
+def _get_partitions(
+    table_name: str,
+    schema_name: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+):
+
+    workspace_id = resolve_workspace_id(workspace)
+    lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+    path = create_abfss_path(lakehouse_id, workspace_id, table_name, schema_name)
+
+    delta_table = _get_delta_table(path)
+    details_df = delta_table.detail()
+
+    return details_df.collect()[0].asDict()
+
+
+@log
+def is_partitioned(
+    table: str,
+    schema: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+) -> bool:
+    """
+    Checks if a delta table is partitioned.
+
+    Parameters
+    ----------
+    table : str
+        The name of the delta table.
+    schema : str, optional
+        The schema of the table to check. If not provided, the default schema is used.
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    bool
+        True if the table is partitioned, False otherwise.
+    """
+
+    details = _get_partitions(
+        table_name=table, schema_name=schema, lakehouse=lakehouse, workspace=workspace
+    )
+    return len(details["partitionColumns"]) > 0
+
+
+@log
+def list_partitioned_columns(
+    table: str,
+    schema: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+) -> List[str]:
+    """
+    Lists the partitioned columns of a delta table.
+
+    Parameters
+    ----------
+    table : str
+        The name of the delta table.
+    schema : str, optional
+        The schema of the table to check. If not provided, the default schema is used.
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    List[str]
+        The list of partitioned columns.
+    """
+
+    details = _get_partitions(
+        table_name=table, schema_name=schema, lakehouse=lakehouse, workspace=workspace
+    )
+
+    return details["partitionColumns"]
+
+
+@log
+def is_over_partitioned(
+    table: str,
+    schema: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+    total_table_size_gb: int = 1000,
+    average_partition_size_gb: int = 1,
+) -> bool:
+    """
+    Checks if a delta table is over-partitioned.
+
+    Parameters
+    ----------
+    table : str
+        The name of the delta table.
+    schema : str, optional
+        The schema of the table to check. If not provided, the default schema is used.
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    total_table_size_gb : int, default=1000
+        Threshold for total table size in GB (default 1TB).
+    average_partition_size_gb : int, default=1
+        Threshold for average partition size in GB.
+
+    Returns
+    -------
+    bool
+        True if the table is over-partitioned, False otherwise.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+    path = create_abfss_path(lakehouse_id, workspace_id, table, schema)
+    # Get DeltaTable details
+    spark = _create_spark_session()
+    details_df = spark.sql(f"DESCRIBE DETAIL delta.`{path}`")
+    details = details_df.collect()[0].asDict()
+
+    # Extract relevant fields
+    size_bytes = details["sizeInBytes"]
+    partition_cols = details["partitionColumns"]
+    num_files = details["numFiles"]
+
+    total_size_gb = size_bytes / (1024**3)
+
+    # Only check if the table is partitioned
+    if len(partition_cols) > 0 and num_files > 0:
+        avg_partition_size_gb = total_size_gb / num_files
+
+        if (
+            total_size_gb < total_table_size_gb
+            or avg_partition_size_gb < average_partition_size_gb
+        ):
+            return True
+
+    return False
```
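The new module adds three helpers for inspecting delta table partitioning from the table's `DESCRIBE DETAIL` metadata. A minimal usage sketch, assuming a Fabric notebook with an attached lakehouse (the table name `sales` is a placeholder; the functions are imported from the module directly, since the diff does not show them being re-exported):

```python
from sempy_labs.lakehouse._partitioning import (
    is_partitioned,
    list_partitioned_columns,
    is_over_partitioned,
)

# Check whether the 'sales' delta table is partitioned at all.
if is_partitioned(table="sales"):
    # List the columns the table is partitioned by.
    print(list_partitioned_columns(table="sales"))

# Flags partitioned tables whose total size is below 1000 GB or whose
# average size per file is below 1 GB, i.e. tables too small to benefit
# from partitioning.
print(is_over_partitioned(table="sales"))
```

Note that `is_over_partitioned` divides the total size by `numFiles`, so the `average_partition_size_gb` threshold is effectively an average file size rather than an average size per partition value.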
sempy_labs/report/_export_report.py
CHANGED

```diff
@@ -12,10 +12,6 @@ from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from uuid import UUID
-from sempy_labs.report._report_functions import (
-    list_report_visuals,
-    list_report_pages,
-)
 
 
 @log
@@ -187,15 +183,7 @@ def export_report(
     request_body = {"format": export_format, "powerBIReportConfiguration": {}}
 
     request_body["powerBIReportConfiguration"]["pages"] = []
-    dfPage = list_report_pages(report=report, workspace=workspace_id)
-
     for page in page_name:
-        dfPage_filt = dfPage[dfPage["Page ID"] == page]
-        if len(dfPage_filt) == 0:
-            raise ValueError(
-                f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace_name}' workspace."
-            )
-
         page_dict = {"pageName": page}
         request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
 
@@ -209,19 +197,9 @@ def export_report(
     request_body = {"format": export_format, "powerBIReportConfiguration": {}}
 
     request_body["powerBIReportConfiguration"]["pages"] = []
-    dfVisual = list_report_visuals(report=report, workspace=workspace_id)
     a = 0
     for page in page_name:
         visual = visual_name[a]
-
-        dfVisual_filt = dfVisual[
-            (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
-        ]
-        if len(dfVisual_filt) == 0:
-            raise ValueError(
-                f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace_name}' workspace."
-            )
-
         page_dict = {"pageName": page, "visualName": visual}
         request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
         a += 1
```
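`export_report` no longer pre-validates page and visual names against `list_report_pages`/`list_report_visuals`; names go straight into the request body, and invalid ones now surface as errors from the export API instead of a local `ValueError`. A hedged call sketch (report, workspace, and page ID are placeholders, and the parameter names are inferred from the function body shown above rather than from a full signature):

```python
from sempy_labs.report import export_report

# Page IDs are passed directly into powerBIReportConfiguration.pages;
# a non-existent page now fails on the service side.
export_report(
    report="Sales Report",
    export_format="PDF",
    page_name=["ReportSection1"],
    workspace="Sales",
)
```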
sempy_labs/report/_report_rebind.py
CHANGED

```diff
@@ -1,4 +1,6 @@
 from sempy_labs._helper_functions import (
+    resolve_item_id,
+    resolve_workspace_id,
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     _base_api,
@@ -8,6 +10,7 @@ from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from uuid import UUID
+import sempy.fabric as fabric
 
 
 @log
@@ -108,57 +111,40 @@ def report_rebind_all(
     the new semantic model.
     """
 
-
-
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
-        dataset=dataset, workspace=dataset_workspace
+    (dataset_name, dataset_id) = resolve_item_name_and_id(
+        item=dataset, type="SemanticModel", workspace=dataset_workspace
     )
-
-
+    new_dataset_id = resolve_item_id(
+        item=new_dataset, type="SemanticModel", workspace=new_dataset_workspace
     )
 
     if dataset_id == new_dataset_id:
         raise ValueError(
-            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to
+            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to the same semantic model within the same workspace. These parameters must be set to different values."
         )
-
-        workspace=dataset_workspace
-    )
+    dataset_workspace_id = resolve_workspace_id(workspace=dataset_workspace)
 
-    if isinstance(report_workspace, str):
+    if isinstance(report_workspace, str) or report_workspace is None:
         report_workspace = [report_workspace]
 
-
-
-    )
-
-    if dfR.empty:
-        print(
-            f"{icons.info} The '{dataset_name}' semantic model within the '{dataset_workspace_name}' workspace has no dependent reports."
-        )
-        return
-
-    if report_workspace is None:
-        dfR_filt = dfR.copy()
-    else:
+    for w in report_workspace:
+        dfR = fabric.list_reports(workspace=w)
         dfR_filt = dfR[
-            (dfR["
-
+            (dfR["Dataset ID"] == dataset_id)
+            & (dfR["Dataset Workspace Id"] == dataset_workspace_id)
         ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-            dataset_workspace=new_dataset_workspace,
-        )
+        if dfR_filt.empty:
+            (wksp_name, _) = resolve_workspace_name_and_id(workspace=w)
+            print(
+                f"{icons.info} No reports found for the '{dataset_name}' semantic model within the '{wksp_name}' workspace."
+            )
+        else:
+            # Rebind reports to new dataset
+            for _, r in dfR_filt.iterrows():
+                rpt_name = r["Name"]
+                report_rebind(
+                    report=rpt_name,
+                    dataset=new_dataset,
+                    report_workspace=w,
+                    dataset_workspace=new_dataset_workspace,
+                )
```
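`report_rebind_all` now normalizes `report_workspace` (a single string or `None` becomes a one-element list), lists reports per workspace with `fabric.list_reports`, and rebinds each report whose `Dataset ID` and `Dataset Workspace Id` match the source model. A usage sketch (workspace and model names are placeholders; the function is assumed to remain importable from `sempy_labs.report`):

```python
from sempy_labs.report import report_rebind_all

# Rebind every report currently bound to 'Model A' in the Dev and Prod
# workspaces so that they point at 'Model B' instead.
report_rebind_all(
    dataset="Model A",
    new_dataset="Model B",
    dataset_workspace="Dev",
    new_dataset_workspace="Prod",
    report_workspace=["Dev", "Prod"],
)
```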
sempy_labs/report/_reportwrapper.py
CHANGED

```diff
@@ -171,6 +171,7 @@ class ReportWrapper:
         self,
         file_path: str,
         json_path: Optional[str] = None,
+        verbose: bool = True,
     ) -> dict | List[Tuple[str, dict]]:
         """
         Get the json content of the specified report definition file.
@@ -181,6 +182,8 @@ class ReportWrapper:
             The path of the report definition file. For example: "definition/pages/pages.json". You may also use wildcards. For example: "definition/pages/*/page.json".
         json_path : str, default=None
             The json path to the specific part of the file to be retrieved. If None, the entire file content is returned.
+        verbose : bool, default=True
+            If True, prints messages about the retrieval process. If False, suppresses these messages.
 
         Returns
         -------
@@ -192,6 +195,7 @@ class ReportWrapper:
 
         # Find matching parts
         if "*" in file_path:
+            results = []
             matching_parts = [
                 (part.get("path"), part.get("payload"))
                 for part in parts
@@ -199,9 +203,11 @@ class ReportWrapper:
             ]
 
             if not matching_parts:
-
-
-
+                if verbose:
+                    print(
+                        f"{icons.red_dot} No files match the wildcard path '{file_path}'."
+                    )
+                return results
 
             results = []
             for path, payload in matching_parts:
@@ -220,8 +226,8 @@ class ReportWrapper:
                 # raise ValueError(
                 #     f"{icons.red_dot} No match found for '{json_path}' in '{path}'."
                 # )
-            if not results:
-
+            if not results and verbose:
+                print(
                     f"{icons.red_dot} No match found for '{json_path}' in any of the files matching the wildcard path '{file_path}'."
                 )
             return results
@@ -241,14 +247,11 @@ class ReportWrapper:
             matches = jsonpath_expr.find(payload)
             if matches:
                 return matches[0].value
-
-
-                f"{icons.red_dot} No match found for '{json_path}'."
-            )
+            elif verbose:
+                print(f"{icons.red_dot} No match found for '{json_path}'.")
 
-
-            f"{icons.red_dot} File '{file_path}' not found in report definition."
-        )
+        if verbose:
+            print(f"{icons.red_dot} File '{file_path}' not found in report definition.")
 
     def add(self, file_path: str, payload: dict | bytes):
         """
```
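The new `verbose` flag on `ReportWrapper.get` lets callers probe a report definition quietly: when nothing matches, the method prints nothing and simply returns an empty result, which is what the internal callers added in this release rely on. A sketch (report and workspace names are placeholders, and the `ReportWrapper` constructor arguments are assumed from earlier releases):

```python
from sempy_labs.report import ReportWrapper

rpt = ReportWrapper(report="Sales Report", workspace="Sales")

# With verbose=False, a wildcard path with no matches yields an empty
# result silently instead of printing a red-dot message.
pages = rpt.get(
    file_path="definition/pages/*/page.json",
    json_path="$.displayName",
    verbose=False,
)
```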
Also in `sempy_labs/report/_reportwrapper.py`, the custom-visuals listing gains an `Is Public` column and a rewritten population path:

```diff
@@ -674,33 +677,65 @@ class ReportWrapper:
         columns = {
             "Custom Visual Name": "str",
             "Custom Visual Display Name": "str",
+            "Is Public": "bool",
             "Used in Report": "bool",
         }
 
         df = _create_dataframe(columns=columns)
 
-
+        visuals = []
+        rp = self.get(
+            file_path=self._report_file_path,
+            json_path="$.resourcePackages",
+            verbose=False,
+        )
 
-
-
-
+        if rp:
+            visuals += [
+                {"Custom Visual Name": item.get("name"), "Is Public": False}
+                for item in rp
+                if item.get("type") == "CustomVisual"
+            ]
+
+        # Load public custom visuals
+        public_custom_visuals = (
+            self.get(
+                file_path=self._report_file_path,
+                json_path="$.publicCustomVisuals",
+                verbose=False,
+            )
+            or []
         )
 
-
-
-
-
-
-
-
-
-
-        if r["Custom Visual Name"] in visual_types:
-            df.at[_, "Used in Report"] = True
-        else:
-            df.at[_, "Used in Report"] = False
+        visuals += [
+            {
+                "Custom Visual Name": (
+                    item.get("name") if isinstance(item, dict) else item
+                ),
+                "Is Public": True,
+            }
+            for item in public_custom_visuals
+        ]
 
-
+        if visuals:
+            df = pd.DataFrame(visuals, columns=list(columns.keys()))
+
+            # df["Custom Visual Name"] = report_file.get("publicCustomVisuals")
+            df["Custom Visual Display Name"] = df["Custom Visual Name"].apply(
+                lambda x: helper.vis_type_mapping.get(x, x)
+            )
+
+            visual_types = set()
+            for v in self.__all_visuals():
+                payload = v.get("payload", {})
+                visual = payload.get("visual", {})
+                visual_type = visual.get("visualType")
+                if visual_type:
+                    visual_types.add(visual_type)
+
+            df["Used in Report"] = df["Custom Visual Name"].isin(visual_types)
+
+            _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
         return df
```
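This rewrite merges organizational visuals from `$.resourcePackages` (type `CustomVisual`, `Is Public` False) with AppSource visuals from `$.publicCustomVisuals` (`Is Public` True), then marks `Used in Report` by intersecting with the visual types actually placed on pages. Continuing the sketch above (the method name `list_custom_visuals` is an assumption inferred from the column set; it is not visible in this hunk):

```python
# Method name assumed; the hunk only shows the method body.
df = rpt.list_custom_visuals()

# New in 0.12.5: 'Is Public' distinguishes AppSource visuals from
# organizational ones bundled as resource packages.
unused_public = df[df["Is Public"] & ~df["Used in Report"]]
print(unused_public[["Custom Visual Name", "Custom Visual Display Name"]])
```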
Theme handling in the same file drops the hardcoded theme version and upserts the registered theme resource:

```diff
@@ -1886,7 +1921,6 @@ class ReportWrapper:
         )
 
         self._ensure_pbir()
-        theme_version = "5.6.4"
 
         # Extract theme_json from theme_file_path
         if theme_file_path:
@@ -1912,14 +1946,25 @@ class ReportWrapper:
         theme_name_full = f"{theme_name}.json"
 
         # Add theme.json file
-
-
-
+        try:
+            self.add(
+                file_path=f"StaticResources/RegisteredResources/{theme_name_full}",
+                payload=theme_json,
+            )
+        except Exception:
+            self.update(
+                file_path=f"StaticResources/RegisteredResources/{theme_name_full}",
+                payload=theme_json,
+            )
+
+        rpt_version_at_import = self.get(
+            file_path=self._report_file_path,
+            json_path="$.themeCollection.baseTheme.reportVersionAtImport",
         )
 
         custom_theme = {
             "name": theme_name_full,
-            "reportVersionAtImport":
+            "reportVersionAtImport": rpt_version_at_import,
             "type": "RegisteredResources",
         }
```
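Registering a theme is now an upsert: `add` is tried first and `update` handles the case where the resource already exists, and `reportVersionAtImport` is read from the report's base theme rather than the removed hardcoded `theme_version = "5.6.4"`. A sketch (assuming the surrounding method is the wrapper's theme setter, of which this hunk shows only the middle; the file path is a placeholder):

```python
# Re-applying the same theme no longer fails on a duplicate
# RegisteredResources entry; the except branch updates it in place.
rpt.set_theme(theme_file_path="/lakehouse/default/Files/corp_theme.json")
```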
sempy_labs/tom/_model.py
CHANGED

```diff
@@ -151,7 +151,12 @@ class TOMWrapper:
 
         self._table_map = {}
         self._column_map = {}
-        self._compat_level = self.model.
+        self._compat_level = self.model.Database.CompatibilityLevel
+
+        # Max compat level
+        s = self.model.Server.SupportedCompatibilityLevels
+        nums = [int(x) for x in s.split(",") if x.strip() != "1000000"]
+        self._max_compat_level = max(nums)
 
         # Minimum campat level for lineage tags is 1540 (https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.table.lineagetag?view=analysisservices-dotnet#microsoft-analysisservices-tabular-table-lineagetag)
         if self._compat_level >= 1540:
```
```diff
@@ -240,6 +245,22 @@ class TOMWrapper:
             if t.CalculationGroup is not None:
                 yield t
 
+    def all_functions(self):
+        """
+        Outputs a list of all user-defined functions in the semantic model.
+
+        Parameters
+        ----------
+
+        Returns
+        -------
+        Iterator[Microsoft.AnalysisServices.Tabular.Function]
+            All user-defined functions within the semantic model.
+        """
+
+        for f in self.model.Functions:
+            yield f
+
     def all_measures(self):
         """
         Outputs a list of all measures in the semantic model.
```
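`all_functions` follows the same generator pattern as the existing `all_measures`-style iterators, yielding TOM `Function` objects. A sketch using the library's TOM context manager (the model name is a placeholder):

```python
from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=True) as tom:
    # Iterate the model's user-defined DAX functions.
    for f in tom.all_functions():
        print(f.Name)
```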
```diff
@@ -759,6 +780,60 @@ class TOMWrapper:
             obj.Description = description
         self.model.Roles.Add(obj)
 
+    def set_compatibility_level(self, compatibility_level: int):
+        """
+        Sets compatibility level of the semantic model
+
+        Parameters
+        ----------
+        compatibility_level : int
+            The compatibility level to set for the semantic model.
+        """
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        if compatibility_level < 1500 or compatibility_level > self._max_compat_level:
+            raise ValueError(
+                f"{icons.red_dot} Compatibility level must be between 1500 and {self._max_compat_level}."
+            )
+        if self._compat_level > compatibility_level:
+            print(
+                f"{icons.warning} Compatibility level can only be increased, not decreased."
+            )
+            return
+
+        self.model.Database.CompatibilityLevel = compatibility_level
+        bim = TOM.JsonScripter.ScriptCreateOrReplace(self.model.Database)
+        fabric.execute_tmsl(script=bim, workspace=self._workspace_id)
+
+    def set_user_defined_function(self, name: str, expression: str):
+        """
+        Sets the definition of a `user-defined <https://learn.microsoft.com/en-us/dax/best-practices/dax-user-defined-functions#using-model-explorer>`_ function within the semantic model. This function requires that the compatibility level is at least 1702.
+
+        Parameters
+        ----------
+        name : str
+            Name of the user-defined function.
+        expression : str
+            The DAX expression for the user-defined function.
+        """
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        if self._compat_level < 1702:
+            raise ValueError(
+                f"{icons.warning} User-defined functions require a compatibility level of at least 1702. The current compatibility level is {self._compat_level}. Use the 'tom.set_compatibility_level' function to change the compatibility level."
+            )
+
+        existing = [f.Name for f in self.model.Functions]
+
+        if name in existing:
+            self.model.Functions[name].Expression = expression
+        else:
+            obj = TOM.Function()
+            obj.Name = name
+            obj.Expression = expression
+            obj.LineageTag = generate_guid()
+            self.model.Functions.Add(obj)
+
     def set_rls(self, role_name: str, table_name: str, filter_expression: str):
         """
         Sets the row level security permissions for a table within a role.
```
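Together these two methods enable DAX user-defined functions: raise the compatibility level to at least 1702 (the setter only ever raises the level, capped at the server's supported maximum), then create or update the function by name. A sketch (model name and DAX expression are placeholders; the lambda-style body follows the user-defined-functions docs linked in the docstring):

```python
from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False) as tom:
    if tom._compat_level < 1702:
        tom.set_compatibility_level(1702)

    # Creates the function, or overwrites its expression if it exists.
    tom.set_user_defined_function(name="AddTwo", expression="(x) => x + 2")
```

Note that `_compat_level` is captured when the wrapper connects and, as written in this hunk, is not refreshed by `set_compatibility_level`, so a model whose level was just raised may need a fresh connection before `set_user_defined_function` passes its check.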
```diff
@@ -1908,6 +1983,8 @@ class TOMWrapper:
             object.Parent.CalculationItems.Remove(object.Name)
         elif objType == TOM.ObjectType.TablePermission:
             object.Parent.TablePermissions.Remove(object.Name)
+        elif objType == TOM.ObjectType.Function:
+            object.Parent.Functions.Remove(object.Name)
 
     def used_in_relationships(self, object: Union["TOM.Table", "TOM.Column"]):
         """
@@ -4749,8 +4826,8 @@ class TOMWrapper:
         value_filter_behavior = value_filter_behavior.capitalize()
         min_compat = 1606
 
-        if self.model.
-            self.model.
+        if self.model.Database.CompatibilityLevel < min_compat:
+            self.model.Database.CompatibilityLevel = min_compat
 
         self.model.ValueFilterBehavior = System.Enum.Parse(
             TOM.ValueFilterBehaviorType, value_filter_behavior
@@ -5840,7 +5917,7 @@ class TOMWrapper:
         import Microsoft.AnalysisServices.Tabular as TOM
 
         # ChangedProperty logic (min compat level is 1567) https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.changedproperty?view=analysisservices-dotnet
-        if self.model.
+        if self.model.Database.CompatibilityLevel >= 1567:
             for t in self.model.Tables:
                 if any(
                     p.SourceType == TOM.PartitionSourceType.Entity
```