semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic. See the registry's advisory page for more details.
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/METADATA +43 -7
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/RECORD +59 -40
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +116 -58
- sempy_labs/_ai.py +0 -2
- sempy_labs/_capacities.py +39 -3
- sempy_labs/_capacity_migration.py +623 -0
- sempy_labs/_clear_cache.py +8 -8
- sempy_labs/_connections.py +15 -13
- sempy_labs/_data_pipelines.py +118 -0
- sempy_labs/_documentation.py +144 -0
- sempy_labs/_eventhouses.py +118 -0
- sempy_labs/_eventstreams.py +118 -0
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +23 -24
- sempy_labs/_helper_functions.py +140 -47
- sempy_labs/_icons.py +40 -0
- sempy_labs/_kql_databases.py +134 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_list_functions.py +218 -421
- sempy_labs/_mirrored_warehouses.py +50 -0
- sempy_labs/_ml_experiments.py +122 -0
- sempy_labs/_ml_models.py +120 -0
- sempy_labs/_model_auto_build.py +0 -4
- sempy_labs/_model_bpa.py +10 -12
- sempy_labs/_model_bpa_bulk.py +8 -7
- sempy_labs/_model_dependencies.py +26 -18
- sempy_labs/_notebooks.py +5 -16
- sempy_labs/_query_scale_out.py +6 -5
- sempy_labs/_refresh_semantic_model.py +7 -19
- sempy_labs/_spark.py +40 -45
- sempy_labs/_sql.py +60 -15
- sempy_labs/_vertipaq.py +25 -25
- sempy_labs/_warehouses.py +132 -0
- sempy_labs/_workspaces.py +0 -3
- sempy_labs/admin/__init__.py +53 -0
- sempy_labs/admin/_basic_functions.py +888 -0
- sempy_labs/admin/_domains.py +411 -0
- sempy_labs/directlake/_directlake_schema_sync.py +1 -1
- sempy_labs/directlake/_dl_helper.py +32 -16
- sempy_labs/directlake/_generate_shared_expression.py +11 -14
- sempy_labs/directlake/_guardrails.py +7 -7
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
- sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
- sempy_labs/directlake/_warm_cache.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
- sempy_labs/lakehouse/_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/report/__init__.py +9 -6
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_report_bpa.py +369 -0
- sempy_labs/report/_report_bpa_rules.py +113 -0
- sempy_labs/report/_report_helper.py +254 -0
- sempy_labs/report/_report_list_functions.py +95 -0
- sempy_labs/report/_report_rebind.py +0 -4
- sempy_labs/report/_reportwrapper.py +2037 -0
- sempy_labs/tom/_model.py +333 -22
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,254 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
from typing import Tuple, Optional
|
|
3
|
+
import sempy_labs._icons as icons
|
|
4
|
+
import re
|
|
5
|
+
import base64
|
|
6
|
+
import json
|
|
7
|
+
import requests
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# Maps Power BI visual type identifiers, as they appear in the report
# definition JSON, to the human-readable display names shown in the
# Power BI visualizations pane.
vis_type_mapping = {
    "barChart": "Bar chart",
    "columnChart": "Column chart",
    "clusteredBarChart": "Clustered bar chart",
    "clusteredColumnChart": "Clustered column chart",
    "hundredPercentStackedBarChart": "100% Stacked bar chart",
    "hundredPercentStackedColumnChart": "100% Stacked column chart",
    "lineChart": "Line chart",
    "areaChart": "Area chart",
    "stackedAreaChart": "Stacked area chart",
    "lineStackedColumnComboChart": "Line and stacked column chart",
    "lineClusteredColumnComboChart": "Line and clustered column chart",
    "ribbonChart": "Ribbon chart",
    "waterfallChart": "Waterfall chart",
    "funnel": "Funnel chart",
    "scatterChart": "Scatter chart",
    "pieChart": "Pie chart",
    "donutChart": "Donut chart",
    "treemap": "Treemap",
    "map": "Map",
    "filledMap": "Filled map",
    "shapeMap": "Shape map",
    "azureMap": "Azure map",
    "gauge": "Gauge",
    "card": "Card",
    "multiRowCard": "Multi-row card",
    "kpi": "KPI",
    "slicer": "Slicer",
    "tableEx": "Table",
    "pivotTable": "Matrix",
    "scriptVisual": "R script visual",
    "pythonVisual": "Python visual",
    "keyDriversVisual": "Key influencers",
    "decompositionTreeVisual": "Decomposition tree",
    "qnaVisual": "Q&A",
    "aiNarratives": "Narrative",
    "scorecard": "Metrics (Preview)",
    "rdlVisual": "Paginated report",
    "cardVisual": "Card (new)",
    "advancedSlicerVisual": "Slicer (new)",
    "actionButton": "Button",
    "bookmarkNavigator": "Bookmark navigator",
    "image": "Image",
    "textbox": "Textbox",
    "pageNavigator": "Page navigator",
    "shape": "Shape",
    "Group": "Group",
}

# Maps a report page's (width, height) in pixels to its canvas type name.
page_type_mapping = {
    (320, 240): "Tooltip",
    (816, 1056): "Letter",
    (960, 720): "4:3",
    (1280, 720): "16:9",
}

# The recognized page canvas type names (the values of page_type_mapping).
page_types = ["Tooltip", "Letter", "4:3", "16:9"]
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def get_web_url(report: str, workspace: Optional[str] = None):
    """
    Return the web URL of a Power BI report.

    Parameters
    ----------
    report : str
        Name of the report.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    str
        The report's web URL.
    """

    workspace = fabric.resolve_workspace_name(workspace)

    reports = fabric.list_reports(workspace=workspace)
    matches = reports[reports["Name"] == report]

    # No row with that name means the report does not exist in the workspace.
    if matches.empty:
        raise ValueError(
            f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
        )

    return matches["Web Url"].iloc[0]
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def populate_custom_visual_display_names():
    """
    Populate ``vis_type_mapping`` with display names of custom visuals.

    Pages through the public AppSource catalog API for Power BI visuals and
    adds an entry mapping each custom visual's GUID (``powerBIVisualId``) to
    its display name. Mutates the module-level ``vis_type_mapping`` in place.
    """

    url = "https://catalogapi.azure.com/offers?api-version=2018-08-01-beta&storefront=appsource&$filter=offerType+eq+%27PowerBIVisuals%27"

    def fetch_all_pages(start_url):
        # Accumulates the JSON of every page: list values are concatenated,
        # non-list values are overwritten by the most recent page.
        combined_json = {}
        current_url = start_url

        while current_url:
            # Send GET request to the current page URL. A timeout prevents
            # the call from hanging indefinitely on an unresponsive server.
            response = requests.get(current_url, timeout=60)

            if response.status_code == 200:
                data = response.json()
                # Merge the current page JSON into the combined JSON
                for key, value in data.items():
                    if key not in combined_json:
                        combined_json[key] = value
                    elif isinstance(value, list):
                        combined_json[key].extend(value)
                    else:
                        combined_json[key] = value

                # Follow pagination; absent "nextPageLink" ends the loop.
                current_url = data.get("nextPageLink")
            else:
                print(f"Error fetching page: {response.status_code}")
                break

        return combined_json

    cvJson = fetch_all_pages(url)

    for i in cvJson.get("items", []):
        vizId = i.get("powerBIVisualId")
        displayName = i.get("displayName")
        vis_type_mapping[vizId] = displayName
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def resolve_page_name(self, page_name: str) -> Tuple[str, str, str]:
    """
    Obtains the page name, page display name, and file path for a given page in a report.

    Parameters
    ----------
    page_name : str
        The name of the page of the report - either the page name (GUID) or the page display name.

    Returns
    -------
    Tuple[str, str, str]
        Page name, page display name, file path from the report definition.
    """

    dfP = self.list_pages()

    # Filter once per candidate column instead of first scanning with
    # iterrows() and then filtering the same frame a second time.
    dfP_filt = dfP[dfP["Page Name"] == page_name]
    if not dfP_filt.empty:
        return (
            page_name,
            dfP_filt["Page Display Name"].iloc[0],
            dfP_filt["File Path"].iloc[0],
        )

    dfP_filt = dfP[dfP["Page Display Name"] == page_name]
    if not dfP_filt.empty:
        return (
            dfP_filt["Page Name"].iloc[0],
            page_name,
            dfP_filt["File Path"].iloc[0],
        )

    raise ValueError(
        f"{icons.red_dot} Invalid page name. The '{page_name}' page does not exist in the '{self._report}' report within the '{self._workspace}' workspace."
    )
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def visual_page_mapping(self) -> Tuple[dict, dict]:
    """
    Builds mappings between pages and visuals from the report definition.

    Returns
    -------
    Tuple[dict, dict]
        1. Page folder name -> (page id, page display name).
        2. Visual file path -> (page id, page display name) of its page.
    """

    page_mapping = {}
    visual_mapping = {}
    rd = self.rdef

    # First pass: collect every page's id and display name from its page.json.
    for _, r in rd.iterrows():
        file_path = r["path"]
        if file_path.endswith("/page.json"):
            page_name = re.search(r"/pages/(.*?)/page.json", file_path).group(1)
            # Payload is base64-encoded JSON; decode only for matching rows.
            obj_json = json.loads(base64.b64decode(r["payload"]).decode("utf-8"))
            page_mapping[page_name] = (obj_json.get("name"), obj_json.get("displayName"))

    # Second pass: link each visual.json to its page's id/display name.
    for _, r in rd.iterrows():
        file_path = r["path"]
        if file_path.endswith("/visual.json"):
            page_name = re.search(r"/pages/(.*?)/visuals/", file_path).group(1)
            # Single lookup instead of two; still raises TypeError (as the
            # original did) if the page folder was never seen above.
            page_info = page_mapping.get(page_name)
            visual_mapping[file_path] = (page_info[0], page_info[1])

    return page_mapping, visual_mapping
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def resolve_visual_name(
    self, page_name: str, visual_name: str
) -> Tuple[str, str, str, str]:
    """
    Obtains the page name, page display name, visual name, and file path for a given visual in a report.

    Parameters
    ----------
    page_name : str
        The name of the page of the report - either the page name (GUID) or the page display name.
    visual_name : str
        The name of the visual of the report.

    Returns
    -------
    Tuple[str, str, str, str]
        Page name, page display name, visual name, file path from the report definition.
    """

    dfV = self.list_visuals()

    # Filter once per candidate column instead of first scanning with
    # iterrows() and then filtering the same frame a second time.
    dfV_filt = dfV[
        (dfV["Page Name"] == page_name) & (dfV["Visual Name"] == visual_name)
    ]
    if not dfV_filt.empty:
        return (
            page_name,
            dfV_filt["Page Display Name"].iloc[0],
            visual_name,
            dfV_filt["File Path"].iloc[0],
        )

    dfV_filt = dfV[
        (dfV["Page Display Name"] == page_name)
        & (dfV["Visual Name"] == visual_name)
    ]
    if not dfV_filt.empty:
        return (
            dfV_filt["Page Name"].iloc[0],
            page_name,
            visual_name,
            dfV_filt["File Path"].iloc[0],
        )

    raise ValueError(
        f"{icons.red_dot} Invalid page/visual name. The '{visual_name}' visual on the '{page_name}' page does not exist in the '{self._report}' report within the '{self._workspace}' workspace."
    )
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def find_entity_property_pairs(data, result=None, keys_path=None):
    # Recursively walk a report-definition JSON structure (nested dicts/lists)
    # and collect every Entity/Property reference found in it.
    #
    # Parameters: data — the parsed JSON node being inspected; result —
    # accumulator dict of property name -> (entity, object type), created on
    # the first call; keys_path — stack of dict keys visited on the way down,
    # shared (mutated in place) across the recursive calls.
    # Returns: result, mapping each Property value to its (Entity, object type).

    if result is None:
        result = {}
    if keys_path is None:
        keys_path = []

    if isinstance(data, dict):
        # A node shaped like {"Expression": {"SourceRef": {"Entity": ...}},
        # "Property": ...} is a model-object reference; the enclosing dict key
        # (last element of keys_path) indicates the kind of object, with
        # "HierarchyLevel" normalized to "Hierarchy".
        if (
            "Entity" in data.get("Expression", {}).get("SourceRef", {})
            and "Property" in data
        ):
            entity = data.get("Expression", {}).get("SourceRef", {}).get("Entity", {})
            property_value = data.get("Property")
            object_type = keys_path[-1].replace("HierarchyLevel", "Hierarchy")
            result[property_value] = (entity, object_type)
            # NOTE(review): keys_path is popped only when a match is found,
            # never after the recursive descent below — confirm the path stack
            # stays aligned with the actual nesting for all inputs.
            keys_path.pop()

        # Recursively search the rest of the dictionary
        for key, value in data.items():
            keys_path.append(key)
            find_entity_property_pairs(value, result, keys_path)

    elif isinstance(data, list):
        # Lists contribute no key to the path; recurse into each element.
        for item in data:
            find_entity_property_pairs(item, result, keys_path)

    return result
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
from typing import Optional
|
|
3
|
+
import pandas as pd
|
|
4
|
+
from sempy_labs._helper_functions import (
|
|
5
|
+
format_dax_object_name,
|
|
6
|
+
)
|
|
7
|
+
from sempy_labs.report._reportwrapper import ReportWrapper
|
|
8
|
+
from sempy_labs._list_functions import list_reports_using_semantic_model
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def list_unused_objects_in_reports(
    dataset: str, workspace: Optional[str] = None
) -> pd.DataFrame:
    """
    Shows a list of all columns in the semantic model which are not used in any related Power BI reports (including dependencies).

    Parameters
    ----------
    dataset : str
        Name of the semantic model.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of all columns in the semantic model which are not used in any related Power BI reports (including dependencies).
    """

    # TODO: what about relationships/RLS?

    # Columns referenced by any report built on this semantic model.
    used = _list_all_report_semantic_model_objects(dataset=dataset, workspace=workspace)
    used_cols = (
        used[used["Object Type"] == "Column"][["Table Name", "Object Name"]]
        .drop_duplicates()
        .reset_index(drop=True)
    )
    used_cols["Column Object"] = format_dax_object_name(
        used_cols["Table Name"], used_cols["Object Name"]
    )

    # All columns in the model, keyed the same way for the anti-join below.
    all_cols = fabric.list_columns(dataset=dataset, workspace=workspace)
    all_cols["Column Object"] = format_dax_object_name(
        all_cols["Table Name"], all_cols["Column Name"]
    )

    unused = all_cols[~all_cols["Column Object"].isin(used_cols["Column Object"].values)]
    return unused.drop("Column Object", axis=1)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _list_all_report_semantic_model_objects(
    dataset: str, workspace: Optional[str] = None
) -> pd.DataFrame:
    """
    Shows a unique list of all semantic model objects (columns, measures, hierarchies) which are used in all reports which leverage the semantic model.

    Parameters
    ----------
    dataset : str
        Name of the semantic model.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe.
    """

    report_frames = []
    # Collect the model objects used by every report bound to this dataset.
    for _, row in list_reports_using_semantic_model(
        dataset=dataset, workspace=workspace
    ).iterrows():
        name = row["Report Name"]
        ws = row["Report Workspace Name"]

        wrapper = ReportWrapper(report=name, workspace=ws)
        objects = wrapper._list_all_semantic_model_objects()
        objects["Report Name"] = name
        objects["Report Workspace"] = ws
        report_frames.append(objects)

    combined = pd.concat(report_frames, ignore_index=True)

    # Relocate the report identification columns to positions 2 and 3.
    for position, column in ((2, "Report Name"), (3, "Report Workspace")):
        combined.insert(position, column, combined.pop(column))

    return combined
|
|
@@ -30,10 +30,6 @@ def report_rebind(
|
|
|
30
30
|
The name of the Fabric workspace in which the semantic model resides.
|
|
31
31
|
Defaults to None which resolves to the workspace of the attached lakehouse
|
|
32
32
|
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
33
|
-
|
|
34
|
-
Returns
|
|
35
|
-
-------
|
|
36
|
-
|
|
37
33
|
"""
|
|
38
34
|
|
|
39
35
|
if report_workspace is None:
|