semantic-link-labs 0.8.3-py3-none-any.whl → 0.8.5-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: the registry has flagged this version of semantic-link-labs as possibly problematic.
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +53 -1
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
- sempy_labs/_capacities.py +49 -14
- sempy_labs/_capacity_migration.py +1 -7
- sempy_labs/_data_pipelines.py +6 -0
- sempy_labs/_dataflows.py +118 -1
- sempy_labs/_dax.py +189 -3
- sempy_labs/_deployment_pipelines.py +13 -7
- sempy_labs/_environments.py +6 -0
- sempy_labs/_eventhouses.py +6 -0
- sempy_labs/_eventstreams.py +6 -0
- sempy_labs/_external_data_shares.py +6 -4
- sempy_labs/_generate_semantic_model.py +26 -3
- sempy_labs/_git.py +14 -14
- sempy_labs/_helper_functions.py +197 -1
- sempy_labs/_icons.py +55 -22
- sempy_labs/_kql_databases.py +6 -0
- sempy_labs/_kql_querysets.py +6 -0
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +166 -0
- sempy_labs/_mirrored_databases.py +428 -0
- sempy_labs/_mirrored_warehouses.py +2 -0
- sempy_labs/_ml_experiments.py +6 -0
- sempy_labs/_ml_models.py +7 -1
- sempy_labs/_model_bpa.py +215 -181
- sempy_labs/_model_bpa_bulk.py +46 -42
- sempy_labs/_model_bpa_rules.py +8 -3
- sempy_labs/_model_dependencies.py +41 -87
- sempy_labs/_notebooks.py +107 -12
- sempy_labs/_query_scale_out.py +8 -6
- sempy_labs/_refresh_semantic_model.py +299 -49
- sempy_labs/_spark.py +12 -5
- sempy_labs/_translations.py +2 -0
- sempy_labs/_vertipaq.py +89 -86
- sempy_labs/_warehouses.py +79 -0
- sempy_labs/_workloads.py +128 -0
- sempy_labs/_workspace_identity.py +4 -4
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +131 -43
- sempy_labs/admin/_domains.py +18 -18
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_sync.py +2 -1
- sempy_labs/directlake/_dl_helper.py +4 -1
- sempy_labs/directlake/_get_shared_expression.py +7 -1
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
- sempy_labs/directlake/_warm_cache.py +10 -9
- sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
- sempy_labs/lakehouse/_shortcuts.py +4 -0
- sempy_labs/migration/_create_pqt_file.py +5 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
- sempy_labs/migration/_migration_validation.py +2 -0
- sempy_labs/migration/_refresh_calc_tables.py +1 -0
- sempy_labs/report/__init__.py +6 -1
- sempy_labs/report/_download_report.py +75 -0
- sempy_labs/report/_generate_report.py +6 -0
- sempy_labs/report/_paginated.py +74 -0
- sempy_labs/report/_report_functions.py +6 -0
- sempy_labs/report/_report_rebind.py +2 -0
- sempy_labs/report/_reportwrapper.py +4 -2
- sempy_labs/tom/_model.py +135 -68
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py CHANGED

```diff
@@ -1,5 +1,6 @@
 import sempy.fabric as fabric
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
+from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
 )
@@ -53,6 +54,8 @@ def update_direct_lake_model_lakehouse_connection(
             f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
         )
 
+    icons.sll_tags.append("UpdateDLConnection")
+
     shEx = get_shared_expression(lakehouse, lakehouse_workspace)
 
     with connect_semantic_model(
@@ -69,3 +72,78 @@ def update_direct_lake_model_lakehouse_connection(
         print(
             f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
         )
+
+
+def update_direct_lake_model_connection(
+    dataset: str,
+    workspace: Optional[str] = None,
+    source: Optional[str] = None,
+    source_type: Optional[str] = "Lakehouse",
+    source_workspace: Optional[str] = None,
+):
+    """
+    Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse/warehouse.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    workspace : str, default=None
+        The Fabric workspace name in which the semantic model exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    source : str, default=None
+        The name of the Fabric lakehouse/warehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    source_type : str, default="Lakehouse"
+        The type of source for the Direct Lake semantic model. Valid options: "Lakehouse", "Warehouse".
+    source_workspace : str, default=None
+        The Fabric workspace used by the lakehouse/warehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
+
+    source_type = source_type.capitalize()
+
+    if source_type not in ["Lakehouse", "Warehouse"]:
+        raise ValueError(
+            f"{icons.red_dot} The 'source_type' must be either 'Lakehouse' or 'Warehouse'."
+        )
+
+    if source_workspace is None:
+        source_workspace = workspace
+
+    if source is None:
+        source_id = fabric.get_lakehouse_id()
+        source = resolve_lakehouse_name(source_id, source_workspace)
+    else:
+        source_id = fabric.resolve_item_id(
+            item_name=source, type=source_type, workspace=source_workspace
+        )
+        source = fabric.resolve_item_name(
+            item_id=source_id, workspace=source_workspace, type=source_type
+        )
+
+    icons.sll_tags.append("UpdateDLConnection")
+
+    shEx = generate_shared_expression(
+        item_name=source, item_type=source_type, workspace=source_workspace
+    )
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=False, workspace=workspace
+    ) as tom:
+
+        if not tom.is_direct_lake():
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            )
+
+        tom.model.Expressions["DatabaseQuery"].Expression = shEx
+
+        print(
+            f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
+        )
```
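The new `update_direct_lake_model_connection` generalizes the lakehouse-only remap to warehouses. A minimal usage sketch, assuming the function is exported from `sempy_labs.directlake` (the `directlake/__init__.py +2 -0` change above suggests it is); the dataset and source names are hypothetical:

```python
from sempy_labs.directlake import update_direct_lake_model_connection

# Repoint the model's "DatabaseQuery" expression at a warehouse.
# "Sales Model" and "SalesWarehouse" are placeholder names.
update_direct_lake_model_connection(
    dataset="Sales Model",
    source="SalesWarehouse",
    source_type="Warehouse",
)
```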
sempy_labs/directlake/_update_directlake_partition_entity.py CHANGED

```diff
@@ -3,6 +3,7 @@ import sempy.fabric as fabric
 from sempy_labs.tom import connect_semantic_model
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
+from sempy_labs._helper_functions import _convert_data_type
 from typing import List, Optional, Union
 import sempy_labs._icons as icons
 
@@ -12,7 +13,6 @@ def update_direct_lake_partition_entity(
     table_name: Union[str, List[str]],
     entity_name: Union[str, List[str]],
     workspace: Optional[str] = None,
-    **kwargs,
 ):
     """
     Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.
@@ -31,28 +31,8 @@ def update_direct_lake_partition_entity(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if "lakehouse" in kwargs:
-        print(
-            "The 'lakehouse' parameter has been deprecated as it is no longer necessary. Please remove this parameter from the function going forward."
-        )
-        del kwargs["lakehouse"]
-    if "lakehouse_workspace" in kwargs:
-        print(
-            "The 'lakehouse_workspace' parameter has been deprecated as it is no longer necessary. Please remove this parameter from the function going forward."
-        )
-        del kwargs["lakehouse_workspace"]
-
-    workspace = fabric.resolve_workspace_name(workspace)
-
-    artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
-    )
-
-    if artifact_type == "Warehouse":
-        raise ValueError(
-            f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from lakehouses, not warehouses."
-        )
-    lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
 
     # Support both str & list types
     if isinstance(table_name, str):
@@ -65,6 +45,8 @@ def update_direct_lake_partition_entity(
             f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length."
         )
 
+    icons.sll_tags.append("UpdateDLPartition")
+
     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:
@@ -88,12 +70,11 @@ def update_direct_lake_partition_entity(
                 raise ValueError(
                     f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated."
                 )
-            tom.model.Tables[tName].Partitions[part_name].Source.EntityName = (
-                eName
-            )
-            print(
-                f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
-            )
+
+            tom.model.Tables[tName].Partitions[part_name].Source.EntityName = eName
+            print(
+                f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table."
+            )
 
 
 def add_table_to_direct_lake_semantic_model(
@@ -149,9 +130,9 @@ def add_table_to_direct_lake_semantic_model(
 
         table_count = tom.model.Tables.Count
 
-        if tom.is_direct_lake() is False and table_count > 0:
+        if not tom.is_direct_lake() and table_count > 0:
             raise ValueError(
-                "This function is only valid for Direct Lake semantic models or semantic models with no tables."
+                f"{icons.red_dot} This function is only valid for Direct Lake semantic models or semantic models with no tables."
             )
 
         if any(
@@ -204,7 +185,7 @@ def add_table_to_direct_lake_semantic_model(
         for i, r in dfLC_filt.iterrows():
             lakeCName = r["Column Name"]
             dType = r["Data Type"]
-            dt =
+            dt = _convert_data_type(dType)
             tom.add_data_column(
                 table_name=table_name,
                 column_name=lakeCName,
```
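A short usage sketch for the simplified signature; the import path assumes the function is exported from `sempy_labs.directlake`, and the names are hypothetical. The deprecated `lakehouse`/`lakehouse_workspace` kwargs shim is gone, so callers pass only the remaining parameters:

```python
from sempy_labs.directlake import update_direct_lake_partition_entity

# Remap two semantic-model tables to renamed delta tables in the lakehouse.
update_direct_lake_partition_entity(
    dataset="Sales Model",                       # hypothetical model name
    table_name=["DimCustomer", "FactSales"],     # tables in the semantic model
    entity_name=["dim_customer", "fact_sales"],  # delta tables they should point to
)
```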
sempy_labs/directlake/_warm_cache.py CHANGED

```diff
@@ -177,7 +177,7 @@ def warm_direct_lake_cache_isresident(
     dfC["DAX Object Name"] = format_dax_object_name(
         dfC["Table Name"], dfC["Column Name"]
     )
-    dfC_filtered = dfC[dfC["Is Resident"]]
+    dfC_filtered = dfC[dfC["Is Resident"] == True]
 
     if len(dfC_filtered) == 0:
         raise ValueError(
@@ -186,17 +186,18 @@ def warm_direct_lake_cache_isresident(
 
     # Refresh/frame dataset
     refresh_semantic_model(dataset=dataset, refresh_type="full", workspace=workspace)
-
     time.sleep(2)
 
-    tbls = dfC_filtered["Table Name"].unique()
-    column_values = dfC_filtered["DAX Object Name"].tolist()
-
     # Run basic query to get columns into memory; completed one table at a time (so as not to overload the capacity)
-
-
-
-
+    tbls = dfC_filtered["Table Name"].unique()
+    for table_name in (bar := tqdm(tbls)):
+        bar.set_description(f"Warming the '{table_name}' table...")
+        css = ", ".join(
+            dfC_filtered[dfC_filtered["Table Name"] == table_name]["DAX Object Name"]
+            .astype(str)
+            .tolist()
+        )
+        dax = f"""EVALUATE TOPN(1,SUMMARIZECOLUMNS({css}))"""
         fabric.evaluate_dax(dataset=dataset, dax_string=dax, workspace=workspace)
 
     print(
```
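For reference, the per-table warming query built above takes this shape; a standalone sketch with hypothetical column names:

```python
# Each iteration joins the table's resident columns and asks for a single row,
# which forces the column segments into memory.
css = ", ".join(["'Sales'[OrderId]", "'Sales'[Amount]"])
dax = f"""EVALUATE TOPN(1,SUMMARIZECOLUMNS({css}))"""
print(dax)  # EVALUATE TOPN(1,SUMMARIZECOLUMNS('Sales'[OrderId], 'Sales'[Amount]))
```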
sempy_labs/lakehouse/_get_lakehouse_tables.py CHANGED

```diff
@@ -31,12 +31,14 @@ def get_lakehouse_tables(
     """
    Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.
 
+    This is a wrapper function for the following API: `Tables - List Tables <https://learn.microsoft.com/rest/api/fabric/lakehouse/tables/list-tables>`_ plus extended capabilities.
+
     Parameters
     ----------
     lakehouse : str, default=None
         The Fabric lakehouse.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-
+    workspace : str, default=None
         The Fabric workspace used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -155,7 +157,9 @@ def get_lakehouse_tables(
     if extended:
         intColumns = ["Files", "Row Groups", "Table Size"]
         df[intColumns] = df[intColumns].astype(int)
-
+
+        col_name = guardrail.columns[0]
+        df["SKU"] = guardrail[col_name].iloc[0]
         df["Parquet File Guardrail"] = guardrail["Parquet files per table"].iloc[0]
         df["Row Group Guardrail"] = guardrail["Row groups per table"].iloc[0]
         df["Row Count Guardrail"] = (
```
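With `extended=True` the result now carries a `SKU` column alongside the existing guardrail columns. A usage sketch, assuming the function is exported from `sempy_labs.lakehouse`; lakehouse and workspace names are hypothetical:

```python
from sempy_labs.lakehouse import get_lakehouse_tables

df = get_lakehouse_tables(
    lakehouse="SalesLakehouse", workspace="Analytics", extended=True
)
print(df[["Table Name", "SKU", "Parquet File Guardrail", "Row Group Guardrail"]])
```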
sempy_labs/lakehouse/_shortcuts.py CHANGED

```diff
@@ -20,6 +20,8 @@ def create_shortcut_onelake(
     """
     Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.
 
+    This is a wrapper function for the following API: `OneLake Shortcuts - Create Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/create-shortcut>`_.
+
     Parameters
     ----------
     table_name : str
@@ -169,6 +171,8 @@ def delete_shortcut(
     """
     Deletes a shortcut.
 
+    This is a wrapper function for the following API: `OneLake Shortcuts - Delete Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/delete-shortcut>`_.
+
     Parameters
     ----------
     shortcut_name : str
```
sempy_labs/migration/_create_pqt_file.py CHANGED

```diff
@@ -47,7 +47,6 @@ def create_pqt_file(
 
     folderPath = "/lakehouse/default/Files"
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
-    os.makedirs(subFolderPath, exist_ok=True)
 
     with connect_semantic_model(
         dataset=dataset, workspace=workspace, readonly=True
@@ -81,8 +80,10 @@ def create_pqt_file(
         for e in tom.model.Expressions:
             expr_map[e.Name] = [str(e.Kind), e.Expression]
 
+        expressions = tom.model.Expressions.Count
+
         # Dataflows Gen2 max table limit is 50.
-        max_length = 50
+        max_length = 50 - expressions
         table_chunks = [
             dict(list(table_map.items())[i : i + max_length])
             for i in range(0, len(table_map), max_length)
@@ -90,6 +91,8 @@ def create_pqt_file(
 
     def create_pqt(table_map: dict, expr_map: dict, file_name: str):
 
+        os.makedirs(subFolderPath, exist_ok=True)
+
         class QueryMetadata:
             def __init__(
                 self,
```
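The `max_length = 50 - expressions` change reserves room for the model's expressions inside Dataflows Gen2's 50-query cap, so each .pqt chunk holds fewer tables when more expressions exist. A standalone sketch of the arithmetic with hypothetical counts:

```python
# 120 hypothetical tables, 3 expressions: each chunk may hold 50 - 3 = 47 tables.
table_map = {f"Table{i}": None for i in range(120)}
expressions = 3

max_length = 50 - expressions
table_chunks = [
    dict(list(table_map.items())[i : i + max_length])
    for i in range(0, len(table_map), max_length)
]
print([len(c) for c in table_chunks])  # [47, 47, 26]
```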
sempy_labs/migration/_migrate_calctables_to_lakehouse.py CHANGED

```diff
@@ -282,6 +282,8 @@ def migrate_field_parameters(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
@@ -337,6 +339,7 @@ def migrate_field_parameters(
         try:
             par = TOM.Partition()
             par.Name = tName
+            par.Mode = TOM.ModeType.Import
 
             parSource = TOM.CalculatedPartitionSource()
             par.Source = parSource
@@ -345,7 +348,6 @@ def migrate_field_parameters(
             tbl = TOM.Table()
             tbl.Name = tName
             tbl.LineageTag = generate_guid()
-            tbl.SourceLineageTag = generate_guid()
             tbl.Partitions.Add(par)
 
             columns = ["Value1", "Value2", "Value3"]
@@ -356,7 +358,6 @@ def migrate_field_parameters(
                 col.SourceColumn = "[" + colName + "]"
                 col.DataType = TOM.DataType.String
                 col.LineageTag = generate_guid()
-                col.SourceLineageTag = generate_guid()
 
                 tbl.Columns.Add(col)
 
```
sempy_labs/migration/_migrate_calctables_to_semantic_model.py CHANGED

```diff
@@ -64,6 +64,8 @@ def migrate_calc_tables_to_semantic_model(
     lakehouse_id = fabric.get_lakehouse_id()
     lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     # Get calc tables but not field parameters
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
```
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py CHANGED

```diff
@@ -52,6 +52,8 @@ def migrate_model_objects_to_semantic_model(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
@@ -334,14 +336,6 @@ def migrate_model_objects_to_semantic_model(
                     print(
                         f"{icons.warning} The {relName} relationship was not created as Direct Lake does not support relationships based on columns with different data types."
                     )
-                # Direct Lake using DateTime columns
-                elif isDirectLake and (
-                    r.FromColumn.DataType == TOM.DataType.DateTime
-                    or r.ToColumn.DataType == TOM.DataType.DateTime
-                ):
-                    print(
-                        f"{icons.red_dot} The {relName} relationship was not created as Direct Lake does not support relationships based on columns of DateTime data type."
-                    )
                 # Columns do not exist in the new semantic model
                 elif not any(
                     c.Name == r.FromColumn.Name and c.Parent.Name == r.FromTable.Name
```
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py CHANGED

```diff
@@ -61,6 +61,8 @@ def migrate_tables_columns_to_semantic_model(
     lakehouse_id = fabric.get_lakehouse_id()
     lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     # Check that lakehouse is attached to the notebook
     if not lakehouse_attached() and (lakehouse is None and lakehouse_workspace is None):
         raise ValueError(
@@ -106,9 +108,24 @@ def migrate_tables_columns_to_semantic_model(
     with connect_semantic_model(
         dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
     ) as tom:
+
+        # Additional updates
+        tom.set_annotation(
+            object=tom.model, name="__PBI_TimeIntelligenceEnabled", value="0"
+        )
+        tom.set_annotation(
+            object=tom.model, name="PBI_QueryOrder", value='["DatabaseQuery"]'
+        )
+
+        # Begin migration
         if not any(e.Name == "DatabaseQuery" for e in tom.model.Expressions):
             tom.add_expression("DatabaseQuery", expression=shEx)
             print(f"{icons.green_dot} The 'DatabaseQuery' expression has been added.")
+        tom.set_annotation(
+            object=tom.model.Expressions["DatabaseQuery"],
+            name="PBI_IncludeFutureArtifacts",
+            value="False",
+        )
 
         for i, r in dfT_filt.iterrows():
             tName = r["Name"]
```
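The same annotations can be set by hand through the TOM wrapper if needed; a hedged sketch mirroring the migration code above (the dataset name is hypothetical):

```python
from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(
    dataset="MigratedModel", readonly=False, workspace=None
) as tom:
    # Disable auto date/time and fix query ordering, as the migration now does.
    tom.set_annotation(object=tom.model, name="__PBI_TimeIntelligenceEnabled", value="0")
    tom.set_annotation(object=tom.model, name="PBI_QueryOrder", value='["DatabaseQuery"]')
```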
sempy_labs/migration/_migration_validation.py CHANGED

```diff
@@ -46,6 +46,8 @@ def migration_validation(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
     dfB = list_semantic_model_objects(
         dataset=new_dataset, workspace=new_dataset_workspace
```
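A usage sketch; the import path assumes `migration_validation` is exported from `sempy_labs.migration`, and the model names are hypothetical:

```python
from sempy_labs.migration import migration_validation

# Compare the source model's objects against the migrated Direct Lake model's.
df = migration_validation(dataset="SalesImport", new_dataset="SalesDirectLake")
print(df.head())
```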
sempy_labs/report/__init__.py CHANGED

```diff
@@ -1,13 +1,16 @@
 from sempy_labs.report._reportwrapper import (
     ReportWrapper,
 )
-
+from sempy_labs.report._paginated import (
+    get_report_datasources,
+)
 from sempy_labs.report._generate_report import (
     create_report_from_reportjson,
     get_report_definition,
     update_report_from_reportjson,
     create_model_bpa_report,
 )
+from sempy_labs.report._download_report import download_report
 from sempy_labs.report._report_functions import (
     get_report_json,
     # report_dependency_tree,
@@ -39,4 +42,6 @@ __all__ = [
     "ReportWrapper",
     "report_bpa_rules",
     "run_report_bpa",
+    "get_report_datasources",
+    "download_report",
 ]
```
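Both additions are now importable directly from the subpackage:

```python
from sempy_labs.report import download_report, get_report_datasources
```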
sempy_labs/report/_download_report.py ADDED

```diff
@@ -0,0 +1,75 @@
+import sempy.fabric as fabric
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_lakehouse_name,
+)
+from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def download_report(
+    report: str,
+    file_name: Optional[str] = None,
+    download_type: str = "LiveConnect",
+    workspace: Optional[str] = None,
+):
+    """
+    Downloads the specified report from the specified workspace to a Power BI .pbix file.
+
+    This is a wrapper function for the following API: `Reports - Export Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-report-in-group>`.
+
+    Parameters
+    ----------
+    report: str
+        Name of the report.
+    file_name : str, default=None
+        Name of the .pbix file to be saved.
+        Defaults to None which resolves to the name of the report.
+    download_type : str, default="LiveConnect"
+        The type of download. Valid values are "LiveConnect" and "IncludeModel".
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if not lakehouse_attached():
+        raise ValueError(
+            f"{icons.red_dot} A lakehouse must be attached to the notebook."
+        )
+
+    lakehouse_id = fabric.get_lakehouse_id()
+    workspace_name = fabric.resolve_workspace_name()
+    lakehouse_name = resolve_lakehouse_name(
+        lakehouse_id=lakehouse_id, workspace=workspace_name
+    )
+
+    download_types = ["LiveConnect", "IncludeModel"]
+    if download_type not in download_types:
+        raise ValueError(
+            f"{icons.red_dot} Invalid download_type parameter. Valid options: {download_types}."
+        )
+
+    file_name = file_name or report
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    report_id = fabric.resolve_item_id(
+        item_name=report, type="Report", workspace=workspace
+    )
+
+    client = fabric.PowerBIRestClient()
+    response = client.get(
+        f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/Export?downloadType={download_type}"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    # Save file to the attached lakehouse
+    with open(f"/lakehouse/default/Files/{file_name}.pbix", "wb") as file:
+        file.write(response.content)
+
+    print(
+        f"{icons.green_dot} The '{report}' report within the '{workspace}' workspace has been exported as the '{file_name}' file in the '{lakehouse_name}' lakehouse within the '{workspace_name} workspace."
+    )
```
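A usage sketch for the new function (the report name is hypothetical); note that it requires an attached lakehouse, where the .pbix is written under Files/:

```python
from sempy_labs.report import download_report

# Saves /lakehouse/default/Files/Sales Report.pbix ("Sales Report" is a placeholder).
download_report(report="Sales Report", download_type="LiveConnect")
```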
sempy_labs/report/_generate_report.py CHANGED

```diff
@@ -23,6 +23,8 @@ def create_report_from_reportjson(
     """
     Creates a report based on a report.json file (and an optional themes.json file).
 
+    This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report>`_.
+
     Parameters
     ----------
     report : str
@@ -124,6 +126,8 @@ def update_report_from_reportjson(
     """
     Updates a report based on a report.json file.
 
+    This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition>`_.
+
     Parameters
     ----------
     report : str
@@ -179,6 +183,8 @@ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Gets the collection of definition files of a report.
 
+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
+
     Parameters
     ----------
     report : str
```
sempy_labs/report/_paginated.py ADDED

```diff
@@ -0,0 +1,74 @@
+import sempy.fabric as fabric
+from typing import Optional
+from sempy.fabric.exceptions import FabricHTTPException
+import pandas as pd
+
+
+def get_report_datasources(
+    report: str,
+    workspace: Optional[str] = None,
+) -> pd.DataFrame:
+    """
+    Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.
+
+    Parameters
+    ----------
+    report : str | List[str]
+        Name(s) of the Power BI report(s).
+    workspace : str, default=None
+        The name of the Fabric workspace in which the report resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of data sources for the specified paginated report (RDL) from the specified workspace.
+    """
+
+    df = pd.DataFrame(
+        columns=[
+            "Report Name",
+            "Report Id",
+            "Datasource Id",
+            "Datasource Type",
+            "Gateway Id",
+            "Server",
+            "Database",
+        ]
+    )
+
+    if workspace is None:
+        workspace_id = fabric.get_workspace_id()
+        workspace = fabric.resolve_workspace_name(workspace_id)
+    else:
+        workspace_id = fabric.resolve_workspace_id(workspace)
+
+    report_id = fabric.resolve_item_id(
+        item_name=report, type="PaginatedReport", workspace=workspace
+    )
+
+    client = fabric.PowerBIRestClient()
+
+    response = client.get(
+        f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    for i in response.json().get("value", []):
+        conn = i.get("connectionDetails", {})
+        new_data = {
+            "Report Name": report,
+            "Report Id": report_id,
+            "Datasource Id": i.get("datasourceId"),
+            "Datasource Type": i.get("datasourceType"),
+            "Gateway Id": i.get("gatewayId"),
+            "Server": conn.get("server") if conn else None,
+            "Database": conn.get("database") if conn else None,
+        }
+
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
```
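A usage sketch for the new paginated-report helper (the report name is hypothetical; the item must be an RDL paginated report):

```python
from sempy_labs.report import get_report_datasources

df = get_report_datasources(report="Monthly Invoices")  # hypothetical RDL report
print(df[["Datasource Type", "Server", "Database"]])
```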
sempy_labs/report/_report_functions.py CHANGED

```diff
@@ -33,6 +33,8 @@ def get_report_json(
     """
     Gets the report.json file content of a Power BI report.
 
+    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
+
     Parameters
     ----------
     report : str
@@ -157,6 +159,8 @@ def export_report(
     """
     Exports a Power BI report to a file in your lakehouse.
 
+    This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
+
     Parameters
     ----------
     report : str
@@ -401,6 +405,8 @@ def clone_report(
     """
     Clones a Power BI report.
 
+    This is a wrapper function for the following API: `Reports - Clone Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group>`_.
+
     Parameters
     ----------
     report : str
```
sempy_labs/report/_report_rebind.py CHANGED

```diff
@@ -19,6 +19,8 @@ def report_rebind(
     """
     Rebinds a report to a semantic model.
 
+    This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group>`_.
+
     Parameters
     ----------
     report : str | List[str]
```