semantic-link-labs 0.8.10-py3-none-any.whl → 0.9.0-py3-none-any.whl
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +34 -3
- sempy_labs/_authentication.py +80 -4
- sempy_labs/_capacities.py +770 -200
- sempy_labs/_capacity_migration.py +7 -37
- sempy_labs/_clear_cache.py +37 -35
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +16 -14
- sempy_labs/_generate_semantic_model.py +107 -62
- sempy_labs/_git.py +105 -43
- sempy_labs/_helper_functions.py +251 -194
- sempy_labs/_job_scheduler.py +227 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +150 -126
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +210 -207
- sempy_labs/_model_bpa_bulk.py +2 -2
- sempy_labs/_model_bpa_rules.py +3 -3
- sempy_labs/_model_dependencies.py +55 -29
- sempy_labs/_notebooks.py +29 -25
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +75 -64
- sempy_labs/_refresh_semantic_model.py +25 -26
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +19 -12
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +38 -33
- sempy_labs/_warehouses.py +26 -25
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/_basic_functions.py +166 -115
- sempy_labs/admin/_domains.py +7 -2
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_git.py +4 -1
- sempy_labs/admin/_items.py +11 -6
- sempy_labs/admin/_scanner.py +10 -5
- sempy_labs/directlake/_directlake_schema_compare.py +25 -16
- sempy_labs/directlake/_directlake_schema_sync.py +24 -12
- sempy_labs/directlake/_dl_helper.py +74 -55
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
- sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
- sempy_labs/lakehouse/_lakehouse.py +7 -20
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +85 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +15 -11
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +139 -21
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
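The dominant change across the modules below is that `dataset` and `workspace` parameters are widened from `str` to `str | UUID`, with names and IDs resolved internally via `resolve_workspace_name_and_id` and `resolve_dataset_name_and_id`. A minimal sketch of what the widened signatures allow, assuming `get_measure_dependencies` remains exported at the package top level as in earlier releases (the model name and both UUID values are hypothetical placeholders):

    from uuid import UUID
    import sempy_labs as labs

    # Before 0.9.0 these parameters accepted names only; now either form works.
    deps = labs.get_measure_dependencies(dataset="Sales Model")
    deps = labs.get_measure_dependencies(
        dataset=UUID("00000000-0000-0000-0000-000000000000"),    # hypothetical model ID
        workspace=UUID("00000000-0000-0000-0000-000000000001"),  # hypothetical workspace ID
    )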
sempy_labs/_model_dependencies.py CHANGED

@@ -14,17 +14,17 @@ from uuid import UUID
 
 @log
 def get_measure_dependencies(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows all dependencies for all measures in a semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -34,8 +34,6 @@ def get_measure_dependencies(
         Shows all dependencies for all measures in the semantic model.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-
     dep = fabric.evaluate_dax(
         dataset=dataset,
         workspace=workspace,
@@ -144,17 +142,18 @@ def get_measure_dependencies(
 
 @log
 def get_model_calc_dependencies(
-    dataset: str | UUID, workspace: Optional[str] = None
+    dataset: str | UUID,
+    workspace: Optional[str] = None,
 ) -> pd.DataFrame:
     """
     Shows all dependencies for all objects in a semantic model.
 
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -164,11 +163,9 @@ def get_model_calc_dependencies(
         Shows all dependencies for all objects in the semantic model.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     dep = fabric.evaluate_dax(
-        dataset=dataset_id,
-        workspace=workspace_id,
+        dataset=dataset,
+        workspace=workspace,
         dax_string="""
         SELECT
         [TABLE] AS [Table Name],
@@ -193,12 +190,24 @@ def get_model_calc_dependencies(
         dep["Referenced Table"], dep["Referenced Object"]
     )
     dep["Parent Node"] = dep["Object Name"]
+
     # Initialize dependency DataFrame with 'Done' status
     df = dep.copy()
     objs = {"Measure", "Calc Column", "Calculation Item", "Calc Table"}
     df["Done"] = (
         df["Referenced Object Type"].apply(lambda x: x not in objs).astype(bool)
     )
+
+    # Set to track visited dependencies to prevent circular references
+    visited = set(
+        zip(
+            df["Full Object Name"],
+            df["Referenced Full Object Name"],
+            df["Object Type"],
+            df["Referenced Object Type"],
+        )
+    )
+
     # Expand dependencies iteratively
     while not df["Done"].all():
         incomplete_rows = df[df["Done"] == False]
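The `visited` set initialized above is the other substantive change in this file: it guards the expansion loop against circular references between calculated objects, which could otherwise make the loop run indefinitely. A self-contained sketch of the same guard on a toy dependency graph (the node names and the `edges` dict are illustrative, not from the library):

    # Hypothetical circular references: A depends on B, and B depends back on A.
    edges = {"A": ["B"], "B": ["A"]}
    visited, stack, expanded = set(), ["A"], []
    while stack:
        node = stack.pop()
        for ref in edges.get(node, []):
            pair = (node, ref)
            if pair in visited:
                continue  # already expanded; skipping here is what breaks the cycle
            visited.add(pair)
            expanded.append(pair)
            stack.append(ref)
    print(expanded)  # [('A', 'B'), ('B', 'A')] -- terminates despite the cycle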
@@ -212,11 +221,24 @@ def get_model_calc_dependencies(
         # Expand dependencies and update 'Done' status as needed
         new_rows = []
         for _, dependency in dep_filt.iterrows():
+            # Check if the dependency has already been visited
+            dependency_pair = (
+                row["Full Object Name"],
+                dependency["Referenced Full Object Name"],
+                row["Object Type"],
+                dependency["Referenced Object Type"],
+            )
+            if dependency_pair in visited:
+                continue  # Skip already visited dependencies
+
+            visited.add(dependency_pair)  # Mark as visited
+
             is_done = dependency["Referenced Object Type"] not in objs
             new_row = {
                 "Table Name": row["Table Name"],
                 "Object Name": row["Object Name"],
                 "Object Type": row["Object Type"],
+                "Expression": row["Expression"],
                 "Referenced Table": dependency["Referenced Table"],
                 "Referenced Object": dependency["Referenced Object"],
                 "Referenced Object Type": dependency["Referenced Object Type"],
@@ -228,7 +250,14 @@ def get_model_calc_dependencies(
                 "Parent Node": row["Referenced Object"],
             }
             new_rows.append(new_row)
-
+
+        if new_rows:
+            new_rows_df = pd.DataFrame(new_rows)
+            new_rows_df = new_rows_df.dropna(
+                axis=1, how="all"
+            )  # Drop empty columns
+            df = pd.concat([df, new_rows_df], ignore_index=True)
+
         df.loc[df.index == row.name, "Done"] = True
     # Finalize DataFrame and yield result
     df = df.drop(columns=["Done"])
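The `dropna(axis=1, how="all")` call before `pd.concat` likely exists to sidestep pandas' deprecation of concatenating frames that contain all-NA columns, which would otherwise emit a FutureWarning in recent pandas versions. The mechanics in isolation (the column names are illustrative):

    import pandas as pd

    base = pd.DataFrame({"a": [1], "b": ["x"]})
    new = pd.DataFrame([{"a": 2, "b": None}]).dropna(axis=1, how="all")  # drops all-NA "b"
    combined = pd.concat([base, new], ignore_index=True)  # "b" is NaN for the new row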
@@ -238,39 +267,36 @@ def get_model_calc_dependencies(
 
 @log
 def measure_dependency_tree(
-    dataset: str, measure_name: str, workspace: Optional[str] = None
+    dataset: str | UUID, measure_name: str, workspace: Optional[str | UUID] = None
 ):
     """
     Prints a measure dependency tree of all dependent objects for a measure in a semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     measure_name : str
         Name of the measure.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-
     """
 
-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
+    dfM = fabric.list_measures(dataset=dataset_id, workspace=workspace_id)
     dfM_filt = dfM[dfM["Measure Name"] == measure_name]
 
     if len(dfM_filt) == 0:
         print(
-            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset}' semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset_name}' semantic model in the '{workspace_name}' workspace."
         )
         return
 
-    md = get_measure_dependencies(dataset, workspace)
+    md = get_measure_dependencies(dataset_id, workspace_id)
     df_filt = md[md["Object Name"] == measure_name]
 
     # Create a dictionary to hold references to nodes
sempy_labs/_notebooks.py CHANGED

@@ -4,6 +4,7 @@ import sempy_labs._icons as icons
 from typing import Optional
 import base64
 import requests
+from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     lro,
@@ -11,17 +12,18 @@ from sempy_labs._helper_functions import (
 )
 from sempy.fabric.exceptions import FabricHTTPException
 import os
+from uuid import UUID
 
 _notebook_prefix = "notebook-content."
 
 
 def _get_notebook_definition_base(
-    notebook_name: str, workspace: Optional[str] = None
+    notebook_name: str, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_id = fabric.resolve_item_id(
-        item_name=notebook_name, type="Notebook", workspace=workspace
+        item_name=notebook_name, type="Notebook", workspace=workspace_id
     )
     client = fabric.FabricRestClient()
     response = client.post(
@@ -33,7 +35,9 @@ def _get_notebook_definition_base(
     return pd.json_normalize(result["definition"]["parts"])
 
 
-def _get_notebook_type(notebook_name: str, workspace: Optional[str] = None) -> str:
+def _get_notebook_type(
+    notebook_name: str, workspace: Optional[str | UUID] = None
+) -> str:
 
     df_items = _get_notebook_definition_base(
         notebook_name=notebook_name, workspace=workspace
@@ -49,7 +53,7 @@ def _get_notebook_type(notebook_name: str, workspace: Optional[str] = None) -> str:
 
 
 def get_notebook_definition(
-    notebook_name: str, workspace: Optional[str] = None, decode: bool = True
+    notebook_name: str, workspace: Optional[str | UUID] = None, decode: bool = True
 ) -> str:
     """
     Obtains the notebook definition.
@@ -60,8 +64,8 @@ def get_notebook_definition(
     ----------
     notebook_name : str
         The name of the notebook.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     decode : bool, default=True
@@ -88,11 +92,12 @@ def get_notebook_definition(
     return result
 
 
+@log
 def import_notebook_from_web(
     notebook_name: str,
     url: str,
     description: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
     overwrite: bool = False,
 ):
     """
@@ -110,16 +115,15 @@ def import_notebook_from_web(
     description : str, default=None
         The description of the notebook.
         Defaults to None which does not place a description.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing notebook in the workspace if it exists.
     """
 
-
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     # Fix links to go to the raw github file
     starting_text = "https://github.com/"
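Per the hunks above, `import_notebook_from_web` gains the `@log` decorator and a workspace parameter that takes a name or ID, resolved up front. A usage sketch (the notebook name and repository URL are placeholders; the function rewrites github.com links to the raw file, per the code comment above):

    import sempy_labs as labs

    labs.import_notebook_from_web(
        notebook_name="My Notebook",
        url="https://github.com/<owner>/<repo>/blob/main/My Notebook.ipynb",
        overwrite=True,
    )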
@@ -139,7 +143,7 @@ def import_notebook_from_web(
         create_notebook(
             name=notebook_name,
             notebook_content=response.content,
-            workspace=workspace,
+            workspace=workspace_id,
             description=description,
         )
     elif len(dfI_filt) > 0 and overwrite:
@@ -149,7 +153,7 @@ def import_notebook_from_web(
         # )
     else:
         raise ValueError(
-            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace and 'overwrite' is set to False."
+            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace_name}' workspace and 'overwrite' is set to False."
         )
 
 
@@ -158,7 +162,7 @@ def create_notebook(
     notebook_content: str,
     type: str = "py",
     description: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a new notebook with a definition within a workspace.
@@ -174,13 +178,13 @@ def create_notebook(
     description : str, default=None
         The description of the notebook.
         Defaults to None which does not place a description.
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     client = fabric.FabricRestClient()
     notebook_payload = base64.b64encode(notebook_content)
 
@@ -205,12 +209,12 @@ def create_notebook(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' notebook was created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{name}' notebook was created within the '{workspace_name}' workspace."
     )
 
 
 def update_notebook_definition(
-    name: str, notebook_content: str, workspace: Optional[str] = None
+    name: str, notebook_content: str, workspace: Optional[str | UUID] = None
 ):
     """
     Updates an existing notebook with a new definition.
@@ -221,17 +225,17 @@ def update_notebook_definition(
         The name of the notebook to be updated.
     notebook_content : str
         The Jupyter notebook content (not in Base64 format).
-    workspace : str, default=None
-        The name of the workspace.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     client = fabric.FabricRestClient()
     notebook_payload = base64.b64encode(notebook_content)
     notebook_id = fabric.resolve_item_id(
-        item_name=name, type="Notebook", workspace=workspace
+        item_name=name, type="Notebook", workspace=workspace_id
    )
 
     type = _get_notebook_type(notebook_name=name, workspace=workspace_id)
@@ -256,5 +260,5 @@ def update_notebook_definition(
     lro(client, response, return_status_code=True)
 
     print(
-        f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace_name}' workspace."
    )
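Taken together, `get_notebook_definition` and `update_notebook_definition` support a read-modify-write round trip, and both now accept a workspace name or ID. A sketch (the notebook name and the string edit are hypothetical):

    import sempy_labs as labs

    content = labs.get_notebook_definition("My Notebook")  # decoded notebook text by default
    content = content.replace("dev_lakehouse", "prod_lakehouse")  # hypothetical edit
    labs.update_notebook_definition(name="My Notebook", notebook_content=content)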
sempy_labs/_one_lake_integration.py CHANGED

@@ -2,14 +2,18 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
-from sempy_labs._helper_functions import resolve_workspace_name_and_id
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 @log
 def export_model_to_onelake(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     destination_lakehouse: Optional[str] = None,
     destination_workspace: Optional[str] = None,
 ):
@@ -18,10 +22,10 @@ def export_model_to_onelake(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     destination_lakehouse : str, default=None
@@ -30,22 +34,15 @@ def export_model_to_onelake(
         The name of the Fabric workspace in which the lakehouse resides.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     if destination_workspace is None:
-        destination_workspace = workspace
+        destination_workspace = workspace_name
         destination_workspace_id = workspace_id
     else:
         destination_workspace_id = fabric.resolve_workspace_id(destination_workspace)
 
-    dfD = fabric.list_datasets(workspace=workspace)
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-
-    if len(dfD_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
-        )
-
     tmsl = f"""
     {{
         'export': {{
@@ -53,7 +50,7 @@ def export_model_to_onelake(
             'type': 'full',
             'objects': [
                 {{
-                    'database': '{dataset}'
+                    'database': '{dataset_name}'
                 }}
             ]
         }}
@@ -62,13 +59,13 @@ def export_model_to_onelake(
 
     # Export model's tables as delta tables
     try:
-        fabric.execute_tmsl(script=tmsl, workspace=workspace)
+        fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
         print(
-            f"{icons.green_dot} The '{dataset}' semantic model's tables have been exported as delta tables to the '{workspace}' workspace.\n"
+            f"{icons.green_dot} The '{dataset_name}' semantic model's tables have been exported as delta tables to the '{workspace_name}' workspace.\n"
         )
     except Exception as e:
         raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model's tables have not been exported as delta tables to the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset_name}' semantic model's tables have not been exported as delta tables to the '{workspace_name}' workspace.\nMake sure you enable OneLake integration for the '{dataset_name}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
         ) from e
 
     # Create shortcuts if destination lakehouse is specified
@@ -92,14 +89,14 @@ def export_model_to_onelake(
         destination_lakehouse_id = dfI_filt["Id"].iloc[0]
 
     # Source...
-    dfI_Source = fabric.list_items(workspace=workspace, type="SemanticModel")
+    dfI_Source = fabric.list_items(workspace=workspace_id, type="SemanticModel")
     dfI_filtSource = dfI_Source[(dfI_Source["Display Name"] == dataset)]
     sourceLakehouseId = dfI_filtSource["Id"].iloc[0]
 
     # Valid tables
     dfP = fabric.list_partitions(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         additional_xmla_properties=["Parent.SystemManaged"],
     )
     dfP_filt = dfP[
@@ -107,7 +104,7 @@ def export_model_to_onelake(
         & (dfP["Source Type"] != "CalculationGroup")
         & (dfP["Parent System Managed"] == False)
     ]
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
     oneMode = tmc[tmc["Mode"] == 1]
     tableAll = dfP_filt[
@@ -141,7 +138,7 @@ def export_model_to_onelake(
     )
     if response.status_code == 201:
         print(
-            f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset}' semantic model within the '{workspace}' workspace.\n"
+            f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace.\n"
         )
     else:
         print(response.status_code)