semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +33 -4
- sempy_labs/_capacities.py +59 -128
- sempy_labs/_capacity_migration.py +19 -21
- sempy_labs/_connections.py +2 -4
- sempy_labs/_dashboards.py +60 -0
- sempy_labs/_data_pipelines.py +5 -31
- sempy_labs/_dataflows.py +2 -2
- sempy_labs/_dax_query_view.py +55 -0
- sempy_labs/_delta_analyzer.py +16 -14
- sempy_labs/_environments.py +28 -49
- sempy_labs/_eventhouses.py +27 -53
- sempy_labs/_eventstreams.py +16 -34
- sempy_labs/_external_data_shares.py +4 -10
- sempy_labs/_gateways.py +4 -4
- sempy_labs/_generate_semantic_model.py +2 -2
- sempy_labs/_git.py +90 -1
- sempy_labs/_graphQL.py +8 -21
- sempy_labs/_helper_functions.py +440 -91
- sempy_labs/_kql_databases.py +24 -35
- sempy_labs/_kql_querysets.py +15 -32
- sempy_labs/_list_functions.py +17 -192
- sempy_labs/_managed_private_endpoints.py +9 -2
- sempy_labs/_mirrored_databases.py +17 -49
- sempy_labs/_ml_experiments.py +6 -31
- sempy_labs/_ml_models.py +4 -28
- sempy_labs/_model_bpa.py +4 -11
- sempy_labs/_model_bpa_bulk.py +23 -27
- sempy_labs/_mounted_data_factories.py +119 -0
- sempy_labs/_notebooks.py +16 -26
- sempy_labs/_one_lake_integration.py +2 -1
- sempy_labs/_semantic_models.py +20 -0
- sempy_labs/_sql.py +13 -8
- sempy_labs/_sqldatabase.py +61 -100
- sempy_labs/_utils.py +42 -0
- sempy_labs/_vertipaq.py +25 -13
- sempy_labs/_warehouses.py +19 -20
- sempy_labs/_workloads.py +23 -9
- sempy_labs/_workspace_identity.py +6 -0
- sempy_labs/_workspaces.py +55 -7
- sempy_labs/admin/__init__.py +21 -1
- sempy_labs/admin/_apps.py +1 -1
- sempy_labs/admin/_artifacts.py +62 -0
- sempy_labs/admin/_basic_functions.py +3 -54
- sempy_labs/admin/_capacities.py +61 -0
- sempy_labs/admin/_reports.py +74 -0
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +4 -2
- sempy_labs/admin/_users.py +133 -0
- sempy_labs/admin/_workspaces.py +148 -0
- sempy_labs/directlake/_directlake_schema_compare.py +2 -1
- sempy_labs/directlake/_directlake_schema_sync.py +65 -19
- sempy_labs/directlake/_dl_helper.py +0 -6
- sempy_labs/directlake/_generate_shared_expression.py +10 -11
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
- sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +6 -7
- sempy_labs/lakehouse/_shortcuts.py +198 -57
- sempy_labs/migration/_migration_validation.py +0 -4
- sempy_labs/report/_download_report.py +4 -6
- sempy_labs/report/_generate_report.py +15 -23
- sempy_labs/report/_report_bpa.py +12 -19
- sempy_labs/report/_report_functions.py +2 -1
- sempy_labs/report/_report_rebind.py +8 -6
- sempy_labs/tom/_model.py +34 -16
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
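Moving between these two versions is the usual wheel upgrade (a sketch; the exact pin is your choice, and PyPI access is assumed):

    # In a Fabric notebook cell:
    %pip install semantic-link-labs==0.9.6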
sempy_labs/lakehouse/_shortcuts.py
CHANGED

@@ -1,21 +1,25 @@
 import sempy.fabric as fabric
+import pandas as pd
 from sempy_labs._helper_functions import (
-
-    resolve_lakehouse_id,
+    resolve_lakehouse_name_and_id,
     resolve_workspace_name_and_id,
     _base_api,
+    _create_dataframe,
+    resolve_workspace_name,
 )
+from sempy._utils._log import log
 from typing import Optional
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
+from sempy.fabric.exceptions import FabricHTTPException


+@log
 def create_shortcut_onelake(
     table_name: str,
-    source_lakehouse: str,
+    source_lakehouse: str | UUID,
     source_workspace: str | UUID,
-    destination_lakehouse: str,
+    destination_lakehouse: Optional[str | UUID] = None,
     destination_workspace: Optional[str | UUID] = None,
     shortcut_name: Optional[str] = None,
     source_path: str = "Tables",
@@ -30,12 +34,13 @@ def create_shortcut_onelake(
     ----------
     table_name : str
         The table name for which a shortcut will be created.
-    source_lakehouse : str
+    source_lakehouse : str | uuid.UUID
         The Fabric lakehouse in which the table resides.
     source_workspace : str | uuid.UUID
         The name or ID of the Fabric workspace in which the source lakehouse exists.
-    destination_lakehouse : str
+    destination_lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse in which the shortcut will be created.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
     destination_workspace : str | uuid.UUID, default=None
         The name or ID of the Fabric workspace in which the shortcut will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -62,21 +67,18 @@ def create_shortcut_onelake(
     (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
         source_workspace
     )
-    source_lakehouse_id = resolve_lakehouse_id(source_lakehouse, source_workspace_id)
-    source_lakehouse_name = fabric.resolve_item_name(
-        item_id=source_lakehouse_id, type="Lakehouse", workspace=source_workspace_id
-    )

-    (
-
+    (source_lakehouse_name, source_lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=source_lakehouse, workspace=source_workspace_id
     )
-
-
+
+    (destination_workspace_name, destination_workspace_id) = (
+        resolve_workspace_name_and_id(destination_workspace)
     )
-    destination_lakehouse_name =
-
-
-
+    (destination_lakehouse_name, destination_lakehouse_id) = (
+        resolve_lakehouse_name_and_id(
+            lakehouse=destination_lakehouse, workspace=destination_workspace_id
+        )
     )

     if shortcut_name is None:
@@ -84,18 +86,39 @@ def create_shortcut_onelake(

     source_full_path = f"{source_path}/{table_name}"

+    actual_shortcut_name = shortcut_name.replace(" ", "")
+
     payload = {
         "path": destination_path,
-        "name":
+        "name": actual_shortcut_name,
         "target": {
             "oneLake": {
-                "workspaceId": source_workspace_id,
                 "itemId": source_lakehouse_id,
                 "path": source_full_path,
+                "workspaceId": source_workspace_id,
             }
         },
     }

+    # Check if the shortcut already exists
+    try:
+        response = _base_api(
+            request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts/{destination_path}/{actual_shortcut_name}"
+        )
+        response_json = response.json()
+        del response_json["target"]["type"]
+        if response_json.get("target") == payload.get("target"):
+            print(
+                f"{icons.info} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace."
+            )
+            return
+        else:
+            raise ValueError(
+                f"{icons.red_dot} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name} lakehouse within the '{destination_workspace_name}' workspace but has a different source."
+            )
+    except FabricHTTPException:
+        pass
+
     _base_api(
         request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
         payload=payload,
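Taken together, these hunks make destination_lakehouse optional and add an existence check before the POST: an identical shortcut short-circuits with an info message, while a same-named shortcut with a different source raises. A minimal usage sketch, assuming a notebook with the destination lakehouse attached (all names below are placeholders):

    from sempy_labs.lakehouse import create_shortcut_onelake

    # Placeholder names; source_lakehouse may now be a name or UUID.
    create_shortcut_onelake(
        table_name="DimDate",
        source_lakehouse="SourceLakehouse",
        source_workspace="Source Workspace",
        # destination_lakehouse omitted: resolves to the attached lakehouse.
    )

Re-running the same call is now a no-op rather than an HTTP error.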
@@ -146,17 +169,14 @@ def create_shortcut(

     sourceTitle = source_titles[source]

-    (
-
-
-
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )

-    client = fabric.FabricRestClient()
     shortcutActualName = shortcut_name.replace(" ", "")

-
+    payload = {
         "path": "Tables",
         "name": shortcutActualName,
         "target": {
@@ -168,22 +188,16 @@ def create_shortcut(
         },
     }

-
-
-
-
-
-
-
-
-
-    else:
-        print(response.status_code)
-    except Exception as e:
-        raise ValueError(
-            f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table."
-        ) from e
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
+        method="post",
+        payload=payload,
+        status_codes=201,
+    )
+    print(
+        f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse_name}' lakehouse within"
+        f" the '{workspace_name}' workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
+    )


 def delete_shortcut(
@@ -203,7 +217,7 @@ def delete_shortcut(
         The name of the shortcut.
     shortcut_path : str = "Tables"
         The path of the shortcut to be deleted. Must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse name in which the shortcut resides.
         Defaults to None which resolves to the lakehouse attached to the notebook.
     workspace : str | UUID, default=None
@@ -213,20 +227,15 @@ def delete_shortcut(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )

-
-    lakehouse_id
-
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
-
-    client = fabric.FabricRestClient()
-    response = client.delete(
-        f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}"
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}",
+        method="delete",
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace_name}' workspace has been deleted."
     )
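delete_shortcut keeps its signature but now accepts a lakehouse name or UUID and routes the DELETE through the shared _base_api helper instead of a hand-rolled FabricRestClient call. A sketch with placeholder names:

    from sempy_labs.lakehouse import delete_shortcut

    # Placeholder names; shortcut_path must start with "Files" or "Tables".
    delete_shortcut(
        shortcut_name="DimDate",
        shortcut_path="Tables",
        lakehouse="MyLakehouse",
        workspace="My Workspace",
    )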
@@ -258,3 +267,135 @@ def reset_shortcut_cache(workspace: Optional[str | UUID] = None):
     print(
         f"{icons.green_dot} The shortcut cache has been reset for the '{workspace_name}' workspace."
     )
+
+
+@log
+def list_shortcuts(
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+    path: Optional[str] = None,
+) -> pd.DataFrame:
+    """
+    Shows all shortcuts which exist in a Fabric lakehouse and their properties.
+
+    Parameters
+    ----------
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which lakehouse resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    path: str, default=None
+        The path within lakehouse where to look for shortcuts. If provied, must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+        Defaults to None which will retun all shortcuts on the given lakehouse
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )
+
+    columns = {
+        "Shortcut Name": "string",
+        "Shortcut Path": "string",
+        "Source Type": "string",
+        "Source Workspace Id": "string",
+        "Source Workspace Name": "string",
+        "Source Item Id": "string",
+        "Source Item Name": "string",
+        "Source Item Type": "string",
+        "OneLake Path": "string",
+        "Connection Id": "string",
+        "Location": "string",
+        "Bucket": "string",
+        "SubPath": "string",
+        "Source Properties Raw": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    # To improve performance create a dataframe to cache all items for a given workspace
+    itm_clms = {
+        "Id": "string",
+        "Display Name": "string",
+        "Description": "string",
+        "Type": "string",
+        "Workspace Id": "string",
+    }
+    source_items_df = _create_dataframe(columns=itm_clms)
+
+    url = f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
+
+    if path is not None:
+        url += f"?parentPath={path}"
+
+    responses = _base_api(
+        request=url,
+        uses_pagination=True,
+    )
+
+    sources = {
+        "AdlsGen2": "adlsGen2",
+        "AmazonS3": "amazonS3",
+        "Dataverse": "dataverse",
+        "ExternalDataShare": "externalDataShare",
+        "GoogleCloudStorage": "googleCloudStorage",
+        "OneLake": "oneLake",
+        "S3Compatible": "s3Compatible",
+    }
+
+    for r in responses:
+        for i in r.get("value", []):
+            tgt = i.get("target", {})
+            tgt_type = tgt.get("type")
+            connection_id = tgt.get(sources.get(tgt_type), {}).get("connectionId")
+            location = tgt.get(sources.get(tgt_type), {}).get("location")
+            sub_path = tgt.get(sources.get(tgt_type), {}).get("subpath")
+            source_workspace_id = tgt.get(sources.get(tgt_type), {}).get("workspaceId")
+            source_item_id = tgt.get(sources.get(tgt_type), {}).get("itemId")
+            bucket = tgt.get(sources.get(tgt_type), {}).get("bucket")
+            source_workspace_name = (
+                resolve_workspace_name(workspace_id=source_workspace_id)
+                if source_workspace_id is not None
+                else None
+            )
+            # Cache and use it to getitem type and name
+            source_item_type = None
+            source_item_name = None
+            dfI = source_items_df[
+                source_items_df["Workspace Id"] == source_workspace_id
+            ]
+            if dfI.empty:
+                dfI = fabric.list_items(workspace=source_workspace_id)
+                source_items_df = pd.concat([source_items_df, dfI], ignore_index=True)
+
+            dfI_filt = dfI[dfI["Id"] == source_item_id]
+            if not dfI_filt.empty:
+                source_item_type = dfI_filt["Type"].iloc[0]
+                source_item_name = dfI_filt["Display Name"].iloc[0]
+
+            new_data = {
+                "Shortcut Name": i.get("name"),
+                "Shortcut Path": i.get("path"),
+                "Source Type": tgt_type,
+                "Source Workspace Id": source_workspace_id,
+                "Source Workspace Name": source_workspace_name,
+                "Source Item Id": source_item_id,
+                "Source Item Name": source_item_name,
+                "Source Item Type": source_item_type,
+                "OneLake Path": tgt.get(sources.get("oneLake"), {}).get("path"),
+                "Connection Id": connection_id,
+                "Location": location,
+                "Bucket": bucket,
+                "SubPath": sub_path,
+                "Source Properties Raw": str(tgt),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
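The new list_shortcuts function is the main addition to this module; the two-line change to sempy_labs/lakehouse/__init__.py suggests it is re-exported there. A sketch with placeholder names (with no arguments it targets the attached lakehouse):

    from sempy_labs.lakehouse import list_shortcuts

    # Placeholder names; path narrows the search to a subfolder.
    df = list_shortcuts(lakehouse="MyLakehouse", workspace="My Workspace", path="Tables")
    print(df[["Shortcut Name", "Source Type", "Source Workspace Name"]])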
sempy_labs/migration/_migration_validation.py
CHANGED

@@ -42,10 +42,6 @@ def migration_validation(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
         )

-    workspace = fabric.resolve_workspace_name(workspace)
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
     icons.sll_tags.append("DirectLakeMigration")

     dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
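migration_validation no longer resolves the workspace name or defaults new_dataset_workspace itself, so caller arguments pass through unchanged. A sketch with placeholder model names:

    from sempy_labs.migration import migration_validation

    # Placeholder names for the import-mode and migrated Direct Lake models.
    df = migration_validation(
        dataset="SalesModel",
        new_dataset="SalesModel_DL",
        workspace="My Workspace",
    )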
sempy_labs/report/_download_report.py
CHANGED

@@ -3,10 +3,11 @@ import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-
+    resolve_lakehouse_name_and_id,
     _base_api,
     resolve_item_id,
     _mount,
+    resolve_workspace_name,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from uuid import UUID

@@ -44,11 +45,8 @@ def download_report(
     )

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    lakehouse_id =
-    lakehouse_workspace =
-    lakehouse_name = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
-    )
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id()
+    lakehouse_workspace = resolve_workspace_name()

     download_types = ["LiveConnect", "IncludeModel"]
     if download_type not in download_types:
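download_report now resolves the attached lakehouse and its workspace via the shared helpers. A sketch, assuming an attached lakehouse; the report parameter name is an assumption, while download_type and its two accepted values are visible above:

    from sempy_labs.report import download_report

    # 'report' is an assumed parameter name; names are placeholders.
    download_report(
        report="Sales Report",
        workspace="My Workspace",
        download_type="LiveConnect",
    )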
sempy_labs/report/_generate_report.py
CHANGED

@@ -11,6 +11,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     resolve_item_id,
+    get_item_definition,
 )
 import sempy_labs._icons as icons
 from sempy._utils._log import log

@@ -178,7 +179,9 @@ def update_report_from_reportjson(


 def get_report_definition(
-    report: str
+    report: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    return_dataframe: bool = True,
 ) -> pd.DataFrame | dict:
     """
     Gets the collection of definition files of a report.

@@ -187,8 +190,8 @@ def get_report_definition(

     Parameters
     ----------
-    report : str
-        Name of the report.
+    report : str | uuid.UUID
+        Name or ID of the report.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse

@@ -198,25 +201,14 @@ def get_report_definition(

     Returns
     -------
-    pandas.DataFrame
+    pandas.DataFrame
         The collection of report definition files within a pandas dataframe.
     """

-
-
-
-    result = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
-        method="post",
-        lro_return_json=True,
-        status_codes=None,
+    return get_item_definition(
+        item=report, type="Report", workspace=workspace, return_dataframe=True
     )

-    if return_dataframe:
-        return pd.json_normalize(result["definition"]["parts"])
-    else:
-        return result
-

 @log
 def create_model_bpa_report(

@@ -327,9 +319,9 @@ def _create_report(

     from sempy_labs.report import report_rebind

-
-
-
+    (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
+        workspace=report_workspace
+    )

     dfR = fabric.list_reports(workspace=report_workspace)
     dfR_filt = dfR[dfR["Name"] == report]

@@ -346,7 +338,7 @@ def _create_report(
         )

         print(
-            f"{icons.green_dot} The '{report}' report has been created within the '{
+            f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace_name}'"
         )
         updated_report = True
     # Update the report if it exists

@@ -360,12 +352,12 @@ def _create_report(
             status_codes=None,
         )
         print(
-            f"{icons.green_dot} The '{report}' report has been updated within the '{
+            f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace_name}'"
         )
         updated_report = True
     else:
         raise ValueError(
-            f"{icons.red_dot} The '{report}' report within the '{
+            f"{icons.red_dot} The '{report}' report within the '{report_workspace_name}' workspace already exists and the 'overwrite' parameter was set to False."
         )

     # Rebind the report to the semantic model to make sure it is pointed at the correct semantic model
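get_report_definition now takes a name or ID plus a workspace and delegates to the shared get_item_definition helper. A minimal sketch with placeholder names:

    from sempy_labs.report import get_report_definition

    # Returns a dataframe of definition parts by default.
    parts_df = get_report_definition(report="Sales Report", workspace="My Workspace")

Note that the new body forwards return_dataframe=True unconditionally, so the caller's return_dataframe argument currently has no effect.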
sempy_labs/report/_report_bpa.py
CHANGED

@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 from typing import Optional
 import pandas as pd
 import datetime

@@ -7,8 +6,7 @@ from sempy_labs.report import ReportWrapper, report_bpa_rules
 from sempy_labs._helper_functions import (
     format_dax_object_name,
     save_as_delta_table,
-
-    resolve_lakehouse_name,
+    resolve_item_name_and_id,
     resolve_workspace_capacity,
     _get_column_aggregate,
     resolve_workspace_name_and_id,

@@ -54,9 +52,7 @@ def run_report_bpa(
         A pandas dataframe in HTML format showing report objects which violated the best practice analyzer rules.
     """

-
-
-    rpt = ReportWrapper(report=report, workspace=workspace_id)
+    rpt = ReportWrapper(report=report, workspace=workspace)

     dfCV = rpt.list_custom_visuals()
     dfP = rpt.list_pages()

@@ -149,7 +145,7 @@ def run_report_bpa(
         df_output["Description"] = row["Description"]
         df_output["URL"] = row["URL"]
         df_output["Report URL"] = helper.get_web_url(
-            report=report, workspace=
+            report=report, workspace=workspace
         )

         page_mapping_dict = dfP.set_index("Page Display Name")["Page URL"].to_dict()

@@ -205,31 +201,28 @@ def run_report_bpa(

         now = datetime.datetime.now()
         delta_table_name = "reportbparesults"
-
-        lake_workspace = fabric.resolve_workspace_name()
-        lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=lake_workspace
-        )
-
-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
+        lakeT = get_lakehouse_tables()
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

         if len(lakeT_filt) == 0:
             runId = 1
         else:
-            max_run_id = _get_column_aggregate(
-                lakehouse=lakehouse, table_name=delta_table_name
-            )
+            max_run_id = _get_column_aggregate(table_name=delta_table_name)
             runId = max_run_id + 1

+        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+        (report_name, report_id) = resolve_item_name_and_id(
+            item=report, type="Report", workspace=workspace_id
+        )
+
         export_df = finalDF.copy()
         capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id)
         export_df["Capacity Name"] = capacity_name
         export_df["Capacity Id"] = capacity_id
         export_df["Workspace Name"] = workspace_name
         export_df["Workspace Id"] = workspace_id
-        export_df["Report Name"] =
-        export_df["Report Id"] =
+        export_df["Report Name"] = report_name
+        export_df["Report Id"] = report_id
         export_df["RunId"] = runId
         export_df["Timestamp"] = now
         export_df["RunId"] = export_df["RunId"].astype(int)
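run_report_bpa now hands the workspace straight to ReportWrapper and resolves the report name and ID only when writing results to the 'reportbparesults' delta table. A sketch with placeholder names; the export flag is an assumption based on that export path:

    from sempy_labs.report import run_report_bpa

    # Placeholder names; export=True is assumed to trigger the delta table write.
    results = run_report_bpa(report="Sales Report", workspace="My Workspace", export=True)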
sempy_labs/report/_report_functions.py
CHANGED

@@ -18,6 +18,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_spark_session,
     _mount,
+    resolve_workspace_id,
 )
 from typing import List, Optional, Union
 from sempy._utils._log import log

@@ -192,7 +193,7 @@ def clone_report(
         target_workspace = workspace_name
         target_workspace_id = workspace_id
     else:
-        target_workspace_id =
+        target_workspace_id = resolve_workspace_id(workspace=target_workspace)

     if target_dataset is not None:
         if target_dataset_workspace is None:
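clone_report now resolves the target workspace through the new resolve_workspace_id helper instead of leaving the ID unset. A sketch; the target_workspace branch is visible above, while the cloned_report parameter name is an assumption:

    from sempy_labs.report import clone_report

    # Placeholder names; 'cloned_report' is an assumed parameter name.
    clone_report(
        report="Sales Report",
        cloned_report="Sales Report (copy)",
        target_workspace="Another Workspace",
    )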
sempy_labs/report/_report_rebind.py
CHANGED

@@ -1,9 +1,9 @@
-import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_dataset_id,
     resolve_workspace_name_and_id,
     resolve_report_id,
     _base_api,
+    resolve_dataset_name_and_id,
 )
 from typing import Optional, List
 from sempy._utils._log import log

@@ -104,10 +104,12 @@ def report_rebind_all(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
         )

-
-
-
-
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset=dataset, workspace=dataset_workspace
+    )
+    (dataset_workspace_name, dataset_workspace_id) = resolve_workspace_name_and_id(
+        workspace=dataset_workspace
+    )

     if isinstance(report_workspace, str):
         report_workspace = [report_workspace]

@@ -118,7 +120,7 @@ def report_rebind_all(

     if len(dfR) == 0:
         print(
-            f"{icons.info} The '{
+            f"{icons.info} The '{dataset_name}' semantic model within the '{dataset_workspace_name}' workspace has no dependent reports."
        )
        return
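report_rebind_all now resolves the dataset name and ID up front, which lets the no-dependent-reports message name the model and its workspace. A sketch with placeholder names; report_workspace accepts a single name or a list, as the isinstance check above shows:

    from sempy_labs.report import report_rebind_all

    # Placeholder names.
    report_rebind_all(
        dataset="SalesModel",
        new_dataset="SalesModel_v2",
        dataset_workspace="My Workspace",
        report_workspace=["My Workspace", "Another Workspace"],
    )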