semantic-link-labs: 0.9.5-py3-none-any.whl → 0.9.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +3 -2
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +48 -47
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +15 -1
- sempy_labs/_capacities.py +37 -1
- sempy_labs/_capacity_migration.py +11 -14
- sempy_labs/_connections.py +2 -4
- sempy_labs/_dataflows.py +2 -2
- sempy_labs/_dax_query_view.py +55 -0
- sempy_labs/_delta_analyzer.py +16 -14
- sempy_labs/_environments.py +8 -1
- sempy_labs/_eventhouses.py +5 -1
- sempy_labs/_external_data_shares.py +4 -10
- sempy_labs/_generate_semantic_model.py +2 -1
- sempy_labs/_graphQL.py +5 -1
- sempy_labs/_helper_functions.py +272 -51
- sempy_labs/_kql_databases.py +5 -1
- sempy_labs/_list_functions.py +5 -37
- sempy_labs/_managed_private_endpoints.py +9 -2
- sempy_labs/_mirrored_databases.py +3 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +2 -11
- sempy_labs/_model_bpa_bulk.py +23 -27
- sempy_labs/_one_lake_integration.py +2 -1
- sempy_labs/_semantic_models.py +20 -0
- sempy_labs/_sql.py +6 -2
- sempy_labs/_sqldatabase.py +61 -100
- sempy_labs/_vertipaq.py +8 -11
- sempy_labs/_warehouses.py +14 -3
- sempy_labs/_workspace_identity.py +6 -0
- sempy_labs/_workspaces.py +42 -2
- sempy_labs/admin/_basic_functions.py +3 -2
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/directlake/_directlake_schema_compare.py +2 -1
- sempy_labs/directlake/_directlake_schema_sync.py +65 -19
- sempy_labs/directlake/_dl_helper.py +0 -6
- sempy_labs/directlake/_generate_shared_expression.py +10 -11
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
- sempy_labs/directlake/_update_directlake_partition_entity.py +2 -2
- sempy_labs/lakehouse/_shortcuts.py +7 -5
- sempy_labs/migration/_migration_validation.py +0 -4
- sempy_labs/report/_download_report.py +4 -6
- sempy_labs/report/_generate_report.py +6 -6
- sempy_labs/report/_report_functions.py +2 -1
- sempy_labs/report/_report_rebind.py +8 -6
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_workspace_identity.py
CHANGED

@@ -13,6 +13,8 @@ def provision_workspace_identity(workspace: Optional[str | UUID] = None):
 
     This is a wrapper function for the following API: `Workspaces - Provision Identity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/provision-identity>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -28,6 +30,7 @@ def provision_workspace_identity(workspace: Optional[str | UUID] = None):
         method="post",
         lro_return_status_code=True,
         status_codes=None,
+        client="fabric_sp",
     )
 
     print(
@@ -41,6 +44,8 @@ def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
 
     This is a wrapper function for the following API: `Workspaces - Derovision Identity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/deprovision-identity>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -56,6 +61,7 @@ def deprovision_workspace_identity(workspace: Optional[str | UUID] = None):
         method="post",
         lro_return_status_code=True,
         status_codes=None,
+        client="fabric_sp",
     )
 
     print(
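Net effect of the two `client="fabric_sp"` additions: both identity calls can now be issued under a service principal. A minimal notebook sketch, assuming the `service_principal_authentication` context manager shown in the linked Service Principal notebook; the Key Vault URI and secret names are placeholders:

import sempy_labs as labs

# Placeholder Key Vault references; the context manager name comes from the
# linked notebook, not from this diff.
with labs.service_principal_authentication(
    key_vault_uri="https://<your-key-vault>.vault.azure.net/",
    key_vault_tenant_id="tenantId",
    key_vault_client_id="clientId",
    key_vault_client_secret="clientSecret",
):
    # Runs as the service principal via the new client="fabric_sp" plumbing.
    labs.provision_workspace_identity(workspace="MyWorkspace")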
sempy_labs/_workspaces.py
CHANGED
@@ -18,6 +18,8 @@ def delete_user_from_workspace(
 
     This is a wrapper function for the following API: `Groups - Delete User In Group <https://learn.microsoft.com/rest/api/power-bi/groups/delete-user-in-group>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     email_address : str
@@ -33,6 +35,7 @@ def delete_user_from_workspace(
     _base_api(
         request=f"/v1.0/myorg/groups/{workspace_id}/users/{email_address}",
         method="delete",
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace_name}' workspace."
@@ -50,6 +53,8 @@ def update_workspace_user(
 
     This is a wrapper function for the following API: `Groups - Update Group User <https://learn.microsoft.com/rest/api/power-bi/groups/update-group-user>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     email_address : str
@@ -90,6 +95,7 @@ def update_workspace_user(
         request=f"/v1.0/myorg/groups/{workspace_id}/users",
         method="put",
         payload=payload,
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace_name}' workspace."
@@ -102,6 +108,8 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
 
     This is a wrapper function for the following API: `Workspaces - List Workspace Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/workspaces/list-workspace-role-assignments>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -127,7 +135,9 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
     df = _create_dataframe(columns=columns)
 
     responses = _base_api(
-        request=f"v1/workspaces/{workspace_id}/roleAssignments",
+        request=f"v1/workspaces/{workspace_id}/roleAssignments",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -251,6 +261,8 @@ def unassign_workspace_from_capacity(workspace: Optional[str | UUID] = None):
 
     This is a wrapper function for the following API: `Workspaces - Unassign From Capacity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/unassign-from-capacity>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -265,6 +277,7 @@ def unassign_workspace_from_capacity(workspace: Optional[str | UUID] = None):
         request=f"/v1/workspaces/{workspace_id}/unassignFromCapacity",
         method="post",
         status_codes=[200, 202],
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been unassigned from its capacity."
@@ -279,6 +292,8 @@ def list_workspace_role_assignments(
 
     This is a wrapper function for the following API: `Workspaces - List Workspace Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/workspaces/list-workspace-role-assignments>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -303,7 +318,9 @@ def list_workspace_role_assignments(
     df = _create_dataframe(columns=columns)
 
     responses = _base_api(
-        request=f"v1/workspaces/{workspace_id}/roleAssignments",
+        request=f"v1/workspaces/{workspace_id}/roleAssignments",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -318,3 +335,26 @@ def list_workspace_role_assignments(
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
     return df
+
+
+def delete_workspace(workspace: Optional[str | UUID] = None):
+    """
+    Deletes a workspace.
+
+    This is a wrapper function for the following API: `Workspaces - Delete Workspace <https://learn.microsoft.com/rest/api/fabric/core/workspaces/delete-workspace>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}", method="delete", client="fabric_sp"
+    )
+
+    print(f"{icons.green_dot} The '{workspace_name}' workspace has been deleted.")
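The new `delete_workspace` follows the same calling convention as the other workspace helpers; a minimal sketch, assuming it is exported at the package root like its siblings (the workspace name is a placeholder):

import sempy_labs as labs

# Irreversible: deletes the named workspace. With no argument, the target is
# resolved from the attached lakehouse or, failing that, the notebook's workspace.
labs.delete_workspace(workspace="Sales Sandbox")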
sempy_labs/admin/_basic_functions.py
CHANGED

@@ -354,9 +354,10 @@ def _resolve_workspace_name_and_id(
     workspace: str | UUID,
 ) -> Tuple[str, UUID]:
 
+    from sempy_labs._helper_functions import resolve_workspace_name_and_id
+
     if workspace is None:
-        workspace_id =
-        workspace_name = fabric.resolve_workspace_name(workspace_id)
+        (workspace_name, workspace_id) = resolve_workspace_name_and_id()
     else:
         dfW = list_workspaces(workspace=workspace)
         if not dfW.empty:
sempy_labs/admin/_scanner.py
CHANGED
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 from typing import Optional, List
 from uuid import UUID
 from sempy.fabric.exceptions import FabricHTTPException
@@ -10,6 +9,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     _is_valid_uuid,
     _build_url,
+    resolve_workspace_name,
 )
 
 
@@ -54,7 +54,7 @@ def scan_workspaces(
     """
 
     if workspace is None:
-        workspace =
+        workspace = resolve_workspace_name()
 
     if isinstance(workspace, str):
         workspace = [workspace]
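With this fix, omitting `workspace` resolves to the current workspace name instead of the previously truncated lookup. A short sketch (workspace names are placeholders):

from sempy_labs import admin

# Scans the notebook's own workspace when 'workspace' is omitted; a single
# name or a list of names/IDs is also accepted.
scan_result = admin.scan_workspaces(workspace=["Sales", "Finance"])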
sempy_labs/directlake/_directlake_schema_compare.py
CHANGED

@@ -4,6 +4,7 @@ from sempy_labs._helper_functions import (
     format_dax_object_name,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    resolve_workspace_name,
 )
 from IPython.display import display
 from sempy_labs.lakehouse import get_lakehouse_columns
@@ -70,7 +71,7 @@ def direct_lake_schema_compare(
             f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workpace as the semantic model."
         )
 
-    lakehouse_workspace =
+    lakehouse_workspace = resolve_workspace_name(workspace_id=lakehouse_workspace_id)
     dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
     dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)
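A sketch of the call from a notebook (the dataset name is a placeholder); the function displays the comparison rather than returning it, consistent with the `IPython.display` import above:

from sempy_labs import directlake

# Compares model tables/columns against the source lakehouse; the lakehouse
# workspace name is now derived from its ID inside the function.
directlake.direct_lake_schema_compare(dataset="Sales Model")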
sempy_labs/directlake/_directlake_schema_sync.py
CHANGED

@@ -1,5 +1,5 @@
 import sempy
-import
+import pandas as pd
 from sempy_labs.lakehouse import get_lakehouse_columns
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from sempy_labs.tom import connect_semantic_model
@@ -19,8 +19,8 @@ def direct_lake_schema_sync(
     dataset: str | UUID,
     workspace: Optional[str | UUID] = None,
     add_to_model: bool = False,
-
-):
+    remove_from_model: bool = False,
+) -> pd.DataFrame:
     """
     Shows/adds columns which exist in the lakehouse but do not exist in the semantic model (only for tables in the semantic model).
 
@@ -34,22 +34,18 @@ def direct_lake_schema_sync(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     add_to_model : bool, default=False
         If set to True, columns which exist in the lakehouse but do not exist in the semantic model are added to the semantic model. No new tables are added.
+    remove_from_model : bool, default=False
+        If set to True, columns which exist in the semantic model but do not exist in the lakehouse are removed from the semantic model. No new tables are removed.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the status of columns in the semantic model and lakehouse (prior to adding/removing them from the model using this function).
     """
 
     sempy.fabric._client._utils._init_analysis_services()
     import Microsoft.AnalysisServices.Tabular as TOM
 
-    if "lakehouse" in kwargs:
-        print(
-            "The 'lakehouse' parameter has been deprecated as it is no longer necessary. Please remove this parameter from the function going forward."
-        )
-        del kwargs["lakehouse"]
-    if "lakehouse_workspace" in kwargs:
-        print(
-            "The 'lakehouse_workspace' parameter has been deprecated as it is no longer necessary. Please remove this parameter from the function going forward."
-        )
-        del kwargs["lakehouse_workspace"]
-
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
@@ -67,14 +63,54 @@ def direct_lake_schema_sync(
             f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workpace as the semantic model."
         )
 
-
+    lc = get_lakehouse_columns(lakehouse_id, lakehouse_workspace_id)
 
-
+    readonly = True
+    if add_to_model or remove_from_model:
+        readonly = False
+    df = pd.DataFrame(
+        columns=[
+            "TableName",
+            "ColumnName",
+            "SourceTableName",
+            "SourceColumnName",
+            "Status",
+        ]
+    )
 
     with connect_semantic_model(
-        dataset=dataset_id, readonly=
+        dataset=dataset_id, readonly=readonly, workspace=workspace_id
     ) as tom:
+        # Check if the columns in the semantic model exist in the lakehouse
+        for c in tom.all_columns():
+            partition_name = next(p.Name for p in c.Table.Partitions)
+            p = c.Table.Partitions[partition_name]
+            if p.SourceType == TOM.PartitionSourceType.Entity:
+                entity_name = p.Source.EntityName
+                source_column = c.SourceColumn
+                lc_filt = lc[
+                    (lc["Table Name"] == entity_name)
+                    & (lc["Column Name"] == source_column)
+                ]
+                # Remove column from model if it doesn't exist in the lakehouse
+                if lc_filt.empty:
+                    new_data = {
+                        "TableName": c.Parent.Name,
+                        "ColumnName": c.Name,
+                        "SourceTableName": entity_name,
+                        "SourceColumnName": source_column,
+                        "Status": "Not in lakehouse",
+                    }
+                    df = pd.concat(
+                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    if remove_from_model:
+                        tom.remove_object(object=c)
+                        print(
+                            f"{icons.green_dot} The '{c.Parent.Name}'[{c.Name}] column has been removed from the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+                        )
 
+        # Check if the lakehouse columns exist in the semantic model
         for i, r in lc.iterrows():
             lakeTName = r["Table Name"]
             lakeCName = r["Column Name"]
@@ -97,9 +133,17 @@ def direct_lake_schema_sync(
                 c.SourceColumn == lakeCName and c.Parent.Name == table_name
                 for c in tom.all_columns()
             ):
-
-
+                new_data = {
+                    "TableName": table_name,
+                    "ColumnName": None,
+                    "SourceTableName": lakeTName,
+                    "SourceColumnName": lakeCName,
+                    "Status": "Not in semantic model",
+                }
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
                 )
+
             if add_to_model:
                 dt = _convert_data_type(dType)
                 tom.add_data_column(
@@ -111,3 +155,5 @@ def direct_lake_schema_sync(
                 print(
                     f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                 )
+
+    return df
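A sketch grounded in the new signature above: call it once with the defaults for a read-only drift report, then again with the flags set to apply the changes (the dataset name is a placeholder):

from sempy_labs import directlake

# Dry run: returns the drift report without modifying the model.
drift = directlake.direct_lake_schema_sync(dataset="Sales Model")
print(drift[drift["Status"] == "Not in semantic model"])

# Apply both directions of the sync; remove_from_model is new in 0.9.6.
directlake.direct_lake_schema_sync(
    dataset="Sales Model",
    add_to_model=True,
    remove_from_model=True,
)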
sempy_labs/directlake/_dl_helper.py
CHANGED

@@ -7,7 +7,6 @@ import sempy_labs._icons as icons
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     retry,
-    resolve_lakehouse_name,
     _convert_data_type,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
@@ -129,11 +128,6 @@ def generate_direct_lake_semantic_model(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     if lakehouse_workspace is None:
         lakehouse_workspace = workspace
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse_workspace_id = fabric.get_workspace_id()
-        lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
     dfLT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
 
sempy_labs/directlake/_generate_shared_expression.py
CHANGED

@@ -1,10 +1,8 @@
-import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
-    resolve_lakehouse_name,
-    resolve_lakehouse_id,
-    resolve_warehouse_id,
     resolve_workspace_name_and_id,
     _base_api,
+    resolve_lakehouse_name_and_id,
+    resolve_item_name_and_id,
 )
 from typing import Optional
 import sempy_labs._icons as icons
@@ -45,13 +43,14 @@ def generate_shared_expression(
             f"{icons.red_dot} Invalid item type. Valid options: {item_types}."
         )
 
-    if
-    item_id =
-
-
-
-
-
+    if item_type == "Lakehouse":
+        (item_name, item_id) = resolve_lakehouse_name_and_id(
+            lakehouse=item_name, workspace=workspace_id
+        )
+    else:
+        (item_name, item_id) = resolve_item_name_and_id(
+            item=item_name, type=item_type, workspace=workspace_id
+        )
 
     item_type_rest = f"{item_type.lower()}s"
     response = _base_api(
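A sketch of the new branch from the caller's side, using the parameter names visible in the diff (item and workspace names are placeholders):

from sempy_labs import directlake

# Non-lakehouse items now resolve through resolve_item_name_and_id.
expression = directlake.generate_shared_expression(
    item_name="SalesWarehouse",
    item_type="Warehouse",
    workspace="Sales",
)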
sempy_labs/directlake/_guardrails.py
CHANGED

@@ -48,6 +48,7 @@ def get_sku_size(workspace: Optional[str | UUID] = None) -> str:
     str
         The SKU size for a workspace.
     """
+    from sempy_labs._capacities import list_capacities
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
@@ -59,7 +60,7 @@ def get_sku_size(workspace: Optional[str | UUID] = None) -> str:
     )
 
     capacity_id = dfW["Capacity Id"].iloc[0]
-    dfC =
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Id"] == capacity_id]
 
     if len(dfC_filt) == 0:
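A sketch of the repaired call path (the workspace name is a placeholder):

from sempy_labs import directlake

# Looks up the capacity backing the workspace and returns its SKU size,
# e.g. an "F"-series value on a Fabric capacity.
sku = directlake.get_sku_size(workspace="Sales")
print(sku)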
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py
CHANGED

@@ -4,6 +4,8 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
+    resolve_item_name_and_id,
+    resolve_lakehouse_name_and_id,
 )
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
@@ -40,22 +42,9 @@ def update_direct_lake_model_lakehouse_connection(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-
-
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-
-    # Check if lakehouse is valid
-    dfI = fabric.list_items(workspace=lakehouse_workspace, type="Lakehouse")
-    dfI_filt = dfI[(dfI["Display Name"] == lakehouse)]
-
-    if len(dfI_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{lakehouse}' lakehouse does not exist within the '{lakehouse_workspace}' workspace. "
-            f"Therefore it cannot be used to support the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
-        )
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=lakehouse_workspace
+    )
 
     icons.sll_tags.append("UpdateDLConnection")
 
@@ -121,21 +110,19 @@ def update_direct_lake_model_connection(
     if source_workspace is None:
         source_workspace = workspace_name
 
-    if
-    source_id =
-
-    else:
-        source_id = fabric.resolve_item_id(
-            item_name=source, type=source_type, workspace=source_workspace
+    if source_type == "Lakehouse":
+        (source_name, source_id) = resolve_lakehouse_name_and_id(
+            lakehouse=source, workspace=source_workspace
         )
-
-
+    else:
+        (source_name, source_id) = resolve_item_name_and_id(
+            item=source, type=source_type, workspace=source_workspace
        )
 
     icons.sll_tags.append("UpdateDLConnection")
 
     shEx = generate_shared_expression(
-        item_name=
+        item_name=source_name, item_type=source_type, workspace=source_workspace
     )
 
     with connect_semantic_model(
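A sketch of the reworked source resolution from the caller's side; `source`, `source_type`, and `source_workspace` come from the diff, while the `dataset` parameter name is assumed (all names are placeholders):

from sempy_labs import directlake

# Repoint a Direct Lake model at a warehouse source; with the new resolver
# branch a UUID works as well as a name for 'source'.
directlake.update_direct_lake_model_connection(
    dataset="Sales Model",
    source="SalesWarehouse",
    source_type="Warehouse",
)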
sempy_labs/directlake/_update_directlake_partition_entity.py
CHANGED

@@ -1,5 +1,4 @@
 import sempy
-import sempy.fabric as fabric
 from sempy_labs.tom import connect_semantic_model
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
     _convert_data_type,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
+    resolve_workspace_name,
 )
 from typing import List, Optional, Union
 import sempy_labs._icons as icons
@@ -144,7 +144,7 @@ def add_table_to_direct_lake_semantic_model(
             f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workpace as the semantic model."
         )
 
-    lakehouse_workspace =
+    lakehouse_workspace = resolve_workspace_name(workspace_id=lakehouse_workspace_id)
 
     with connect_semantic_model(
         dataset=dataset_id, readonly=False, workspace=workspace_id
sempy_labs/lakehouse/_shortcuts.py
CHANGED

@@ -5,6 +5,7 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
+    resolve_workspace_name,
 )
 from sempy._utils._log import log
 from typing import Optional
@@ -16,9 +17,9 @@ from sempy.fabric.exceptions import FabricHTTPException
 @log
 def create_shortcut_onelake(
     table_name: str,
-    source_lakehouse: str,
+    source_lakehouse: str | UUID,
     source_workspace: str | UUID,
-    destination_lakehouse: str,
+    destination_lakehouse: Optional[str | UUID] = None,
     destination_workspace: Optional[str | UUID] = None,
     shortcut_name: Optional[str] = None,
     source_path: str = "Tables",
@@ -33,12 +34,13 @@ def create_shortcut_onelake(
     ----------
     table_name : str
         The table name for which a shortcut will be created.
-    source_lakehouse : str
+    source_lakehouse : str | uuid.UUID
         The Fabric lakehouse in which the table resides.
     source_workspace : str | uuid.UUID
         The name or ID of the Fabric workspace in which the source lakehouse exists.
-    destination_lakehouse : str
+    destination_lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse in which the shortcut will be created.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
     destination_workspace : str | uuid.UUID, default=None
         The name or ID of the Fabric workspace in which the shortcut will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -359,7 +361,7 @@ def list_shortcuts(
             source_item_id = tgt.get(sources.get(tgt_type), {}).get("itemId")
             bucket = tgt.get(sources.get(tgt_type), {}).get("bucket")
             source_workspace_name = (
-
+                resolve_workspace_name(workspace_id=source_workspace_id)
                 if source_workspace_id is not None
                 else None
             )
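A sketch of the loosened signature (names are placeholders): `destination_lakehouse` can now be omitted, and UUIDs are accepted where only names were before:

from sempy_labs import lakehouse

# The shortcut lands in the lakehouse attached to the notebook because
# destination_lakehouse is omitted (new default in 0.9.6).
lakehouse.create_shortcut_onelake(
    table_name="DimDate",
    source_lakehouse="CoreLakehouse",  # a UUID is now also accepted
    source_workspace="Core",
)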
sempy_labs/migration/_migration_validation.py
CHANGED

@@ -42,10 +42,6 @@ def migration_validation(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
         )
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
     icons.sll_tags.append("DirectLakeMigration")
 
     dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
sempy_labs/report/_download_report.py
CHANGED

@@ -3,10 +3,11 @@ import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-
+    resolve_lakehouse_name_and_id,
     _base_api,
     resolve_item_id,
     _mount,
+    resolve_workspace_name,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from uuid import UUID
@@ -44,11 +45,8 @@ def download_report(
     )
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    lakehouse_id =
-    lakehouse_workspace =
-    lakehouse_name = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
-    )
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id()
+    lakehouse_workspace = resolve_workspace_name()
 
     download_types = ["LiveConnect", "IncludeModel"]
     if download_type not in download_types:
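A sketch of the call; `download_type` and its two valid values come from the diff, while the `report` parameter name is assumed (the report name is a placeholder). The attached lakehouse and its workspace are now resolved with the `resolve_*` helpers shown above:

from sempy_labs import report

report.download_report(
    report="Sales Report",
    download_type="IncludeModel",  # or "LiveConnect"
)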
sempy_labs/report/_generate_report.py
CHANGED

@@ -319,9 +319,9 @@ def _create_report(
 
     from sempy_labs.report import report_rebind
 
-
-
-
+    (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
+        workspace=report_workspace
+    )
 
     dfR = fabric.list_reports(workspace=report_workspace)
     dfR_filt = dfR[dfR["Name"] == report]
@@ -338,7 +338,7 @@ def _create_report(
         )
 
         print(
-            f"{icons.green_dot} The '{report}' report has been created within the '{
+            f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace_name}'"
         )
         updated_report = True
     # Update the report if it exists
@@ -352,12 +352,12 @@ def _create_report(
             status_codes=None,
         )
         print(
-            f"{icons.green_dot} The '{report}' report has been updated within the '{
+            f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace_name}'"
         )
         updated_report = True
     else:
         raise ValueError(
-            f"{icons.red_dot} The '{report}' report within the '{
+            f"{icons.red_dot} The '{report}' report within the '{report_workspace_name}' workspace already exists and the 'overwrite' parameter was set to False."
         )
 
     # Rebind the report to the semantic model to make sure it is pointed at the correct semantic model
sempy_labs/report/_report_functions.py
CHANGED

@@ -18,6 +18,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_spark_session,
     _mount,
+    resolve_workspace_id,
 )
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -192,7 +193,7 @@ def clone_report(
         target_workspace = workspace_name
         target_workspace_id = workspace_id
     else:
-        target_workspace_id =
+        target_workspace_id = resolve_workspace_id(workspace=target_workspace)
 
     if target_dataset is not None:
         if target_dataset_workspace is None:
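A sketch of a cross-workspace clone exercising the repaired lookup; `target_workspace` comes from the diff, while the `report` and `cloned_report` parameter names are assumed (all names are placeholders):

from sempy_labs import report

# target_workspace is now resolved with resolve_workspace_id().
report.clone_report(
    report="Sales Report",
    cloned_report="Sales Report (EU)",
    target_workspace="EU Analytics",
)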