semantic-link-labs 0.9.5__py3-none-any.whl → 0.9.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/METADATA +8 -5
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/RECORD +65 -61
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +19 -1
- sempy_labs/_ai.py +3 -1
- sempy_labs/_capacities.py +37 -2
- sempy_labs/_capacity_migration.py +11 -14
- sempy_labs/_connections.py +2 -4
- sempy_labs/_dataflows.py +2 -2
- sempy_labs/_dax_query_view.py +57 -0
- sempy_labs/_delta_analyzer.py +16 -14
- sempy_labs/_delta_analyzer_history.py +298 -0
- sempy_labs/_environments.py +8 -1
- sempy_labs/_eventhouses.py +5 -1
- sempy_labs/_external_data_shares.py +4 -10
- sempy_labs/_generate_semantic_model.py +2 -1
- sempy_labs/_graphQL.py +5 -1
- sempy_labs/_helper_functions.py +440 -63
- sempy_labs/_icons.py +6 -6
- sempy_labs/_kql_databases.py +5 -1
- sempy_labs/_list_functions.py +8 -38
- sempy_labs/_managed_private_endpoints.py +9 -2
- sempy_labs/_mirrored_databases.py +3 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +2 -11
- sempy_labs/_model_bpa_bulk.py +33 -38
- sempy_labs/_model_bpa_rules.py +1 -1
- sempy_labs/_one_lake_integration.py +2 -1
- sempy_labs/_semantic_models.py +20 -0
- sempy_labs/_sql.py +6 -2
- sempy_labs/_sqldatabase.py +61 -100
- sempy_labs/_vertipaq.py +8 -11
- sempy_labs/_warehouses.py +14 -3
- sempy_labs/_workspace_identity.py +6 -0
- sempy_labs/_workspaces.py +42 -2
- sempy_labs/admin/_basic_functions.py +29 -2
- sempy_labs/admin/_reports.py +1 -1
- sempy_labs/admin/_scanner.py +2 -4
- sempy_labs/admin/_tenant.py +8 -3
- sempy_labs/directlake/_directlake_schema_compare.py +2 -1
- sempy_labs/directlake/_directlake_schema_sync.py +65 -19
- sempy_labs/directlake/_dl_helper.py +0 -6
- sempy_labs/directlake/_generate_shared_expression.py +19 -12
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +90 -57
- sempy_labs/directlake/_update_directlake_partition_entity.py +5 -2
- sempy_labs/graph/_groups.py +6 -0
- sempy_labs/graph/_teams.py +2 -0
- sempy_labs/graph/_users.py +4 -0
- sempy_labs/lakehouse/__init__.py +12 -3
- sempy_labs/lakehouse/_blobs.py +231 -0
- sempy_labs/lakehouse/_shortcuts.py +29 -8
- sempy_labs/migration/_direct_lake_to_import.py +47 -10
- sempy_labs/migration/_migration_validation.py +0 -4
- sempy_labs/report/__init__.py +4 -0
- sempy_labs/report/_download_report.py +4 -6
- sempy_labs/report/_generate_report.py +6 -6
- sempy_labs/report/_report_functions.py +5 -4
- sempy_labs/report/_report_helper.py +17 -5
- sempy_labs/report/_report_rebind.py +8 -6
- sempy_labs/report/_reportwrapper.py +17 -8
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/tom/_model.py +154 -23
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info/licenses}/LICENSE +0 -0
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/top_level.txt +0 -0
sempy_labs/report/_reportwrapper.py
CHANGED

(Removed lines shown blank in the diff view are reconstructed below from the surrounding context.)

```diff
@@ -966,17 +966,25 @@ class ReportWrapper:
         keys_path = []
 
         if isinstance(data, dict):
+            expression = data.get("Expression", {})
+            source_ref = (
+                expression.get("SourceRef", {})
+                if isinstance(expression, dict)
+                else {}
+            )
             if (
-                "Expression" in data
+                isinstance(source_ref, dict)
+                and "Entity" in source_ref
                 and "Property" in data
             ):
-                entity = (
-                    data.get("Expression", {})
-                    .get("SourceRef", {})
-                    .get("Entity", "")
+                entity = source_ref.get("Entity", "")
+                property_value = data.get("Property", "")
+
+                object_type = (
+                    keys_path[-1].replace("HierarchyLevel", "Hierarchy")
+                    if keys_path
+                    else "Unknown"
                 )
-                property_value = data.get("Property", {})
-                object_type = keys_path[-1].replace("HierarchyLevel", "Hierarchy")
                 is_agg = keys_path[-3] == "Aggregation"
                 is_viz_calc = keys_path[-3] == "NativeVisualCalculation"
                 is_sparkline = keys_path[-3] == "SparklineData"
@@ -987,7 +995,8 @@ class ReportWrapper:
                     is_viz_calc,
                     is_sparkline,
                 )
-                keys_path.pop()
+                if keys_path:
+                    keys_path.pop()
 
         # Recursively search the rest of the dictionary
         for key, value in data.items():
```
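Taken together, these two hunks harden the visual-object parser against malformed report layouts: dictionary lookups are type-checked before chaining, `object_type` falls back to `"Unknown"` when `keys_path` is empty, and the trailing `pop()` is guarded. A minimal sketch of the failure mode the guards address (the input fragments are hypothetical, not taken from the package):

```python
# Hypothetical layout fragments; "Expression" is sometimes not a dict in the wild.
data_ok = {"Expression": {"SourceRef": {"Entity": "Sales"}}, "Property": "Amount"}
data_bad = {"Expression": "not-a-dict", "Property": "Amount"}

for data in (data_ok, data_bad):
    expression = data.get("Expression", {})
    # The old chained form, data.get("Expression", {}).get("SourceRef", {}),
    # raises AttributeError on data_bad; the guarded form skips it instead.
    source_ref = expression.get("SourceRef", {}) if isinstance(expression, dict) else {}
    if isinstance(source_ref, dict) and "Entity" in source_ref and "Property" in data:
        print(source_ref.get("Entity", ""), data.get("Property", ""))  # Sales Amount
```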
sempy_labs/report/_save_report.py
ADDED (@@ -0,0 +1,147 @@)

```python
import os
import base64
import json
import sempy.fabric as fabric
import sempy_labs._icons as icons
from sempy_labs.report._generate_report import get_report_definition
from sempy_labs._generate_semantic_model import get_semantic_model_definition
from sempy_labs._helper_functions import (
    _mount,
    resolve_workspace_name_and_id,
    resolve_item_name,
    resolve_workspace_name,
    resolve_item_name_and_id,
)
from uuid import UUID
from sempy._utils._log import log
from typing import Optional


@log
def save_report_as_pbip(
    report: str | UUID,
    workspace: Optional[str | UUID] = None,
    thick_report: bool = True,
    live_connect: bool = True,
    lakehouse: Optional[str | UUID] = None,
    lakehouse_workspace: Optional[str | UUID] = None,
):
    """
    Saves a report as a .pbip file to the default lakehouse attached to the notebook.

    Parameters
    ----------
    report : str | uuid.UUID
        Name or ID of the Power BI report.
    workspace : str | uuid.UUID, default=None
        The name or ID of the Fabric workspace in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    thick_report : bool, default=True
        If set to True, saves the report and underlying semantic model.
        If set to False, saves just the report.
    live_connect : bool, default=True
        If set to True, saves a .pbip live-connected to the workspace in the Power BI / Fabric service.
        If set to False, saves a .pbip with a local model, independent from the Power BI / Fabric service.
    lakehouse : str | uuid.UUID, default=None
        The Fabric lakehouse name or ID. This will be the lakehouse to which the report is saved.
        Defaults to None which resolves to the lakehouse attached to the notebook.
    lakehouse_workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID used by the lakehouse.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
        workspace
    )
    (report_name, report_id) = resolve_item_name_and_id(
        item=report, type="Report", workspace=workspace
    )
    indent = 2

    local_path = _mount(lakehouse=lakehouse, workspace=lakehouse_workspace)
    save_location = f"{local_path}/Files"

    # Find semantic model info
    dfR = fabric.list_reports(workspace=workspace)
    dfR_filt = dfR[dfR["Id"] == report_id]
    if dfR_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{report}' report does not exist within the '{report_workspace_name}' workspace."
        )

    dataset_id = dfR_filt["Dataset Id"].iloc[0]
    dataset_workspace_id = dfR_filt["Dataset Workspace Id"].iloc[0]
    dataset_name = resolve_item_name(item_id=dataset_id, workspace=dataset_workspace_id)
    dataset_workspace_name = resolve_workspace_name(dataset_workspace_id)
    path_prefix = f"{save_location}/{report_workspace_name}/{report_name}/{report_name}"

    # Local model not supported if the report and model are in different workspaces
    if dataset_workspace_name != report_workspace_name and not live_connect:
        live_connect = True
        print(
            f"{icons.warning} The '{report_name}' report from the '{report_workspace_name}' workspace is being saved as a live-connected report/model."
        )

    def add_files(name, type, object_workspace):

        path_prefix_full = f"{path_prefix}.{type}"

        if type == "Report":
            dataframe = get_report_definition(report=name, workspace=workspace)
        elif type == "SemanticModel":
            dataframe = get_semantic_model_definition(
                dataset=name, workspace=object_workspace
            )
        else:
            raise NotImplementedError

        # Create and save files based on dataset/report definition
        for _, r in dataframe.iterrows():
            path = r["path"]
            file_content = base64.b64decode(r["payload"])
            file_path = f"{path_prefix_full}/{path}"
            os.makedirs(os.path.dirname(file_path), exist_ok=True)

            # Update the definition.pbir file for local models
            if not live_connect and type == "Report" and path == "definition.pbir":
                file_content = {
                    "version": "1.0",
                    "datasetReference": {
                        "byPath": {"path": f"../{report_name}.SemanticModel"},
                        "byConnection": None,
                    },
                }

                with open(file_path, "w") as f:
                    json.dump(file_content, f, indent=indent)
            else:
                with open(file_path, "wb") as f:
                    f.write(file_content)

        # Create and save .pbip file for report, converting the file extension
        if type == "Report":
            # Standard .pbip file content
            pbip = {
                "version": "1.0",
                "artifacts": [{"report": {"path": f"{report_name}.Report"}}],
                "settings": {"enableAutoRecovery": True},
            }
            # Ensure the directory exists
            os.makedirs(os.path.dirname(path_prefix), exist_ok=True)
            # Write the .pbip file directly
            pbip_final = f"{path_prefix}.pbip"
            with open(pbip_final, "w") as file:
                json.dump(pbip, file, indent=indent)

    add_files(name=report_name, type="Report", object_workspace=workspace)
    if thick_report:
        add_files(
            name=dataset_name,
            type="SemanticModel",
            object_workspace=dataset_workspace_name,
        )
    print(
        f"{icons.green_dot} The '{report_name}' report within the '{report_workspace_name}' workspace has been saved to this location: {save_location}."
    )
```
sempy_labs/tom/_model.py
CHANGED

(Some removed lines in the hunks below were truncated in the diff view; where the surrounding context makes them unambiguous they are reconstructed, and one line that cannot be recovered is left as shown.)

```diff
@@ -11,6 +11,9 @@ from sempy_labs._helper_functions import (
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
     _base_api,
+    resolve_workspace_id,
+    resolve_item_id,
+    resolve_lakehouse_id,
 )
 from sempy_labs._list_functions import list_relationships
 from sempy_labs._refresh_semantic_model import refresh_semantic_model
```
```diff
@@ -84,7 +87,7 @@ class TOMWrapper:
         # No token provider (standard authentication)
         if self._token_provider is None:
             self._tom_server = fabric.create_tom_server(
-                readonly=readonly, workspace=workspace_id
+                dataset=dataset, readonly=readonly, workspace=workspace_id
            )
         # Service Principal Authentication for Azure AS via token provider
         elif self._is_azure_as:
```
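The fix here forwards the requested dataset when creating the TOM server, where previously only `readonly` and `workspace` were passed. The corrected call shape in isolation, assuming the sempy version targeted by this release (names are illustrative):

```python
import sempy.fabric as fabric

# Scope the TOM server connection to the requested semantic model
# instead of connecting to the workspace alone.
tom_server = fabric.create_tom_server(
    dataset="Sales Model",  # illustrative model name
    readonly=True,
    workspace="Analytics",  # illustrative workspace name
)
```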
```diff
@@ -2257,7 +2260,7 @@
 
         if validate:
             dax_query = f"""
-                define measure '{table_name}'[test] = 
+                define measure '{table_name}'[test] =
                 var mn = MIN('{table_name}'[{column_name}])
                 var ma = MAX('{table_name}'[{column_name}])
                 var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
```

(The only change in this hunk is trailing whitespace removed after the `=`.)
```diff
@@ -3309,14 +3312,16 @@
             .tolist()
         )
         cols = (
-            fil[fil["Referenced Object Type"] == "Column"][
+            fil[fil["Referenced Object Type"].isin(["Column", "Calc Column"])][
                 "Referenced Full Object Name"
             ]
             .unique()
             .tolist()
         )
         tbls = (
-            fil[fil["Referenced Object Type"] == "Table"]["Referenced Table"]
+            fil[fil["Referenced Object Type"].isin(["Table", "Calc Table"])][
+                "Referenced Table"
+            ]
             .unique()
             .tolist()
         )
```
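Both filters here widen an exact-match comparison to a set membership test, so calculated columns and calculated tables are now picked up by the dependency scan. The pandas idiom in isolation, with an illustrative frame:

```python
import pandas as pd

# Illustrative dependency rows, not taken from the package.
fil = pd.DataFrame({
    "Referenced Object Type": ["Column", "Calc Column", "Table", "Measure"],
    "Referenced Full Object Name": ["'Sales'[Amount]", "'Sales'[Margin]", "'Date'", "[Total]"],
})

# Old behavior: equality keeps only plain columns.
old = fil[fil["Referenced Object Type"] == "Column"]["Referenced Full Object Name"].unique().tolist()
# New behavior: .isin() also keeps calculated columns.
new = fil[fil["Referenced Object Type"].isin(["Column", "Calc Column"])]["Referenced Full Object Name"].unique().tolist()
print(old)  # ["'Sales'[Amount]"]
print(new)  # ["'Sales'[Amount]", "'Sales'[Margin]"]
```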
```diff
@@ -3489,7 +3494,7 @@
                 tableList.append(c.Parent.Name)
             if (
                 re.search(
-                    create_pattern(tableList,
+                    create_pattern(tableList, obj.Name),
                     expr,
                 )
                 is not None
```

(The removed line was truncated in the diff view and is left as shown.)
```diff
@@ -4857,6 +4862,18 @@
 
         result_df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
 
+        def add_to_result(table_name, object_name, object_type, dataframe):
+
+            new_data = {
+                "Table Name": table_name,
+                "Object Name": object_name,
+                "Object Type": object_type,
+            }
+
+            return pd.concat(
+                [dataframe, pd.DataFrame(new_data, index=[0])], ignore_index=True
+            )
+
         for _, r in filt.iterrows():
             added = False
             obj_type = r["Referenced Object Type"]
```
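The new `add_to_result` helper factors out a row-append idiom that was previously inlined, and it is reused in the relationship-column hunk further below. A standalone sketch with an illustrative row:

```python
import pandas as pd

def add_to_result(table_name, object_name, object_type, dataframe):
    # Append a single row; pd.concat is the idiom that works on pandas 2.x,
    # where the old DataFrame.append API was removed.
    new_data = {
        "Table Name": table_name,
        "Object Name": object_name,
        "Object Type": object_type,
    }
    return pd.concat([dataframe, pd.DataFrame(new_data, index=[0])], ignore_index=True)

result_df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
result_df = add_to_result("Sales", "Amount", "Column", result_df)  # illustrative row
print(result_df)
```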
```diff
@@ -4890,15 +4907,7 @@
                 )
                 added = True
             if added:
-                new_data = {
-                    "Table Name": table_name,
-                    "Object Name": object_name,
-                    "Object Type": obj_type,
-                }
-
-                result_df = pd.concat(
-                    [result_df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
+                result_df = add_to_result(table_name, object_name, obj_type, result_df)
 
         # Reduce model...
 
```
```diff
@@ -4921,11 +4930,21 @@
                 self.add_to_perspective(
                     object=r.FromColumn, perspective_name=perspective_name
                 )
+
+                result_df = add_to_result(
+                    r.FromTable.Name, r.FromColumn.Name, "Column", result_df
+                )
             if not self.in_perspective(r.ToColumn, perspective_name=perspective_name):
+                table_name = r.ToTable.Name
+                object_name = r.ToColumn.Name
                 self.add_to_perspective(
                     object=r.ToColumn, perspective_name=perspective_name
                 )
 
+                result_df = add_to_result(
+                    r.ToTable.Name, r.ToColumn.Name, "Column", result_df
+                )
+
         # Remove objects not in the perspective
         for t in self.model.Tables:
             if not self.in_perspective(object=t, perspective_name=perspective_name):
```
```diff
@@ -4943,6 +4962,111 @@
         # Return the objects added to the perspective based on dependencies
         return result_df.drop_duplicates()
 
+    def convert_direct_lake_to_import(
+        self,
+        table_name: str,
+        entity_name: Optional[str] = None,
+        schema: Optional[str] = None,
+        source: Optional[str | UUID] = None,
+        source_type: str = "Lakehouse",
+        source_workspace: Optional[str | UUID] = None,
+    ):
+        """
+        Converts a Direct Lake table's partition to an import-mode partition.
+
+        The entity_name and schema parameters default to using the existing values in the Direct Lake partition. The source, source_type, and source_workspace
+        parameters do not default to existing values. This is because it may not always be possible to reconcile the source and its workspace.
+
+        Parameters
+        ----------
+        table_name : str
+            The table name.
+        entity_name : str, default=None
+            The entity name of the Direct Lake partition (the table name in the source).
+        schema : str, default=None
+            The schema of the source table. Defaults to None which resolves to the existing schema.
+        source : str | uuid.UUID, default=None
+            The source name or ID. This is the name or ID of the Lakehouse or Warehouse.
+        source_type : str, default="Lakehouse"
+            The source type (i.e. "Lakehouse" or "Warehouse").
+        source_workspace : str | uuid.UUID, default=None
+            The workspace name or ID of the source. This is the workspace in which the Lakehouse or Warehouse exists.
+            Defaults to None which resolves to the workspace of the attached lakehouse
+            or if no lakehouse attached, resolves to the workspace of the notebook.
+        """
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        p = next(p for p in self.model.Tables[table_name].Partitions)
+        if p.Mode != TOM.ModeType.DirectLake:
+            print(f"{icons.info} The '{table_name}' table is not in Direct Lake mode.")
+            return
+
+        partition_name = p.Name
+        partition_entity_name = entity_name or p.Source.EntityName
+        partition_schema = schema or p.Source.SchemaName
+
+        # Update name of the Direct Lake partition (will be removed later)
+        self.model.Tables[table_name].Partitions[
+            partition_name
+        ].Name = f"{partition_name}_remove"
+
+        source_workspace_id = resolve_workspace_id(workspace=source_workspace)
+        if source_type == "Lakehouse":
+            item_id = resolve_lakehouse_id(
+                lakehouse=source, workspace=source_workspace_id
+            )
+        else:
+            item_id = resolve_item_id(
+                item=source, type=source_type, workspace=source_workspace_id
+            )
+
+        def _generate_m_expression(
+            workspace_id, artifact_id, artifact_type, table_name, schema_name
+        ):
+            """
+            Generates the M expression for the import partition.
+            """
+
+            if artifact_type == "Lakehouse":
+                type_id = "lakehouseId"
+            elif artifact_type == "Warehouse":
+                type_id = "warehouseId"
+            else:
+                raise NotImplementedError
+
+            full_table_name = (
+                f"{schema_name}.{table_name}" if schema_name else table_name
+            )
+
+            return f"""let
+    Source = {artifact_type}.Contents(null),
+    #"Workspace" = Source{{[workspaceId="{workspace_id}"]}}[Data],
+    #"Artifact" = #"Workspace"{{[{type_id}="{artifact_id}"]}}[Data],
+    result = #"Artifact"{{[Id="{full_table_name}",ItemKind="Table"]}}[Data]
+in
+    result
+"""
+
+        m_expression = _generate_m_expression(
+            source_workspace_id,
+            item_id,
+            source_type,
+            partition_entity_name,
+            partition_schema,
+        )
+
+        # Add the import partition
+        self.add_m_partition(
+            table_name=table_name,
+            partition_name=f"{partition_name}",
+            expression=m_expression,
+            mode="Import",
+        )
+        # Remove the Direct Lake partition
+        self.remove_object(object=p)
+
+        print(f"{icons.green_dot} The '{table_name}' table has been converted to Import mode.")
+
     def close(self):
 
         if not self._readonly and self.model is not None:
```
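This mirrors the new standalone helper in `sempy_labs/migration/_direct_lake_to_import.py`: it swaps a table's Direct Lake entity partition for an equivalent import-mode M partition against the same Lakehouse or Warehouse table. A minimal usage sketch via the TOM context manager; the model, table, and workspace names are illustrative:

```python
from sempy_labs.tom import connect_semantic_model

# Assumes the model, lakehouse, and workspace exist; readonly=False so the
# partition swap is saved back to the model on close.
with connect_semantic_model(
    dataset="Sales Model", workspace="Analytics", readonly=False
) as tom:
    tom.convert_direct_lake_to_import(
        table_name="Sales",
        source="MyLakehouse",      # Lakehouse hosting the delta table
        source_type="Lakehouse",
        source_workspace="Analytics",
    )
```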
```diff
@@ -4956,18 +5080,25 @@
                 p.SourceType == TOM.PartitionSourceType.Entity
                 for p in t.Partitions
             ):
-                if t.LineageTag in list(self._table_map.keys()):
-                    if self._table_map.get(t.LineageTag) != t.Name:
-                        self.add_changed_property(object=t, property="Name")
+                entity_name = next(p.Source.EntityName for p in t.Partitions)
+                if t.Name != entity_name:
+                    self.add_changed_property(object=t, property="Name")
+                # if t.LineageTag in list(self._table_map.keys()):
+                #     if self._table_map.get(t.LineageTag) != t.Name:
+                #         self.add_changed_property(object=t, property="Name")
 
         for c in self.all_columns():
+            # if c.LineageTag in list(self._column_map.keys()):
+            if any(
+                p.SourceType == TOM.PartitionSourceType.Entity
+                for p in c.Parent.Partitions
+            ):
+                if c.Name != c.SourceColumn:
+                    self.add_changed_property(object=c, property="Name")
+                # c.SourceLineageTag = c.SourceColumn
+                # if self._column_map.get(c.LineageTag)[0] != c.Name:
+                #     self.add_changed_property(object=c, property="Name")
             if c.LineageTag in list(self._column_map.keys()):
-                if any(
-                    p.SourceType == TOM.PartitionSourceType.Entity
-                    for p in c.Parent.Partitions
-                ):
-                    if self._column_map.get(c.LineageTag)[0] != c.Name:
-                        self.add_changed_property(object=c, property="Name")
                 if self._column_map.get(c.LineageTag)[1] != c.DataType:
                     self.add_changed_property(object=c, property="DataType")
 
```
{semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info/licenses}/LICENSE
File without changes

{semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/top_level.txt
File without changes