semantic-link-labs 0.4.1__py3-none-any.whl → 0.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/METADATA +1 -1
- semantic_link_labs-0.4.2.dist-info/RECORD +53 -0
- sempy_labs/__init__.py +25 -25
- sempy_labs/_ai.py +28 -27
- sempy_labs/_clear_cache.py +2 -1
- sempy_labs/_dax.py +5 -9
- sempy_labs/_generate_semantic_model.py +7 -8
- sempy_labs/_helper_functions.py +17 -13
- sempy_labs/_icons.py +5 -0
- sempy_labs/_list_functions.py +273 -17
- sempy_labs/_model_auto_build.py +1 -1
- sempy_labs/_model_bpa.py +37 -37
- sempy_labs/_model_dependencies.py +11 -12
- sempy_labs/_one_lake_integration.py +15 -22
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +4 -4
- sempy_labs/_translations.py +5 -5
- sempy_labs/_vertipaq.py +11 -11
- sempy_labs/directlake/_directlake_schema_compare.py +11 -9
- sempy_labs/directlake/_directlake_schema_sync.py +36 -37
- sempy_labs/directlake/_fallback.py +3 -3
- sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
- sempy_labs/directlake/_get_shared_expression.py +3 -3
- sempy_labs/directlake/_guardrails.py +3 -3
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -25
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -11
- sempy_labs/directlake/_update_directlake_partition_entity.py +25 -9
- sempy_labs/directlake/_warm_cache.py +5 -7
- sempy_labs/lakehouse/__init__.py +0 -2
- sempy_labs/lakehouse/_get_lakehouse_columns.py +3 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -7
- sempy_labs/lakehouse/_lakehouse.py +6 -5
- sempy_labs/lakehouse/_shortcuts.py +8 -106
- sempy_labs/migration/__init__.py +4 -2
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -6
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
- sempy_labs/migration/_migration_validation.py +1 -164
- sempy_labs/migration/_refresh_calc_tables.py +3 -5
- sempy_labs/report/__init__.py +2 -2
- sempy_labs/report/_generate_report.py +14 -15
- sempy_labs/report/_report_functions.py +11 -10
- sempy_labs/report/_report_rebind.py +6 -7
- sempy_labs/tom/__init__.py +6 -0
- sempy_labs/{_tom.py → tom/_model.py} +166 -187
- semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
- {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/top_level.txt +0 -0
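Before the per-file hunks, note the structural change visible in the file list above: sempy_labs/_tom.py moves to sempy_labs/tom/_model.py and gains a sempy_labs/tom/__init__.py, so the TOM wrapper becomes a public subpackage. The import rewrites below all follow this pattern; a minimal before/after sketch:

    # 0.4.1 -- private module
    # from sempy_labs._tom import connect_semantic_model

    # 0.4.2 -- public subpackage (re-exported via sempy_labs/tom/__init__.py)
    from sempy_labs.tom import connect_semantic_model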
sempy_labs/directlake/_show_unsupported_directlake_objects.py CHANGED

@@ -4,8 +4,9 @@ import pandas as pd
 from sempy_labs._list_functions import list_tables
 from sempy_labs._helper_functions import format_dax_object_name
 from typing import Optional, Tuple
+from sempy._utils._log import log

-
+@log
 def show_unsupported_direct_lake_objects(
     dataset: str, workspace: Optional[str] = None
 ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
@@ -29,9 +30,8 @@ def show_unsupported_direct_lake_objects(

     pd.options.mode.chained_assignment = None

-    if workspace == None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name()

     dfT = list_tables(dataset, workspace)
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py CHANGED

@@ -5,8 +5,9 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
 )
-from sempy_labs._tom import connect_semantic_model
-from typing import
+from sempy_labs.tom import connect_semantic_model
+from typing import Optional
+import sempy_labs._icons as icons


 def update_direct_lake_model_lakehouse_connection(
@@ -41,10 +42,10 @@ def update_direct_lake_model_lakehouse_connection(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = workspace

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)

@@ -54,7 +55,7 @@ def update_direct_lake_model_lakehouse_connection(

     if len(dfI_filt) == 0:
         print(
-            f"The '{lakehouse}' lakehouse does not exist within the '{lakehouse_workspace}' workspace. Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
+            f"{icons.red_dot} The '{lakehouse}' lakehouse does not exist within the '{lakehouse_workspace}' workspace. Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
         )

     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
@@ -62,7 +63,7 @@ def update_direct_lake_model_lakehouse_connection(

     if len(dfP_filt) == 0:
         print(
-            f"The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            f"{icons.yellow_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
         )
     else:
         with connect_semantic_model(
@@ -73,9 +74,7 @@ def update_direct_lake_model_lakehouse_connection(
             try:
                 tom.model.Expressions["DatabaseQuery"].Expression = shEx
                 print(
-                    f"The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
-                )
-            except:
-                print(
-                    f"ERROR: The expression in the '{dataset}' semantic model was not updated."
+                    f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
                 )
+            except Exception as e:
+                raise ValueError(f"{icons.red_dot} The expression in the '{dataset}' semantic model was not updated.") from e
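A behavioral note on the hunk above: a failed expression update now raises ValueError (chained with `from e`) instead of printing an "ERROR:" line, so callers that watched stdout must catch the exception instead. A hedged sketch, assuming the function is re-exported from sempy_labs.directlake as the file layout suggests (the dataset and lakehouse names are placeholders):

    from sempy_labs.directlake import update_direct_lake_model_lakehouse_connection

    try:
        update_direct_lake_model_lakehouse_connection(
            dataset="MyModel", lakehouse="MyLakehouse"
        )
    except ValueError as e:
        # 0.4.2 surfaces failures as exceptions rather than printed text.
        print(f"Update failed: {e}")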
sempy_labs/directlake/_update_directlake_partition_entity.py CHANGED

@@ -1,13 +1,17 @@
+import sempy
 import sempy.fabric as fabric
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
+from sempy_labs._helper_functions import resolve_lakehouse_name
 from typing import List, Optional, Union
-
+import sempy_labs._icons as icons

 def update_direct_lake_partition_entity(
     dataset: str,
     table_name: Union[str, List[str]],
     entity_name: Union[str, List[str]],
     workspace: Optional[str] = None,
+    lakehouse: Optional[str] = None,
+    lakehouse_workspace: Optional[str] = None
 ):
     """
     Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.
@@ -24,10 +28,24 @@ def update_direct_lake_partition_entity(
         The Fabric workspace name in which the semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    lakehouse : str, default=None
+        The Fabric lakehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    lakehouse_workspace : str, default=None
+        The Fabric workspace used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
     """

     workspace = fabric.resolve_workspace_name(workspace)

+    if lakehouse_workspace is None:
+        lakehouse_workspace = workspace
+
+    if lakehouse is None:
+        lakehouse_id = fabric.get_lakehouse_id()
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
+
     # Support both str & list types
     if isinstance(table_name, str):
         table_name = [table_name]
@@ -36,7 +54,7 @@ def update_direct_lake_partition_entity(

     if len(table_name) != len(entity_name):
         print(
-            f"The 'table_name' and 'entity_name' arrays must be of equal length."
+            f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length."
         )
         return

@@ -46,7 +64,7 @@ def update_direct_lake_partition_entity(

     if not tom.is_direct_lake():
         print(
-            f"The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+            f"{icons.yellow_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
         )
         return

@@ -56,9 +74,7 @@ def update_direct_lake_partition_entity(
             try:
                 tom.model.Tables[tName].Partitions[0].EntityName = eName
                 print(
-                    f"The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
-                )
-            except:
-                print(
-                    f"ERROR: The '{tName}' table in the '{dataset}' semantic model has not been updated."
+                    f"{icons.green_dot} The '{tName}' table in the '{dataset}' semantic model has been updated to point to the '{eName}' table in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
                 )
+            except Exception as e:
+                raise ValueError(f"{icons.red_dot} The '{tName}' table in the '{dataset}' semantic model has not been updated.") from e
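The hunks above give update_direct_lake_partition_entity optional lakehouse and lakehouse_workspace parameters (resolved from the attached lakehouse when omitted) and make remap failures raise instead of print. A hedged call sketch with placeholder names, assuming the function is re-exported from sempy_labs.directlake:

    from sempy_labs.directlake import update_direct_lake_partition_entity

    # table_name and entity_name must be equal-length lists (or single strings).
    update_direct_lake_partition_entity(
        dataset="MyModel",
        table_name=["DimDate", "FactSales"],
        entity_name=["dim_date", "fact_sales"],
        lakehouse="MyLakehouse",  # optional in 0.4.2
    )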
sempy_labs/directlake/_warm_cache.py CHANGED

@@ -131,7 +131,7 @@ def warm_direct_lake_cache_perspective(
         bar.set_description(f"Warming the '{tableName}' table...")
         css = ",".join(map(str, filtered_list))
         dax = """EVALUATE TOPN(1,SUMMARIZECOLUMNS(""" + css + """))"""
-
+        fabric.evaluate_dax(dataset=dataset, dax_string=dax, workspace=workspace)

     print(f"{icons.green_dot} The following columns have been put into memory:")

@@ -168,10 +168,9 @@ def warm_direct_lake_cache_isresident(

     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        print(
-            f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode."
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode."
         )
-        return

     # Identify columns which are currently in memory (Is Resident = True)
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace, extended=True)
@@ -181,10 +180,9 @@ def warm_direct_lake_cache_isresident(
     dfC_filtered = dfC[dfC["Is Resident"]]

     if len(dfC_filtered) == 0:
-        print(
+        raise ValueError(
             f"{icons.yellow_dot} At present, no columns are in memory in the '{dataset}' semantic model in the '{workspace}' workspace."
         )
-        return

     # Refresh/frame dataset
     refresh_semantic_model(dataset=dataset, refresh_type="full", workspace=workspace)
@@ -199,7 +197,7 @@ def warm_direct_lake_cache_isresident(
         bar.set_description(f"Warming the '{tableName}' table...")
         css = ",".join(map(str, column_values))
         dax = """EVALUATE TOPN(1,SUMMARIZECOLUMNS(""" + css + """))"""
-
+        fabric.evaluate_dax(dataset=dataset, dax_string=dax, workspace=workspace)

     print(
         f"{icons.green_dot} The following columns have been put into memory. Temperature indicates the column temperature prior to the semantic model refresh."
sempy_labs/lakehouse/__init__.py CHANGED

@@ -6,7 +6,6 @@ from sempy_labs.lakehouse._lakehouse import (
 )

 from sempy_labs.lakehouse._shortcuts import (
-    list_shortcuts,
     # create_shortcut,
     create_shortcut_onelake,
     delete_shortcut,
@@ -17,7 +16,6 @@ __all__ = [
     "get_lakehouse_tables",
     "lakehouse_attached",
     "optimize_lakehouse_tables",
-    "list_shortcuts",
     # create_shortcut,
     "create_shortcut_onelake",
     "delete_shortcut",
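These two hunks drop list_shortcuts from the package's public surface, so the old import now fails; the implementation itself is deleted from _shortcuts.py further down. A minimal sketch:

    # 0.4.1:
    # from sempy_labs.lakehouse import list_shortcuts   # worked
    # 0.4.2:
    from sempy_labs.lakehouse import create_shortcut_onelake, delete_shortcut  # still exported
    # from sempy_labs.lakehouse import list_shortcuts   # ImportError: removed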
sempy_labs/lakehouse/_get_lakehouse_columns.py CHANGED

@@ -8,8 +8,9 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
 )
 from typing import Optional
+from sempy._utils._log import log

-
+@log
 def get_lakehouse_columns(
     lakehouse: Optional[str] = None, workspace: Optional[str] = None
 ):
@@ -47,7 +48,7 @@ def get_lakehouse_columns(

     workspace = fabric.resolve_workspace_name(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
sempy_labs/lakehouse/_get_lakehouse_tables.py CHANGED

@@ -1,3 +1,4 @@
+import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from pyspark.sql import SparkSession
@@ -14,8 +15,10 @@ from sempy_labs.directlake._guardrails import (
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from typing import Optional
+import sempy_labs._icons as icons
+from sempy._utils._log import log

-
+@log
 def get_lakehouse_tables(
     lakehouse: Optional[str] = None,
     workspace: Optional[str] = None,
@@ -61,7 +64,7 @@ def get_lakehouse_tables(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
@@ -80,7 +83,7 @@ def get_lakehouse_tables(
         tType = i["type"]
         tFormat = i["format"]
         tLocation = i["location"]
-        if extended == False:
+        if not extended:
             new_data = {
                 "Workspace Name": workspace,
                 "Lakehouse Name": lakehouse,
@@ -170,9 +173,9 @@ def get_lakehouse_tables(

     if export:
         lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
+        if lakeAttach is False:
             print(
-                f"In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+                f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )
             return
         spark = SparkSession.builder.getOrCreate()
@@ -234,7 +237,7 @@ def get_lakehouse_tables(
             export_df[c] = export_df[c].astype(bool)

         print(
-            f"Saving Lakehouse table properties to the '{lakeTName}' table in the lakehouse...\n"
+            f"{icons.in_progress} Saving Lakehouse table properties to the '{lakeTName}' table in the lakehouse...\n"
         )
         now = datetime.datetime.now()
         export_df["Timestamp"] = now
@@ -244,7 +247,7 @@ def get_lakehouse_tables(
         spark_df = spark.createDataFrame(export_df)
         spark_df.write.mode("append").format("delta").saveAsTable(lakeTName)
         print(
-            f"Lakehouse table properties have been saved to the '{lakeTName}' delta table."
+            f"{icons.bullet} Lakehouse table properties have been saved to the '{lakeTName}' delta table."
         )

     return df
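The get_lakehouse_tables hunks thread icons through its messages and guard the export path behind an attached lakehouse. A hedged call sketch, assuming extended and export are keyword parameters as the truncated signature and the `if extended` / `if export:` branches suggest (names are placeholders):

    from sempy_labs.lakehouse import get_lakehouse_tables

    # export=True appends a timestamped snapshot to a delta table (lakeTName in
    # the hunks above) and requires a lakehouse attached to the notebook.
    df = get_lakehouse_tables(
        lakehouse="MyLakehouse", workspace="MyWorkspace",
        extended=True, export=False,
    )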
sempy_labs/lakehouse/_lakehouse.py CHANGED

@@ -4,7 +4,8 @@ from tqdm.auto import tqdm
 from pyspark.sql import SparkSession
 from sempy_labs._helper_functions import resolve_lakehouse_name
 from typing import List, Optional, Union
-
+import sempy_labs._icons as icons
+from sempy._utils._log import log

 def lakehouse_attached() -> bool:
     """
@@ -24,7 +25,7 @@ def lakehouse_attached() -> bool:
     else:
         return False

-
+@log
 def optimize_lakehouse_tables(
     tables: Optional[Union[str, List[str]]] = None,
     lakehouse: Optional[str] = None,
@@ -49,11 +50,11 @@ def optimize_lakehouse_tables(
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
     from delta import DeltaTable

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)

@@ -80,6 +81,6 @@ def optimize_lakehouse_tables(
         deltaTable = DeltaTable.forPath(spark, tablePath)
         deltaTable.optimize().executeCompaction()
         print(
-            f"The '{tableName}' table has been optimized. ({str(i)}/{str(tableCount)})"
+            f"{icons.green_dot} The '{tableName}' table has been optimized. ({str(i)}/{str(tableCount)})"
         )
         i += 1
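As elsewhere in this release, _lakehouse.py gains the @log decorator from sempy._utils._log on its public entry point. The pattern is a plain decorator that instruments the call through Semantic Link's logging machinery while leaving the signature untouched; a minimal sketch (my_helper is a hypothetical function):

    from sempy._utils._log import log

    @log
    def my_helper(n: int) -> int:
        # @log records the invocation; callers see unchanged behavior.
        return n * 2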
sempy_labs/lakehouse/_shortcuts.py CHANGED

@@ -1,12 +1,11 @@
 import sempy
 import sempy.fabric as fabric
-import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     resolve_workspace_name_and_id,
 )
-from typing import
+from typing import Optional
 import sempy_labs._icons as icons


@@ -42,7 +41,7 @@ def create_shortcut_onelake(
     sourceWorkspaceId = fabric.resolve_workspace_id(source_workspace)
     sourceLakehouseId = resolve_lakehouse_id(source_lakehouse, source_workspace)

-    if destination_workspace == None:
+    if destination_workspace is None:
         destination_workspace = source_workspace

     destinationWorkspaceId = fabric.resolve_workspace_id(destination_workspace)
@@ -50,7 +49,7 @@ def create_shortcut_onelake(
         destination_lakehouse, destination_workspace
     )

-    if shortcut_name == None:
+    if shortcut_name is None:
         shortcut_name = table_name

     client = fabric.FabricRestClient()
@@ -80,9 +79,7 @@ def create_shortcut_onelake(
         else:
             print(response.status_code)
     except Exception as e:
-        print(
-            f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table: {e}"
-        )
+        raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{table_name}' table.") from e


 def create_shortcut(
@@ -126,7 +123,7 @@ def create_shortcut(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
     else:
         lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
@@ -157,103 +154,8 @@ def create_shortcut(
             )
         else:
             print(response.status_code)
-    except:
-        print(
-            f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table."
-        )
-
-
-def list_shortcuts(
-    lakehouse: Optional[str] = None, workspace: Optional[str] = None
-) -> pd.DataFrame:
-    """
-    Shows all shortcuts which exist in a Fabric lakehouse.
-
-    Parameters
-    ----------
-    lakehouse : str, default=None
-        The Fabric lakehouse name.
-        Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The name of the Fabric workspace in which lakehouse resides.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
-    """
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    if lakehouse == None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
-
-    df = pd.DataFrame(
-        columns=[
-            "Shortcut Name",
-            "Shortcut Path",
-            "Source",
-            "Source Lakehouse Name",
-            "Source Workspace Name",
-            "Source Path",
-            "Source Connection ID",
-            "Source Location",
-            "Source SubPath",
-        ]
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.get(
-        f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
-    )
-    if response.status_code == 200:
-        for s in response.json()["value"]:
-            shortcutName = s["name"]
-            shortcutPath = s["path"]
-            source = list(s["target"].keys())[0]
-            (
-                sourceLakehouseName,
-                sourceWorkspaceName,
-                sourcePath,
-                connectionId,
-                location,
-                subpath,
-            ) = (None, None, None, None, None, None)
-            if source == "oneLake":
-                sourceLakehouseId = s["target"][source]["itemId"]
-                sourcePath = s["target"][source]["path"]
-                sourceWorkspaceId = s["target"][source]["workspaceId"]
-                sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
-                sourceLakehouseName = resolve_lakehouse_name(
-                    sourceLakehouseId, sourceWorkspaceName
-                )
-            else:
-                connectionId = s["target"][source]["connectionId"]
-                location = s["target"][source]["location"]
-                subpath = s["target"][source]["subpath"]
-
-            new_data = {
-                "Shortcut Name": shortcutName,
-                "Shortcut Path": shortcutPath,
-                "Source": source,
-                "Source Lakehouse Name": sourceLakehouseName,
-                "Source Workspace Name": sourceWorkspaceName,
-                "Source Path": sourcePath,
-                "Source Connection ID": connectionId,
-                "Source Location": location,
-                "Source SubPath": subpath,
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    print(
-        f"This function relies on an API which is not yet official as of May 21, 2024. Once the API becomes official this function will work as expected."
-    )
-    return df
+    except Exception as e:
+        raise ValueError(f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table.") from e


 def delete_shortcut(
@@ -277,7 +179,7 @@ def delete_shortcut(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
sempy_labs/migration/__init__.py CHANGED

@@ -14,7 +14,9 @@ from sempy_labs.migration._migrate_tables_columns_to_semantic_model import (
 )
 from sempy_labs.migration._migration_validation import (
     migration_validation,
-
+)
+from sempy_labs.migration._refresh_calc_tables import (
+    refresh_calc_tables,
 )

 __all__ = [
@@ -25,5 +27,5 @@ __all__ = [
     "migrate_model_objects_to_semantic_model",
     "migrate_tables_columns_to_semantic_model",
     "migration_validation",
-
+    "refresh_calc_tables"
 ]
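With the two hunks above, refresh_calc_tables joins the migration package's public exports alongside migration_validation:

    # New in 0.4.2:
    from sempy_labs.migration import migration_validation, refresh_calc_tables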
sempy_labs/migration/_create_pqt_file.py CHANGED

@@ -34,13 +34,13 @@ def create_pqt_file(

     lakeAttach = lakehouse_attached()

-    if lakeAttach == False:
+    if lakeAttach is False:
         print(
             f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
         )
         return

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

sempy_labs/migration/_migrate_calctables_to_lakehouse.py CHANGED

@@ -8,7 +8,7 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
     create_abfss_path,
 )
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from pyspark.sql import SparkSession
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -52,16 +52,16 @@ def migrate_calc_tables_to_lakehouse(

     workspace = fabric.resolve_workspace_name(workspace)

-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace

-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
         lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)
     else:
         lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
     else:
@@ -288,16 +288,16 @@ def migrate_field_parameters(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    from
+    from sempy_labs import format_dax_object_name

     sempy.fabric._client._utils._init_analysis_services()
     import Microsoft.AnalysisServices.Tabular as TOM

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace

     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
sempy_labs/migration/_migrate_calctables_to_semantic_model.py CHANGED

@@ -3,7 +3,7 @@ import sempy.fabric as fabric
 import re, datetime, time
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs._helper_functions import resolve_lakehouse_name
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -46,12 +46,12 @@ def migrate_calc_tables_to_semantic_model(

     workspace = fabric.resolve_workspace_name(workspace)

-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace

-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)

sempy_labs/migration/_migrate_model_objects_to_semantic_model.py CHANGED

@@ -3,12 +3,11 @@ import sempy.fabric as fabric
 import re, datetime, time
 from sempy_labs._list_functions import list_tables
 from sempy_labs._helper_functions import create_relationship_name
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons

-
 @log
 def migrate_model_objects_to_semantic_model(
     dataset: str,
@@ -39,13 +38,13 @@ def migrate_model_objects_to_semantic_model(
     import Microsoft.AnalysisServices.Tabular as TOM
     import System

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)
     else:
-
+        workspace_id = fabric.resolve_workspace_id(workspace)

-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace

     dfT = list_tables(dataset, workspace)
@@ -239,7 +238,7 @@ def migrate_model_objects_to_semantic_model(
                     f"\n{icons.in_progress} Updating calculation group column name..."
                 )
                 dfC_filt = dfC[
-                    (dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)
+                    (dfC["Table Name"] == cgName) & (dfC["Hidden"] is False)
                 ]
                 colName = dfC_filt["Column Name"].iloc[0]
                 tom.model.Tables[cgName].Columns["Name"].Name = colName