semantic-link-labs: semantic_link_labs-0.5.0-py3-none-any.whl → semantic_link_labs-0.7.0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/migration/_create_pqt_file.py

@@ -1,8 +1,8 @@
 import sempy
 import sempy.fabric as fabric
-import json
-import
-
+import json
+import os
+import shutil
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy._utils._log import log
 from typing import Optional
@@ -11,10 +11,16 @@ import sempy_labs._icons as icons
 
 @log
 def create_pqt_file(
-    dataset: str,
+    dataset: str,
+    workspace: Optional[str] = None,
+    file_name: Optional[str] = "PowerQueryTemplate",
 ):
     """
-    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
+    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
+    saved within the Files section of your lakehouse.
+
+    Dataflows Gen2 has a `limit of 50 tables <https://learn.microsoft.com/power-query/power-query-online-limits>`_. If there are more than 50 tables, this will save multiple Power Query Template
+    files (with each file having a max of 50 tables).
 
     Parameters
     ----------
@@ -28,10 +34,14 @@ def create_pqt_file(
         The name of the Power Query Template file to be generated.
     """
 
-
+    sempy.fabric._client._utils._init_analysis_services()
+    import Microsoft.AnalysisServices.Tabular as TOM
+    from sempy_labs.tom import connect_semantic_model
 
-    if
-        raise ValueError(
+    if not lakehouse_attached():
+        raise ValueError(
+            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+        )
 
     workspace = fabric.resolve_workspace_name(workspace)
 
@@ -39,192 +49,180 @@ def create_pqt_file(
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
     os.makedirs(subFolderPath, exist_ok=True)
 
-    [old lines 42-113 were removed; their content was not captured in this diff view]
+    with connect_semantic_model(
+        dataset=dataset, workspace=workspace, readonly=True
+    ) as tom:
+        if not any(
+            p.SourceType == TOM.PartitionSourceType.M for p in tom.all_partitions()
+        ) and not any(t.RefreshPolicy for t in tom.model.Tables):
+            print(
+                f"{icons.info} The '{dataset}' semantic model within the '{workspace}' workspace has no Power Query logic."
+            )
+            return
+
+        table_map = {}
+        expr_map = {}
+
+        for t in tom.model.Tables:
+            table_name = t.Name
+            for char in icons.special_characters:
+                table_name = table_name.replace(char, "")
+            if t.RefreshPolicy:
+                table_map[table_name] = t.RefreshPolicy.SourceExpression
+            elif any(p.SourceType == TOM.PartitionSourceType.M for p in t.Partitions):
+                part_name = next(
+                    p.Name
+                    for p in t.Partitions
+                    if p.SourceType == TOM.PartitionSourceType.M
+                )
+                expr = t.Partitions[part_name].Source.Expression
+                table_map[table_name] = expr
+
+        for e in tom.model.Expressions:
+            expr_map[e.Name] = [str(e.Kind), e.Expression]
+
+        # Dataflows Gen2 max table limit is 50.
+        max_length = 50
+        table_chunks = [
+            dict(list(table_map.items())[i : i + max_length])
+            for i in range(0, len(table_map), max_length)
+        ]
+
+        def create_pqt(table_map: dict, expr_map: dict, file_name: str):
+
+            class QueryMetadata:
+                def __init__(
+                    self,
+                    QueryName,
+                    QueryGroupId=None,
+                    LastKnownIsParameter=None,
+                    LastKnownResultTypeName=None,
+                    LoadEnabled=True,
+                    IsHidden=False,
+                ):
+                    self.QueryName = QueryName
+                    self.QueryGroupId = QueryGroupId
+                    self.LastKnownIsParameter = LastKnownIsParameter
+                    self.LastKnownResultTypeName = LastKnownResultTypeName
+                    self.LoadEnabled = LoadEnabled
+                    self.IsHidden = IsHidden
+
+            class RootObject:
+                def __init__(
+                    self,
+                    DocumentLocale,
+                    EngineVersion,
+                    QueriesMetadata,
+                    QueryGroups=None,
+                ):
+                    if QueryGroups is None:
+                        QueryGroups = []
+                    self.DocumentLocale = DocumentLocale
+                    self.EngineVersion = EngineVersion
+                    self.QueriesMetadata = QueriesMetadata
+                    self.QueryGroups = QueryGroups
+
+            # STEP 1: Create MashupDocument.pq
+            mdfileName = "MashupDocument.pq"
+            mdFilePath = os.path.join(subFolderPath, mdfileName)
+            sb = "section Section1;"
+            for t_name, query in table_map.items():
+                sb = f'{sb}\nshared #"{t_name}" = '
+                if query is not None:
                     pQueryNoSpaces = (
-
+                        query.replace(" ", "")
                         .replace("\n", "")
                         .replace("\t", "")
                         .replace("\r", "")
                     )
                     if pQueryNoSpaces.startswith('letSource=""'):
-
-
-            if pSourceType == "M" and i == 1:
-                sb = sb + pQuery + ";"
-            elif refreshPolicy and i == 1:
-                sb = sb + sourceExpression + ";"
-            i += 1
-
-    for index, row in dfE.iterrows():
-        expr = row["Expression"]
-        eName = row["Name"]
-        eName = '#"' + eName + '"'
-        sb = sb + "\n" + "shared " + eName + " = " + expr + ";"
-
-    with open(mdFilePath, "w") as file:
-        file.write(sb)
-
-    # STEP 2: Create the MashupMetadata.json file
-    mmfileName = "MashupMetadata.json"
-    mmFilePath = os.path.join(subFolderPath, mmfileName)
-    queryMetadata = []
-
-    for tName in dfP["Table Name"].unique():
-        sourceType = dfP.loc[(dfP["Table Name"] == tName), "Source Type"].iloc[0]
-        refreshPolicy = dfT.loc[(dfT["Name"] == tName), "Refresh Policy"].iloc[0]
-        if sourceType == "M" or refreshPolicy:
-            queryMetadata.append(
-                QueryMetadata(tName, None, None, None, True, False)
-            )
+                        query = 'let\n\tSource = ""\nin\n\tSource'
+                sb = f"{sb}{query};"
 
-
-
-
-        if eKind == "M":
-            queryMetadata.append(
-                QueryMetadata(eName, None, None, None, True, False)
-            )
-        else:
-            queryMetadata.append(
-                QueryMetadata(eName, None, None, None, False, False)
-            )
+            for e_name, kind_expr in expr_map.items():
+                expr = kind_expr[1]
+                sb = f'{sb}\nshared #"{e_name}" = {expr};'
 
-
-
-    def obj_to_dict(obj):
-        if isinstance(obj, list):
-            return [obj_to_dict(e) for e in obj]
-        elif hasattr(obj, "__dict__"):
-            return {k: obj_to_dict(v) for k, v in obj.__dict__.items()}
-        else:
-            return obj
-
-    jsonContent = json.dumps(obj_to_dict(rootObject), indent=4)
-
-    with open(mmFilePath, "w") as json_file:
-        json_file.write(jsonContent)
-
-    # STEP 3: Create Metadata.json file
-    mFileName = "Metadata.json"
-    mFilePath = os.path.join(subFolderPath, mFileName)
-    metaData = {"Name": "fileName", "Description": "", "Version": "1.0.0.0"}
-    jsonContent = json.dumps(metaData, indent=4)
-
-    with open(mFilePath, "w") as json_file:
-        json_file.write(jsonContent)
-
-    # STEP 4: Create [Content_Types].xml file:
-    ns = "http://schemas.openxmlformats.org/package/2006/content-types"
-    ET.register_namespace("", ns)
-    types = ET.Element("{%s}Types" % ns)
-    default1 = ET.SubElement(
-        types,
-        "{%s}Default" % ns,
-        {"Extension": "json", "ContentType": "application/json"},
-    )
-    default2 = ET.SubElement(
-        types,
-        "{%s}Default" % ns,
-        {"Extension": "pq", "ContentType": "application/x-ms-m"},
-    )
-    xmlDocument = ET.ElementTree(types)
-    xmlFileName = "[Content_Types].xml"
-    xmlFilePath = os.path.join(subFolderPath, xmlFileName)
-    xmlDocument.write(
-        xmlFilePath, xml_declaration=True, encoding="utf-8", method="xml"
-    )
-
-    # STEP 5: Zip up the 4 files
-    zipFileName = file_name + ".zip"
-    zipFilePath = os.path.join(folderPath, zipFileName)
-    shutil.make_archive(zipFilePath[:-4], "zip", subFolderPath)
+            with open(mdFilePath, "w") as file:
+                file.write(sb)
 
-
-
-
-
-    newFilePath = os.path.join(directory, fileNameWithoutExtension + newExt)
-    shutil.move(zipFilePath, newFilePath)
+            # STEP 2: Create the MashupMetadata.json file
+            mmfileName = "MashupMetadata.json"
+            mmFilePath = os.path.join(subFolderPath, mmfileName)
+            queryMetadata = []
 
-
-
-
-
-
-
+            for t_name, query in table_map.items():
+                queryMetadata.append(
+                    QueryMetadata(t_name, None, None, None, True, False)
+                )
+            for e_name, kind_expr in expr_map.items():
+                e_kind = kind_expr[0]
+                if e_kind == "M":
+                    queryMetadata.append(
+                        QueryMetadata(e_name, None, None, None, True, False)
+                    )
+                else:
+                    queryMetadata.append(
+                        QueryMetadata(e_name, None, None, None, False, False)
+                    )
 
-
-
-
-
+            rootObject = RootObject(
+                "en-US", "2.132.328.0", queryMetadata
+            )  # "2.126.453.0"
+
+            def obj_to_dict(obj):
+                if isinstance(obj, list):
+                    return [obj_to_dict(e) for e in obj]
+                elif hasattr(obj, "__dict__"):
+                    return {k: obj_to_dict(v) for k, v in obj.__dict__.items()}
+                else:
+                    return obj
+
+            jsonContent = json.dumps(obj_to_dict(rootObject), indent=4)
+
+            with open(mmFilePath, "w") as json_file:
+                json_file.write(jsonContent)
+
+            # STEP 3: Create Metadata.json file
+            mFileName = "Metadata.json"
+            mFilePath = os.path.join(subFolderPath, mFileName)
+            metaData = {"Name": f"{file_name}", "Description": "", "Version": "1.0.0.0"}
+            jsonContent = json.dumps(metaData, indent=4)
+
+            with open(mFilePath, "w") as json_file:
+                json_file.write(jsonContent)
+
+            # STEP 4: Create [Content_Types].xml file:
+            xml_content = """<?xml version="1.0" encoding="utf-8"?><Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types"><Default Extension="json" ContentType="application/json" /><Default Extension="pq" ContentType="application/x-ms-m" /></Types>"""
+            xmlFileName = "[Content_Types].xml"
+            xmlFilePath = os.path.join(subFolderPath, xmlFileName)
+            with open(xmlFilePath, "w", encoding="utf-8") as file:
+                file.write(xml_content)
+
+            # STEP 5: Zip up the 4 files
+            zipFileName = f"{file_name}.zip"
+            zipFilePath = os.path.join(folderPath, zipFileName)
+            shutil.make_archive(zipFilePath[:-4], "zip", subFolderPath)
+
+            # STEP 6: Convert the zip file back into a .pqt file
+            newExt = ".pqt"
+            directory = os.path.dirname(zipFilePath)
+            fileNameWithoutExtension = os.path.splitext(os.path.basename(zipFilePath))[
+                0
+            ]
+            newFilePath = os.path.join(directory, fileNameWithoutExtension + newExt)
+            shutil.move(zipFilePath, newFilePath)
+
+            # STEP 7: Delete subFolder directory which is no longer needed
+            shutil.rmtree(subFolderPath, ignore_errors=True)
+
+            print(
+                f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset}' semantic model in the '{workspace}' workspace within the Files section of your lakehouse."
+            )
+
+        a = 0
+        for t_map in table_chunks:
+            if a > 0:
+                file_name = f"{file_name}_{a}"
+            a += 1
+            create_pqt(t_map, expr_map, file_name=file_name)
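
For orientation, a minimal usage sketch of the updated function follows. It assumes a Microsoft Fabric notebook with a lakehouse attached (the function raises ValueError otherwise) and that create_pqt_file is exported from sempy_labs.migration as in this package's __init__; the dataset name is a placeholder.

from sempy_labs.migration import create_pqt_file

# Writes PowerQueryTemplate.pqt to the Files section of the attached lakehouse.
# "AdventureWorks" is a placeholder model name; workspace=None resolves to the
# workspace of the running notebook via fabric.resolve_workspace_name.
create_pqt_file(
    dataset="AdventureWorks",
    workspace=None,
    file_name="PowerQueryTemplate",
)

# Per the new docstring, a model with more than 50 Power Query tables produces
# additional template files (e.g. PowerQueryTemplate_1.pqt for the second
# chunk), since each Dataflows Gen2 template is capped at 50 tables.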
|