semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff shows the changes between package versions as they appear in their respective public registries. It covers only the content of publicly released versions and is provided for informational purposes.

Potentially problematic release.

Files changed (113)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -15
  5. sempy_labs/_ai.py +42 -85
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +12 -8
  33. sempy_labs/_connections.py +77 -70
  34. sempy_labs/_dax.py +7 -9
  35. sempy_labs/_generate_semantic_model.py +75 -90
  36. sempy_labs/_helper_functions.py +371 -20
  37. sempy_labs/_icons.py +23 -0
  38. sempy_labs/_list_functions.py +855 -427
  39. sempy_labs/_model_auto_build.py +4 -3
  40. sempy_labs/_model_bpa.py +307 -1118
  41. sempy_labs/_model_bpa_bulk.py +363 -0
  42. sempy_labs/_model_bpa_rules.py +831 -0
  43. sempy_labs/_model_dependencies.py +20 -16
  44. sempy_labs/_one_lake_integration.py +18 -12
  45. sempy_labs/_query_scale_out.py +116 -129
  46. sempy_labs/_refresh_semantic_model.py +23 -10
  47. sempy_labs/_translations.py +367 -288
  48. sempy_labs/_vertipaq.py +152 -123
  49. sempy_labs/directlake/__init__.py +7 -1
  50. sempy_labs/directlake/_directlake_schema_compare.py +33 -30
  51. sempy_labs/directlake/_directlake_schema_sync.py +60 -77
  52. sempy_labs/directlake/_dl_helper.py +233 -0
  53. sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
  54. sempy_labs/directlake/_get_shared_expression.py +5 -3
  55. sempy_labs/directlake/_guardrails.py +20 -16
  56. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  57. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
  60. sempy_labs/directlake/_warm_cache.py +7 -4
  61. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  62. sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
  63. sempy_labs/lakehouse/_lakehouse.py +5 -3
  64. sempy_labs/lakehouse/_shortcuts.py +20 -13
  65. sempy_labs/migration/__init__.py +1 -1
  66. sempy_labs/migration/_create_pqt_file.py +184 -186
  67. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
  68. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
  69. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
  70. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
  71. sempy_labs/migration/_migration_validation.py +2 -2
  72. sempy_labs/migration/_refresh_calc_tables.py +94 -100
  73. sempy_labs/report/_BPAReportTemplate.json +232 -0
  74. sempy_labs/report/__init__.py +6 -2
  75. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  76. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  77. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  92. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  93. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  94. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  95. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  96. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  97. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  98. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  99. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  100. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  101. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  102. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  103. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  104. sempy_labs/report/_generate_report.py +260 -139
  105. sempy_labs/report/_report_functions.py +90 -59
  106. sempy_labs/report/_report_rebind.py +40 -34
  107. sempy_labs/tom/__init__.py +1 -4
  108. sempy_labs/tom/_model.py +601 -181
  109. semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
  110. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  111. sempy_labs/directlake/_fallback.py +0 -58
  112. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  113. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/migration/_create_pqt_file.py

@@ -1,8 +1,8 @@
 import sempy
 import sempy.fabric as fabric
-import json, os, shutil
-import xml.etree.ElementTree as ET
-from sempy_labs._list_functions import list_tables
+import json
+import os
+import shutil
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy._utils._log import log
 from typing import Optional
@@ -11,10 +11,16 @@ import sempy_labs._icons as icons
 
 @log
 def create_pqt_file(
-    dataset: str, workspace: Optional[str] = None, file_name: Optional[str] = 'PowerQueryTemplate'
+    dataset: str,
+    workspace: Optional[str] = None,
+    file_name: Optional[str] = "PowerQueryTemplate",
 ):
     """
-    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is saved within the Files section of your lakehouse.
+    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
+    saved within the Files section of your lakehouse.
+
+    Dataflows Gen2 has a `limit of 50 tables <https://learn.microsoft.com/power-query/power-query-online-limits>`_. If there are more than 50 tables, this will save multiple Power Query Template
+    files (with each file having a max of 50 tables).
 
     Parameters
     ----------
@@ -28,10 +34,14 @@ def create_pqt_file(
         The name of the Power Query Template file to be generated.
     """
 
-    lakeAttach = lakehouse_attached()
+    sempy.fabric._client._utils._init_analysis_services()
+    import Microsoft.AnalysisServices.Tabular as TOM
+    from sempy_labs.tom import connect_semantic_model
 
-    if lakeAttach is False:
-        raise ValueError(f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+    if not lakehouse_attached():
+        raise ValueError(
+            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+        )
 
     workspace = fabric.resolve_workspace_name(workspace)
 
@@ -39,192 +49,180 @@ def create_pqt_file(
     subFolderPath = os.path.join(folderPath, "pqtnewfolder")
     os.makedirs(subFolderPath, exist_ok=True)
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfT = list_tables(dataset, workspace)
-    dfE = fabric.list_expressions(dataset=dataset, workspace=workspace)
-
-    # Check if M-partitions are used
-    if any(dfP["Source Type"] == "M"):
-
-        class QueryMetadata:
-            def __init__(
-                self,
-                QueryName,
-                QueryGroupId=None,
-                LastKnownIsParameter=None,
-                LastKnownResultTypeName=None,
-                LoadEnabled=True,
-                IsHidden=False,
-            ):
-                self.QueryName = QueryName
-                self.QueryGroupId = QueryGroupId
-                self.LastKnownIsParameter = LastKnownIsParameter
-                self.LastKnownResultTypeName = LastKnownResultTypeName
-                self.LoadEnabled = LoadEnabled
-                self.IsHidden = IsHidden
-
-        class RootObject:
-            def __init__(
-                self, DocumentLocale, EngineVersion, QueriesMetadata, QueryGroups=None
-            ):
-                if QueryGroups is None:
-                    QueryGroups = []
-                self.DocumentLocale = DocumentLocale
-                self.EngineVersion = EngineVersion
-                self.QueriesMetadata = QueriesMetadata
-                self.QueryGroups = QueryGroups
-
-        # STEP 1: Create MashupDocument.pq
-        mdfileName = "MashupDocument.pq"
-        mdFilePath = os.path.join(subFolderPath, mdfileName)
-        sb = "section Section1;"
-        for table_name in dfP["Table Name"].unique():
-            tName = '#"' + table_name + '"'
-            sourceExpression = dfT.loc[
-                (dfT["Name"] == table_name), "Source Expression"
-            ].iloc[0]
-            refreshPolicy = dfT.loc[(dfT["Name"] == table_name), "Refresh Policy"].iloc[
-                0
-            ]
-            sourceType = dfP.loc[(dfP["Table Name"] == table_name), "Source Type"].iloc[
-                0
-            ]
-
-            if sourceType == "M" or refreshPolicy:
-                sb = sb + "\n" + "shared " + tName + " = "
-
-            partitions_in_table = dfP.loc[
-                dfP["Table Name"] == table_name, "Partition Name"
-            ].unique()
-
-            i = 1
-            for partition_name in partitions_in_table:
-                pSourceType = dfP.loc[
-                    (dfP["Table Name"] == table_name)
-                    & (dfP["Partition Name"] == partition_name),
-                    "Source Type",
-                ].iloc[0]
-                pQuery = dfP.loc[
-                    (dfP["Table Name"] == table_name)
-                    & (dfP["Partition Name"] == partition_name),
-                    "Query",
-                ].iloc[0]
-
-                if pQuery is not None:
+    with connect_semantic_model(
+        dataset=dataset, workspace=workspace, readonly=True
+    ) as tom:
+        if not any(
+            p.SourceType == TOM.PartitionSourceType.M for p in tom.all_partitions()
+        ) and not any(t.RefreshPolicy for t in tom.model.Tables):
+            print(
+                f"{icons.info} The '{dataset}' semantic model within the '{workspace}' workspace has no Power Query logic."
+            )
+            return
+
+        table_map = {}
+        expr_map = {}
+
+        for t in tom.model.Tables:
+            table_name = t.Name
+            for char in icons.special_characters:
+                table_name = table_name.replace(char, "")
+            if t.RefreshPolicy:
+                table_map[table_name] = t.RefreshPolicy.SourceExpression
+            elif any(p.SourceType == TOM.PartitionSourceType.M for p in t.Partitions):
+                part_name = next(
+                    p.Name
+                    for p in t.Partitions
+                    if p.SourceType == TOM.PartitionSourceType.M
+                )
+                expr = t.Partitions[part_name].Source.Expression
+                table_map[table_name] = expr
+
+        for e in tom.model.Expressions:
+            expr_map[e.Name] = [str(e.Kind), e.Expression]
+
+        # Dataflows Gen2 max table limit is 50.
+        max_length = 50
+        table_chunks = [
+            dict(list(table_map.items())[i : i + max_length])
+            for i in range(0, len(table_map), max_length)
+        ]
+
+        def create_pqt(table_map: dict, expr_map: dict, file_name: str):
+
+            class QueryMetadata:
+                def __init__(
+                    self,
+                    QueryName,
+                    QueryGroupId=None,
+                    LastKnownIsParameter=None,
+                    LastKnownResultTypeName=None,
+                    LoadEnabled=True,
+                    IsHidden=False,
+                ):
+                    self.QueryName = QueryName
+                    self.QueryGroupId = QueryGroupId
+                    self.LastKnownIsParameter = LastKnownIsParameter
+                    self.LastKnownResultTypeName = LastKnownResultTypeName
+                    self.LoadEnabled = LoadEnabled
+                    self.IsHidden = IsHidden
+
+            class RootObject:
+                def __init__(
+                    self,
+                    DocumentLocale,
+                    EngineVersion,
+                    QueriesMetadata,
+                    QueryGroups=None,
+                ):
+                    if QueryGroups is None:
+                        QueryGroups = []
+                    self.DocumentLocale = DocumentLocale
+                    self.EngineVersion = EngineVersion
+                    self.QueriesMetadata = QueriesMetadata
+                    self.QueryGroups = QueryGroups
+
+            # STEP 1: Create MashupDocument.pq
+            mdfileName = "MashupDocument.pq"
+            mdFilePath = os.path.join(subFolderPath, mdfileName)
+            sb = "section Section1;"
+            for t_name, query in table_map.items():
+                sb = f'{sb}\nshared #"{t_name}" = '
+                if query is not None:
                     pQueryNoSpaces = (
-                        pQuery.replace(" ", "")
+                        query.replace(" ", "")
                         .replace("\n", "")
                         .replace("\t", "")
                         .replace("\r", "")
                     )
                     if pQueryNoSpaces.startswith('letSource=""'):
-                        pQuery = 'let\n\tSource = ""\nin\n\tSource'
-
-                if pSourceType == "M" and i == 1:
-                    sb = sb + pQuery + ";"
-                elif refreshPolicy and i == 1:
-                    sb = sb + sourceExpression + ";"
-                i += 1
-
-        for index, row in dfE.iterrows():
-            expr = row["Expression"]
-            eName = row["Name"]
-            eName = '#"' + eName + '"'
-            sb = sb + "\n" + "shared " + eName + " = " + expr + ";"
-
-        with open(mdFilePath, "w") as file:
-            file.write(sb)
-
-        # STEP 2: Create the MashupMetadata.json file
-        mmfileName = "MashupMetadata.json"
-        mmFilePath = os.path.join(subFolderPath, mmfileName)
-        queryMetadata = []
-
-        for tName in dfP["Table Name"].unique():
-            sourceType = dfP.loc[(dfP["Table Name"] == tName), "Source Type"].iloc[0]
-            refreshPolicy = dfT.loc[(dfT["Name"] == tName), "Refresh Policy"].iloc[0]
-            if sourceType == "M" or refreshPolicy:
-                queryMetadata.append(
-                    QueryMetadata(tName, None, None, None, True, False)
-                )
+                        query = 'let\n\tSource = ""\nin\n\tSource'
+                sb = f"{sb}{query};"
 
-        for i, r in dfE.iterrows():
-            eName = r["Name"]
-            eKind = r["Kind"]
-            if eKind == "M":
-                queryMetadata.append(
-                    QueryMetadata(eName, None, None, None, True, False)
-                )
-            else:
-                queryMetadata.append(
-                    QueryMetadata(eName, None, None, None, False, False)
-                )
+            for e_name, kind_expr in expr_map.items():
+                expr = kind_expr[1]
+                sb = f'{sb}\nshared #"{e_name}" = {expr};'
 
-        rootObject = RootObject("en-US", "2.126.453.0", queryMetadata)
-
-        def obj_to_dict(obj):
-            if isinstance(obj, list):
-                return [obj_to_dict(e) for e in obj]
-            elif hasattr(obj, "__dict__"):
-                return {k: obj_to_dict(v) for k, v in obj.__dict__.items()}
-            else:
-                return obj
-
-        jsonContent = json.dumps(obj_to_dict(rootObject), indent=4)
-
-        with open(mmFilePath, "w") as json_file:
-            json_file.write(jsonContent)
-
-        # STEP 3: Create Metadata.json file
-        mFileName = "Metadata.json"
-        mFilePath = os.path.join(subFolderPath, mFileName)
-        metaData = {"Name": "fileName", "Description": "", "Version": "1.0.0.0"}
-        jsonContent = json.dumps(metaData, indent=4)
-
-        with open(mFilePath, "w") as json_file:
-            json_file.write(jsonContent)
-
-        # STEP 4: Create [Content_Types].xml file:
-        ns = "http://schemas.openxmlformats.org/package/2006/content-types"
-        ET.register_namespace("", ns)
-        types = ET.Element("{%s}Types" % ns)
-        default1 = ET.SubElement(
-            types,
-            "{%s}Default" % ns,
-            {"Extension": "json", "ContentType": "application/json"},
-        )
-        default2 = ET.SubElement(
-            types,
-            "{%s}Default" % ns,
-            {"Extension": "pq", "ContentType": "application/x-ms-m"},
-        )
-        xmlDocument = ET.ElementTree(types)
-        xmlFileName = "[Content_Types].xml"
-        xmlFilePath = os.path.join(subFolderPath, xmlFileName)
-        xmlDocument.write(
-            xmlFilePath, xml_declaration=True, encoding="utf-8", method="xml"
-        )
-
-        # STEP 5: Zip up the 4 files
-        zipFileName = file_name + ".zip"
-        zipFilePath = os.path.join(folderPath, zipFileName)
-        shutil.make_archive(zipFilePath[:-4], "zip", subFolderPath)
+            with open(mdFilePath, "w") as file:
+                file.write(sb)
 
-        # STEP 6: Convert the zip file back into a .pqt file
-        newExt = ".pqt"
-        directory = os.path.dirname(zipFilePath)
-        fileNameWithoutExtension = os.path.splitext(os.path.basename(zipFilePath))[0]
-        newFilePath = os.path.join(directory, fileNameWithoutExtension + newExt)
-        shutil.move(zipFilePath, newFilePath)
+            # STEP 2: Create the MashupMetadata.json file
+            mmfileName = "MashupMetadata.json"
+            mmFilePath = os.path.join(subFolderPath, mmfileName)
+            queryMetadata = []
 
-        # STEP 7: Delete subFolder directory which is no longer needed
-        shutil.rmtree(subFolderPath, ignore_errors=True)
-
-        print(
-            f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset}' semantic model in the '{workspace}' workspace within the Files section of your lakehouse."
-        )
+            for t_name, query in table_map.items():
+                queryMetadata.append(
+                    QueryMetadata(t_name, None, None, None, True, False)
+                )
+            for e_name, kind_expr in expr_map.items():
+                e_kind = kind_expr[0]
+                if e_kind == "M":
+                    queryMetadata.append(
+                        QueryMetadata(e_name, None, None, None, True, False)
+                    )
+                else:
+                    queryMetadata.append(
+                        QueryMetadata(e_name, None, None, None, False, False)
+                    )
 
-    else:
-        print(
-            f"{icons.yellow_dot} The '{dataset}' semantic model in the '{workspace}' workspace does not use Power Query so a Power Query Template file cannot be generated."
-        )
+            rootObject = RootObject(
+                "en-US", "2.132.328.0", queryMetadata
+            )  # "2.126.453.0"
+
+            def obj_to_dict(obj):
+                if isinstance(obj, list):
+                    return [obj_to_dict(e) for e in obj]
+                elif hasattr(obj, "__dict__"):
+                    return {k: obj_to_dict(v) for k, v in obj.__dict__.items()}
+                else:
+                    return obj
+
+            jsonContent = json.dumps(obj_to_dict(rootObject), indent=4)
+
+            with open(mmFilePath, "w") as json_file:
+                json_file.write(jsonContent)
+
+            # STEP 3: Create Metadata.json file
+            mFileName = "Metadata.json"
+            mFilePath = os.path.join(subFolderPath, mFileName)
+            metaData = {"Name": f"{file_name}", "Description": "", "Version": "1.0.0.0"}
+            jsonContent = json.dumps(metaData, indent=4)
+
+            with open(mFilePath, "w") as json_file:
+                json_file.write(jsonContent)
+
+            # STEP 4: Create [Content_Types].xml file:
+            xml_content = """<?xml version="1.0" encoding="utf-8"?><Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types"><Default Extension="json" ContentType="application/json" /><Default Extension="pq" ContentType="application/x-ms-m" /></Types>"""
+            xmlFileName = "[Content_Types].xml"
+            xmlFilePath = os.path.join(subFolderPath, xmlFileName)
+            with open(xmlFilePath, "w", encoding="utf-8") as file:
+                file.write(xml_content)
+
+            # STEP 5: Zip up the 4 files
+            zipFileName = f"{file_name}.zip"
+            zipFilePath = os.path.join(folderPath, zipFileName)
+            shutil.make_archive(zipFilePath[:-4], "zip", subFolderPath)
+
+            # STEP 6: Convert the zip file back into a .pqt file
+            newExt = ".pqt"
+            directory = os.path.dirname(zipFilePath)
+            fileNameWithoutExtension = os.path.splitext(os.path.basename(zipFilePath))[
+                0
+            ]
+            newFilePath = os.path.join(directory, fileNameWithoutExtension + newExt)
+            shutil.move(zipFilePath, newFilePath)
+
+            # STEP 7: Delete subFolder directory which is no longer needed
+            shutil.rmtree(subFolderPath, ignore_errors=True)
+
+            print(
+                f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset}' semantic model in the '{workspace}' workspace within the Files section of your lakehouse."
+            )
+
+        a = 0
+        for t_map in table_chunks:
+            if a > 0:
+                file_name = f"{file_name}_{a}"
+            a += 1
+            create_pqt(t_map, expr_map, file_name=file_name)
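
Note on the new multi-file behavior: the 0.7.0 code splits the model's Power Query map into chunks of 50 tables (the Dataflows Gen2 limit) and writes one .pqt file per chunk. A minimal standalone sketch of that chunking and of the resulting file names; table_map here is a placeholder dict, not one built from a real semantic model:

# Standalone sketch of the 0.7.0 chunking logic; the table names and M
# expressions below are placeholders.
max_length = 50  # Dataflows Gen2 limit of 50 tables per template

table_map = {f"Table{i}": 'let\n\tSource = ""\nin\n\tSource' for i in range(120)}

table_chunks = [
    dict(list(table_map.items())[i : i + max_length])
    for i in range(0, len(table_map), max_length)
]
print([len(c) for c in table_chunks])  # [50, 50, 20] -> three .pqt files

# The release's naming loop reuses the updated file_name on each pass, so the
# suffixes compound: 'PowerQueryTemplate.pqt', 'PowerQueryTemplate_1.pqt',
# 'PowerQueryTemplate_1_2.pqt' for the three chunks above.
file_name = "PowerQueryTemplate"
a = 0
for t_map in table_chunks:
    if a > 0:
        file_name = f"{file_name}_{a}"
    a += 1
    print(f"{file_name}.pqt")

# Hypothetical invocation (dataset and workspace names are placeholders):
# from sempy_labs.migration import create_pqt_file
# create_pqt_file(dataset="AdventureWorks", workspace="Sales")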