semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243) hide show
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
@@ -0,0 +1,238 @@
1
+ import sempy
2
+ import json
3
+ import os
4
+ import shutil
5
+ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
6
+ from sempy._utils._log import log
7
+ from typing import Optional
8
+ import sempy_labs._icons as icons
9
+ from uuid import UUID
10
+ from sempy_labs._helper_functions import (
11
+ resolve_dataset_name_and_id,
12
+ resolve_workspace_name_and_id,
13
+ )
14
+
15
+
16
@log
def create_pqt_file(
    dataset: str | UUID,
    workspace: Optional[str | UUID] = None,
    file_name: str = "PowerQueryTemplate",
):
    """
    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
    saved within the Files section of your lakehouse.

    Dataflows Gen2 has a `limit of 50 tables <https://learn.microsoft.com/power-query/power-query-online-limits>`_. If there are more than 50 tables, this will save multiple Power Query Template
    files (with each file having a max of 50 tables).

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    file_name : str, default='PowerQueryTemplate'
        The name of the Power Query Template file to be generated.

    Raises
    ------
    ValueError
        If no lakehouse is attached to the notebook (the output is written to
        the lakehouse Files area).
    """

    # TOM (Tabular Object Model) types can only be imported after the .NET
    # Analysis Services runtime has been initialized.
    sempy.fabric._client._utils._init_analysis_services()
    import Microsoft.AnalysisServices.Tabular as TOM
    from sempy_labs.tom import connect_semantic_model

    # The .pqt file is saved under /lakehouse/default/Files, so an attached
    # lakehouse is a hard requirement.
    if not lakehouse_attached():
        raise ValueError(
            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
        )

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

    # Staging folder inside the lakehouse Files area: the four template parts
    # are written here, zipped into the final .pqt, then the folder is removed.
    folderPath = "/lakehouse/default/Files"
    subFolderPath = os.path.join(folderPath, "pqtnewfolder")

    with connect_semantic_model(
        dataset=dataset_id, workspace=workspace_id, readonly=True
    ) as tom:
        # Nothing to export when the model has neither M partitions nor
        # refresh policies (i.e. no Power Query logic at all).
        if not any(
            p.SourceType == TOM.PartitionSourceType.M for p in tom.all_partitions()
        ) and not any(t.RefreshPolicy for t in tom.model.Tables):
            print(
                f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no Power Query logic."
            )
            return

        # table_map: sanitized table name -> M expression for that table.
        # expr_map: expression name -> [kind-as-string, M expression].
        table_map = {}
        expr_map = {}

        for t in tom.model.Tables:
            table_name = t.Name
            # Strip characters that are not allowed in query names.
            for char in icons.special_characters:
                table_name = table_name.replace(char, "")
            if t.RefreshPolicy:
                # Incremental-refresh tables carry their M logic on the
                # refresh policy's source expression.
                # NOTE(review): SourceExpression may be None here, in which
                # case the literal text 'None' is written to the template
                # below — confirm whether that can occur in practice.
                table_map[table_name] = t.RefreshPolicy.SourceExpression
            elif any(p.SourceType == TOM.PartitionSourceType.M for p in t.Partitions):
                # Otherwise use the expression of the first M partition.
                part_name = next(
                    p.Name
                    for p in t.Partitions
                    if p.SourceType == TOM.PartitionSourceType.M
                )
                expr = t.Partitions[part_name].Source.Expression
                table_map[table_name] = expr

        for e in tom.model.Expressions:
            expr_map[e.Name] = [str(e.Kind), e.Expression]

        expressions = tom.model.Expressions.Count

        # Dataflows Gen2 max table limit is 50.
        # Shared expressions count against the limit, so each chunk holds at
        # most (50 - number_of_expressions) tables.
        # NOTE(review): if the model has 50 or more expressions, max_length
        # is <= 0 and the range() below raises ValueError — confirm whether
        # that case needs explicit handling.
        max_length = 50 - expressions
        table_chunks = [
            dict(list(table_map.items())[i : i + max_length])
            for i in range(0, len(table_map), max_length)
        ]

        def create_pqt(table_map: dict, expr_map: dict, file_name: str):
            # Build one .pqt file (a renamed zip archive of four files) for
            # the given tables/expressions and save it to the lakehouse.

            os.makedirs(subFolderPath, exist_ok=True)

            class QueryMetadata:
                # Serializable record for one query, mirroring the
                # QueriesMetadata entries of MashupMetadata.json.
                def __init__(
                    self,
                    QueryName,
                    QueryGroupId=None,
                    LastKnownIsParameter=None,
                    LastKnownResultTypeName=None,
                    LoadEnabled=True,
                    IsHidden=False,
                ):
                    self.QueryName = QueryName
                    self.QueryGroupId = QueryGroupId
                    self.LastKnownIsParameter = LastKnownIsParameter
                    self.LastKnownResultTypeName = LastKnownResultTypeName
                    self.LoadEnabled = LoadEnabled
                    self.IsHidden = IsHidden

            class RootObject:
                # Top-level structure of MashupMetadata.json.
                def __init__(
                    self,
                    DocumentLocale,
                    EngineVersion,
                    QueriesMetadata,
                    QueryGroups=None,
                ):
                    if QueryGroups is None:
                        QueryGroups = []
                    self.DocumentLocale = DocumentLocale
                    self.EngineVersion = EngineVersion
                    self.QueriesMetadata = QueriesMetadata
                    self.QueryGroups = QueryGroups

            # STEP 1: Create MashupDocument.pq
            # One 'shared' member per table/expression inside a single M section.
            mdfileName = "MashupDocument.pq"
            mdFilePath = os.path.join(subFolderPath, mdfileName)
            sb = "section Section1;"
            for t_name, query in table_map.items():
                sb = f'{sb}\nshared #"{t_name}" = '
                if query is not None:
                    # Normalize placeholder queries ('let Source = "" in
                    # Source' under any whitespace) to a canonical layout.
                    pQueryNoSpaces = (
                        query.replace(" ", "")
                        .replace("\n", "")
                        .replace("\t", "")
                        .replace("\r", "")
                    )
                    if pQueryNoSpaces.startswith('letSource=""'):
                        query = 'let\n\tSource = ""\nin\n\tSource'
                sb = f"{sb}{query};"

            for e_name, kind_expr in expr_map.items():
                expr = kind_expr[1]
                sb = f'{sb}\nshared #"{e_name}" = {expr};'

            with open(mdFilePath, "w") as file:
                file.write(sb)

            # STEP 2: Create the MashupMetadata.json file
            # Tables are always load-enabled; shared expressions are
            # load-enabled only when their kind is "M".
            mmfileName = "MashupMetadata.json"
            mmFilePath = os.path.join(subFolderPath, mmfileName)
            queryMetadata = []

            for t_name, query in table_map.items():
                queryMetadata.append(
                    QueryMetadata(t_name, None, None, None, True, False)
                )
            for e_name, kind_expr in expr_map.items():
                e_kind = kind_expr[0]
                if e_kind == "M":
                    queryMetadata.append(
                        QueryMetadata(e_name, None, None, None, True, False)
                    )
                else:
                    queryMetadata.append(
                        QueryMetadata(e_name, None, None, None, False, False)
                    )

            rootObject = RootObject(
                "en-US", "2.132.328.0", queryMetadata
            )  # "2.126.453.0"

            def obj_to_dict(obj):
                # Recursively convert the metadata objects into plain
                # dicts/lists so they can be JSON-serialized.
                if isinstance(obj, list):
                    return [obj_to_dict(e) for e in obj]
                elif hasattr(obj, "__dict__"):
                    return {k: obj_to_dict(v) for k, v in obj.__dict__.items()}
                else:
                    return obj

            jsonContent = json.dumps(obj_to_dict(rootObject), indent=4)

            with open(mmFilePath, "w") as json_file:
                json_file.write(jsonContent)

            # STEP 3: Create Metadata.json file
            mFileName = "Metadata.json"
            mFilePath = os.path.join(subFolderPath, mFileName)
            metaData = {"Name": f"{file_name}", "Description": "", "Version": "1.0.0.0"}
            jsonContent = json.dumps(metaData, indent=4)

            with open(mFilePath, "w") as json_file:
                json_file.write(jsonContent)

            # STEP 4: Create [Content_Types].xml file:
            xml_content = """<?xml version="1.0" encoding="utf-8"?><Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types"><Default Extension="json" ContentType="application/json" /><Default Extension="pq" ContentType="application/x-ms-m" /></Types>"""
            xmlFileName = "[Content_Types].xml"
            xmlFilePath = os.path.join(subFolderPath, xmlFileName)
            with open(xmlFilePath, "w", encoding="utf-8") as file:
                file.write(xml_content)

            # STEP 5: Zip up the 4 files
            # make_archive appends '.zip' itself, hence the [:-4] to drop it
            # from the target path first.
            zipFileName = f"{file_name}.zip"
            zipFilePath = os.path.join(folderPath, zipFileName)
            shutil.make_archive(zipFilePath[:-4], "zip", subFolderPath)

            # STEP 6: Convert the zip file back into a .pqt file
            newExt = ".pqt"
            directory = os.path.dirname(zipFilePath)
            fileNameWithoutExtension = os.path.splitext(os.path.basename(zipFilePath))[
                0
            ]
            newFilePath = os.path.join(directory, fileNameWithoutExtension + newExt)
            shutil.move(zipFilePath, newFilePath)

            # STEP 7: Delete subFolder directory which is no longer needed
            shutil.rmtree(subFolderPath, ignore_errors=True)

            print(
                f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset_name}' semantic model in the '{workspace_name}' workspace within the Files section of your lakehouse."
            )

        # Write one .pqt per chunk; files after the first get a numeric
        # suffix ('<name>_1', '<name>_2', ...).
        a = 0
        for t_map in table_chunks:
            if a > 0:
                save_file_name = f"{file_name}_{a}"
            else:
                save_file_name = file_name
            a += 1
            create_pqt(t_map, expr_map, file_name=save_file_name)
@@ -0,0 +1,105 @@
1
+ import sempy
2
+ from uuid import UUID
3
+ import sempy_labs._icons as icons
4
+ from typing import Optional
5
+ from sempy._utils._log import log
6
+
7
+
8
@log
def migrate_direct_lake_to_import(
    dataset: str | UUID,
    workspace: Optional[str | UUID] = None,
    mode: str = "import",
):
    """
    Migrates a semantic model or specific table(s) from a Direct Lake mode to import or DirectQuery mode. After running this function, you must go to the semantic model settings and update the cloud connection. Not doing so will result in an inability to refresh/use the semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    mode : str, default="import"
        The mode to migrate to. Can be either "import" or "directquery".

    Raises
    ------
    ValueError
        If `mode` is not one of "import", "directquery" or "dq".
    """

    # TOM (Tabular Object Model) types can only be imported after the .NET
    # Analysis Services runtime has been initialized.
    sempy.fabric._client._utils._init_analysis_services()
    import Microsoft.AnalysisServices.Tabular as TOM
    from sempy_labs.tom import connect_semantic_model

    # Map accepted user inputs (case-insensitive) to canonical TOM mode names.
    modes = {
        "import": "Import",
        "directquery": "DirectQuery",
        "dq": "DirectQuery",
    }

    mode = mode.lower()
    actual_mode = modes.get(mode)
    if actual_mode is None:
        raise ValueError(f"Invalid mode '{mode}'. Must be one of {list(modes.keys())}.")

    with connect_semantic_model(
        dataset=dataset, workspace=workspace, readonly=False
    ) as tom:

        # Only Direct Lake models can be migrated; bail out quietly otherwise.
        if not tom.is_direct_lake():
            print(
                f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
            )
            return

        table_list = [t for t in tom.model.Tables]

        for t in table_list:
            table_name = t.Name
            # Only migrate tables whose single partition is in Direct Lake
            # mode; calculated tables etc. are left untouched.
            if t.Partitions.Count == 1 and all(
                p.Mode == TOM.ModeType.DirectLake for p in t.Partitions
            ):
                p = next(p for p in t.Partitions)
                partition_name = p.Name
                entity_name = p.Source.EntityName
                schema_name = p.Source.SchemaName or "dbo"
                # Rename the Direct Lake partition out of the way so the new
                # M partition can reuse its original name.
                t.Partitions[partition_name].Name = f"{partition_name}_remove"

                # Generate M expression for import/DirectQuery partition.
                expression = f"""let\n\tSource = DatabaseQuery,\n\tData = Source{{[Schema="{schema_name}",Item="{entity_name}"]}}[Data]\nin\n\tData"""

                # Add the replacement M partition in the target mode...
                tom.add_m_partition(
                    table_name=table_name,
                    partition_name=partition_name,
                    expression=expression,
                    mode=actual_mode,
                )
                # ...then remove the (renamed) Direct Lake partition.
                tom.remove_object(object=p)

        # FIX: set the model's default mode to the requested target mode.
        # The original hard-coded TOM.ModeType.Import even when migrating to
        # DirectQuery, contradicting the per-partition mode set above.
        tom.model.Model.DefaultMode = (
            TOM.ModeType.Import if actual_mode == "Import" else TOM.ModeType.DirectQuery
        )
        print(
            f"{icons.green_dot} All tables which were in Direct Lake mode have been migrated to '{actual_mode}' mode."
        )