semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_generate_semantic_model.py
@@ -0,0 +1,521 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ import json
+ import os
+ from typing import Optional, List
+ from sempy._utils._log import log
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     resolve_dataset_name_and_id,
+     _conv_b64,
+     _decode_b64,
+     _base_api,
+     _mount,
+ )
+ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+ import sempy_labs._icons as icons
+ from sempy_labs._refresh_semantic_model import refresh_semantic_model
+ from uuid import UUID
+
+
+ @log
+ def create_blank_semantic_model(
+     dataset: str,
+     compatibility_level: int = 1702,
+     workspace: Optional[str | UUID] = None,
+     overwrite: bool = True,
+ ):
+     """
+     Creates a new blank semantic model (no tables, columns, etc.).
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     compatibility_level : int, default=1702
+         The compatibility level of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     overwrite : bool, default=True
+         If set to True, overwrites the existing semantic model in the workspace if it exists.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
+     dfD_filt = dfD[dfD["Dataset Name"] == dataset]
+
+     if len(dfD_filt) > 0 and not overwrite:
+         raise ValueError(
+             f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace_name}' workspace. The 'overwrite' parameter is set to False, so the new blank semantic model was not created."
+         )
+
+     min_compat = 1500
+     if compatibility_level < min_compat:
+         raise ValueError(
+             f"{icons.red_dot} Compatibility level must be at least {min_compat}."
+         )
+
+     # If the model does not exist
+     if len(dfD_filt) == 0:
+         tmsl = f"""
+         {{
+             "createOrReplace": {{
+                 "object": {{
+                     "database": "{dataset}"
+                 }},
+                 "database": {{
+                     "name": "{dataset}",
+                     "compatibilityLevel": {compatibility_level},
+                     "model": {{
+                         "cultures": [
+                             {{
+                                 "name": "en-US",
+                                 "linguisticMetadata": {{
+                                     "content": {{
+                                         "Version": "1.0.0",
+                                         "Language": "en-US"
+                                     }},
+                                     "contentType": "json"
+                                 }}
+                             }}
+                         ],
+                         "collation": "Latin1_General_100_BIN2_UTF8",
+                         "dataAccessOptions": {{
+                             "legacyRedirects": true,
+                             "returnErrorValuesAsNull": true
+                         }},
+                         "defaultPowerBIDataSourceVersion": "powerBI_V3",
+                         "sourceQueryCulture": "en-US"
+                     }}
+                 }}
+             }}
+         }}
+         """
+     else:
+         tmsl = f"""
+         {{
+             "createOrReplace": {{
+                 "object": {{
+                     "database": "{dataset}"
+                 }},
+                 "database": {{
+                     "name": "{dataset}",
+                     "compatibilityLevel": {compatibility_level},
+                     "model": {{
+                         "culture": "en-US",
+                         "defaultPowerBIDataSourceVersion": "powerBI_V3"
+                     }}
+                 }}
+             }}
+         }}
+         """
+
+     fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
+
+     print(
+         f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace_name}' workspace."
+     )
+
+
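A minimal usage sketch for create_blank_semantic_model, as it might run in a Fabric notebook. The model and workspace names are hypothetical placeholders; the top-level import assumes the re-export declared in sempy_labs/__init__.py.

import sempy_labs as labs

# Create an empty model in the resolved workspace at the default
# compatibility level (1702); overwrite defaults to True.
labs.create_blank_semantic_model(dataset="Blank Model")

# Pin the workspace and refuse to replace an existing model.
labs.create_blank_semantic_model(
    dataset="Blank Model", workspace="Sales Workspace", overwrite=False
)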
+ @log
+ def create_semantic_model_from_bim(
+     dataset: str, bim_file: dict, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Creates a new semantic model based on a Model.bim file.
+
+     This is a wrapper function for the following API: `Items - Create Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/create-semantic-model>`_.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     bim_file : dict
+         The model.bim file.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
+     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]
+
+     if not dfI_filt.empty:
+         raise ValueError(
+             f"{icons.red_dot} The '{dataset}' semantic model already exists in the '{workspace_name}' workspace."
+         )
+
+     defPBIDataset = {"version": "1.0", "settings": {}}
+     payloadPBIDefinition = _conv_b64(defPBIDataset)
+     payloadBim = _conv_b64(bim_file)
+
+     payload = {
+         "displayName": dataset,
+         "definition": {
+             "parts": [
+                 {
+                     "path": "model.bim",
+                     "payload": payloadBim,
+                     "payloadType": "InlineBase64",
+                 },
+                 {
+                     "path": "definition.pbidataset",
+                     "payload": payloadPBIDefinition,
+                     "payloadType": "InlineBase64",
+                 },
+             ]
+         },
+     }
+
+     _base_api(
+         request=f"v1/workspaces/{workspace_id}/semanticModels",
+         payload=payload,
+         method="post",
+         lro_return_status_code=True,
+         status_codes=[201, 202],
+     )
+
+     print(
+         f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
+     )
+
+
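A hedged sketch of how create_semantic_model_from_bim pairs with get_semantic_model_bim (defined later in this file) to clone a model across workspaces; the dataset and workspace names are placeholders.

import sempy_labs as labs

# Pull the TMSL (.bim) definition of an existing model as a dict...
bim = labs.get_semantic_model_bim(dataset="Sales Model", workspace="Dev Workspace")

# ...and materialize it as a new model elsewhere. A ValueError is
# raised if the target name already exists in the target workspace.
labs.create_semantic_model_from_bim(
    dataset="Sales Model (Copy)", bim_file=bim, workspace="Test Workspace"
)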
+ @log
+ def update_semantic_model_from_bim(
+     dataset: str | UUID, bim_file: dict, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Updates a semantic model definition based on a Model.bim file.
+
+     This is a wrapper function for the following API: `Items - Update Semantic Model Definition <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/update-semantic-model-definition>`_.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     bim_file : dict
+         The model.bim file.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     defPBIDataset = {"version": "1.0", "settings": {}}
+     payloadPBIDefinition = _conv_b64(defPBIDataset)
+     payloadBim = _conv_b64(bim_file)
+
+     payload = {
+         "displayName": dataset_name,
+         "definition": {
+             "parts": [
+                 {
+                     "path": "model.bim",
+                     "payload": payloadBim,
+                     "payloadType": "InlineBase64",
+                 },
+                 {
+                     "path": "definition.pbidataset",
+                     "payload": payloadPBIDefinition,
+                     "payloadType": "InlineBase64",
+                 },
+             ]
+         },
+     }
+
+     _base_api(
+         request=f"v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/updateDefinition",
+         payload=payload,
+         method="post",
+         lro_return_status_code=True,
+         status_codes=None,
+     )
+
+     print(
+         f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
+     )
+
+
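An illustrative round trip with update_semantic_model_from_bim: export the definition, patch the dict in memory, and push it back. The "compatibilityLevel" key reflects the standard top-level layout of a .bim file; the model name is a placeholder.

import sempy_labs as labs

bim = labs.get_semantic_model_bim(dataset="Sales Model")
bim["compatibilityLevel"] = 1702  # a .bim file carries this key at its top level
labs.update_semantic_model_from_bim(dataset="Sales Model", bim_file=bim)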
+ @log
+ def deploy_semantic_model(
+     source_dataset: str,
+     source_workspace: Optional[str | UUID] = None,
+     target_dataset: Optional[str] = None,
+     target_workspace: Optional[str | UUID] = None,
+     refresh_target_dataset: bool = True,
+     overwrite: bool = False,
+     perspective: Optional[str] = None,
+ ):
+     """
+     Deploys a semantic model based on an existing semantic model.
+
+     Parameters
+     ----------
+     source_dataset : str
+         Name of the semantic model to deploy.
+     source_workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     target_dataset : str, default=None
+         Name of the new semantic model to be created.
+         Defaults to None, which reuses the source_dataset name.
+     target_workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID in which the new semantic model will be deployed.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     refresh_target_dataset : bool, default=True
+         If set to True, initiates a full refresh of the target semantic model in the target workspace.
+     overwrite : bool, default=False
+         If set to True, overwrites the existing semantic model in the workspace if it exists.
+     perspective : str, default=None
+         Set this to the name of a perspective in the model to reduce the deployed model to the tables/columns/measures/hierarchies within that perspective.
+     """
+
+     (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+         source_workspace
+     )
+
+     (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+         target_workspace
+     )
+
+     if target_dataset is None:
+         target_dataset = source_dataset
+
+     if (
+         target_dataset == source_dataset
+         and target_workspace_name == source_workspace_name
+     ):
+         raise ValueError(
+             f"{icons.red_dot} The 'source_dataset' and 'target_dataset' parameters have the same value, and the 'source_workspace' and 'target_workspace' "
+             f"parameters have the same value. At least one of these must be different. Please update the parameters."
+         )
+
+     dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
+     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
+     if not dfD_filt.empty and not overwrite:
+         raise ValueError(
+             f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False, so the source semantic model was not deployed to the target destination."
+         )
+
+     if perspective is not None:
+         from sempy_labs.tom import connect_semantic_model
+
+         with connect_semantic_model(
+             dataset=source_dataset, workspace=source_workspace, readonly=True
+         ) as tom:
+             df_added = tom._reduce_model(perspective_name=perspective)
+             bim = tom.get_bim()
+     else:
+         bim = get_semantic_model_bim(
+             dataset=source_dataset, workspace=source_workspace_id
+         )
+
+     # Create the semantic model if the model does not exist
+     if dfD_filt.empty:
+         create_semantic_model_from_bim(
+             dataset=target_dataset,
+             bim_file=bim,
+             workspace=target_workspace_id,
+         )
+     # Update the semantic model if the model exists
+     else:
+         update_semantic_model_from_bim(
+             dataset=target_dataset, bim_file=bim, workspace=target_workspace_id
+         )
+
+     if refresh_target_dataset:
+         refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)
+
+     if perspective is not None:
+         return df_added
+
+
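A sketch of a typical dev-to-prod promotion with deploy_semantic_model; workspace and perspective names are placeholders.

import sempy_labs as labs

# target_dataset defaults to the source name; here only the workspace differs.
labs.deploy_semantic_model(
    source_dataset="Sales Model",
    source_workspace="Dev Workspace",
    target_workspace="Prod Workspace",
    refresh_target_dataset=True,
    overwrite=True,
)

# Deploy only the objects within one perspective; the reduction
# summary dataframe from tom._reduce_model is returned.
df_added = labs.deploy_semantic_model(
    source_dataset="Sales Model",
    source_workspace="Dev Workspace",
    target_dataset="Sales Model (Exec)",
    target_workspace="Prod Workspace",
    perspective="Executive",
    overwrite=True,
)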
+ @log
+ def get_semantic_model_bim(
+     dataset: str | UUID,
+     workspace: Optional[str | UUID] = None,
+     save_to_file_name: Optional[str] = None,
+ ) -> dict:
+     """
+     Extracts the Model.bim file for a given semantic model.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID in which the semantic model resides.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     save_to_file_name : str, default=None
+         If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
+
+     Returns
+     -------
+     dict
+         The Model.bim file for the semantic model.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     bimJson = get_semantic_model_definition(
+         dataset=dataset_id,
+         workspace=workspace_id,
+         format="TMSL",
+         return_dataframe=False,
+     )
+
+     if save_to_file_name is not None:
+         if not lakehouse_attached():
+             raise ValueError(
+                 f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+             )
+
+         local_path = _mount()
+         save_folder = f"{local_path}/Files"
+         file_ext = ".bim"
+         if not save_to_file_name.endswith(file_ext):
+             save_to_file_name = f"{save_to_file_name}{file_ext}"
+         file_path = os.path.join(save_folder, save_to_file_name)
+         with open(file_path, "w") as json_file:
+             json.dump(bimJson, json_file, indent=4)
+         print(
+             f"{icons.green_dot} The {file_ext} file for the '{dataset_name}' semantic model has been saved to the lakehouse attached to the notebook within: 'Files/{save_to_file_name}'.\n\n"
+         )
+
+     return bimJson
+
+
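A short sketch for get_semantic_model_bim. Saving requires a lakehouse attached to the notebook, and the ".bim" extension is appended automatically when missing; names are placeholders.

import sempy_labs as labs

# Returns the model.bim content as a dict and, because save_to_file_name
# is set, also writes it to Files/SalesModel.bim in the attached lakehouse.
bim = labs.get_semantic_model_bim(dataset="Sales Model", save_to_file_name="SalesModel")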
+ @log
+ def get_semantic_model_definition(
+     dataset: str | UUID,
+     format: str = "TMSL",
+     workspace: Optional[str | UUID] = None,
+     return_dataframe: bool = True,
+ ) -> pd.DataFrame | dict | List:
+     """
+     Extracts the semantic model definition.
+
+     This is a wrapper function for the following API: `Items - Get Semantic Model Definition <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/get-semantic-model-definition>`_.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     format : str, default="TMSL"
+         The output format. Valid options are "TMSL" or "TMDL". "TMSL" returns the .bim file, whereas "TMDL" returns the collection of TMDL files. "bim" may also be entered as an alias for "TMSL".
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID in which the semantic model resides.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     return_dataframe : bool, default=True
+         If True, returns a dataframe.
+         If False, returns the .bim file for the TMSL format, or a list of the decoded TMDL files for the TMDL format.
+
+     Returns
+     -------
+     pandas.DataFrame | dict | List
+         A pandas dataframe with the semantic model definition, or the file(s) comprising the semantic model definition.
+     """
+
+     valid_formats = ["TMSL", "TMDL"]
+
+     format = format.upper()
+     if format == "BIM":
+         format = "TMSL"
+     if format not in valid_formats:
+         raise ValueError(
+             f"{icons.red_dot} Invalid format. Valid options: {valid_formats}."
+         )
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     result = _base_api(
+         request=f"v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
+         method="post",
+         lro_return_json=True,
+         status_codes=None,
+     )
+
+     files = result["definition"]["parts"]
+
+     if return_dataframe:
+         return pd.json_normalize(files)
+     elif format == "TMSL":
+         payload = next(
+             (part["payload"] for part in files if part["path"] == "model.bim"), None
+         )
+         return json.loads(_decode_b64(payload))
+     else:
+         decoded_parts = [
+             {"file_name": part["path"], "content": _decode_b64(part["payload"])}
+             for part in files
+         ]
+
+         return decoded_parts
+
+
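A hedged sketch of the three return shapes of get_semantic_model_definition; the model name is a placeholder.

import sempy_labs as labs

# Default: a dataframe listing the definition parts.
df = labs.get_semantic_model_definition(dataset="Sales Model")

# TMSL (or "bim"): the decoded model.bim as a dict.
bim = labs.get_semantic_model_definition(
    dataset="Sales Model", format="TMSL", return_dataframe=False
)

# TMDL: a list of {"file_name": ..., "content": ...} dicts, one per TMDL file.
for part in labs.get_semantic_model_definition(
    dataset="Sales Model", format="TMDL", return_dataframe=False
):
    print(part["file_name"])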
+ @log
+ def get_semantic_model_size(
+     dataset: str | UUID, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Gets the size of the semantic model in bytes.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID in which the semantic model resides.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+
+     Returns
+     -------
+     int
+         The size of the semantic model in bytes.
+     """
+
+     # Named dict_df to avoid shadowing the built-in 'dict'
+     dict_df = fabric.evaluate_dax(
+         dataset=dataset,
+         workspace=workspace,
+         dax_string="""
+         EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
+         """,
+     )
+
+     used_size = fabric.evaluate_dax(
+         dataset=dataset,
+         workspace=workspace,
+         dax_string="""
+         EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
+         """,
+     )
+     dict_size = dict_df["[DICTIONARY_SIZE]"].sum()
+     used_size = used_size["[USED_SIZE]"].sum()
+     model_size = dict_size + used_size
+     # Rescale from binary (1024-based) to decimal (1000-based) units at the matching magnitude
+     if model_size >= 10**9:
+         result = model_size / (1024**3) * 10**9
+     elif model_size >= 10**6:
+         result = model_size / (1024**2) * 10**6
+     elif model_size >= 10**3:
+         result = model_size / (1024) * 10**3
+     else:
+         result = model_size
+
+     return result
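A usage sketch for get_semantic_model_size. Note the rescaling above: the summed engine statistics are converted from 1024-based to 1000-based magnitudes before being returned; the model name is a placeholder.

import sempy_labs as labs

size_in_bytes = labs.get_semantic_model_size(dataset="Sales Model")
print(f"Model size: {size_in_bytes / 10**6:,.1f} MB")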
sempy_labs/_get_connection_string.py
@@ -0,0 +1,84 @@
+ from sempy_labs._helper_functions import (
+     resolve_item_id,
+     _base_api,
+     resolve_workspace_id,
+ )
+ from typing import Optional, Literal
+ import sempy_labs._icons as icons
+ from uuid import UUID
+ from sempy._utils._log import log
+
+
+ @log
+ def get_connection_string(
+     item: str | UUID,
+     type: Literal["Lakehouse", "Warehouse", "SQLEndpoint"],
+     workspace: Optional[str | UUID] = None,
+     guest_tenant_id: Optional[UUID] = None,
+     private_link_type: Optional[str] = None,
+ ) -> str:
+     """
+     Returns the SQL connection string of the specified item.
+
+     Parameters
+     ----------
+     item : str | uuid.UUID
+         The name or ID of the item (Lakehouse, Warehouse or SQL endpoint).
+     type : Literal['Lakehouse', 'Warehouse', 'SQLEndpoint']
+         The type of the item. Must be 'Lakehouse', 'Warehouse' or 'SQLEndpoint'.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     guest_tenant_id : uuid.UUID, default=None
+         The guest tenant ID if the end user's tenant is different from the warehouse tenant.
+     private_link_type : str, default=None
+         Indicates the type of private link this connection string uses. Must be 'Workspace', 'None' or left as None.
+
+     Returns
+     -------
+     str
+         The SQL connection string of the specified item.
+     """
+     workspace_id = resolve_workspace_id(workspace)
+     item_id = resolve_item_id(item=item, type=type, workspace=workspace)
+
+     type_dict = {
+         "Warehouse": "warehouses",
+         "SQLEndpoint": "sqlEndpoints",
+     }
+     type_for_url = type_dict.get(type)
+
+     if type == "Lakehouse":
+         response = _base_api(
+             f"/v1/workspaces/{workspace_id}/lakehouses/{item_id}", client="fabric_sp"
+         ).json()
+         return (
+             response.get("properties", {})
+             .get("sqlEndpointProperties", {})
+             .get("connectionString")
+         )
+     if type in ["SQLEndpoint", "Warehouse"]:
+         url = f"/v1/workspaces/{workspace_id}/{type_for_url}/{item_id}/connectionString"
+     else:
+         raise ValueError(
+             f"{icons.red_dot} The type must be 'Lakehouse', 'Warehouse' or 'SQLEndpoint'."
+         )
+
+     if private_link_type is not None and private_link_type not in ["Workspace", "None"]:
+         raise ValueError(
+             f"{icons.red_dot} private_link_type must be 'Workspace' or 'None' or left as None."
+         )
+
+     if guest_tenant_id or private_link_type:
+         params = []
+         if guest_tenant_id:
+             params.append(f"guestTenantId={guest_tenant_id}")
+         if private_link_type:
+             params.append(f"privateLinkType={private_link_type}")
+         param_str = "?" + "&".join(params)
+         url += param_str
+
+     response = _base_api(request=url, client="fabric_sp")
+
+     return response.json().get("connectionString")
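A hedged sketch for get_connection_string, again assuming the top-level re-export in sempy_labs/__init__.py; item names are placeholders.

import sempy_labs as labs

# Lakehouse: read from the item's sqlEndpointProperties.
lh_conn = labs.get_connection_string(item="Sales Lakehouse", type="Lakehouse")

# Warehouse: resolved via the dedicated connectionString endpoint,
# with optional query parameters appended.
wh_conn = labs.get_connection_string(
    item="Sales Warehouse", type="Warehouse", private_link_type="None"
)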