semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_onelake.py ADDED
@@ -0,0 +1,131 @@
+ from sempy_labs._helper_functions import (
+     _base_api,
+     resolve_workspace_id,
+     resolve_lakehouse_name_and_id,
+     resolve_workspace_name_and_id,
+ )
+ from sempy._utils._log import log
+ from uuid import UUID
+ from typing import Optional
+ import pandas as pd
+ import sempy_labs._icons as icons
+
+
+ @log
+ def get_onelake_settings(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+     """
+     Obtains the workspace OneLake settings.
+
+     This is a wrapper function for the following API: `OneLake Settings - Get Settings <https://learn.microsoft.com/rest/api/fabric/core/onelake-settings/get-settings>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         The workspace OneLake settings.
+     """
+
+     workspace_id = resolve_workspace_id(workspace)
+     result = _base_api(
+         request=f"/v1/workspaces/{workspace_id}/onelake/settings", client="fabric_sp"
+     ).json()
+
+     d = result.get("diagnostics", {})
+     # The diagnostics status is reported as "Enabled" or "Disabled".
+     enabled = d.get("status") == "Enabled"
+     rows = [
+         {
+             "Enabled": enabled,
+             "Destination Type": (
+                 d.get("destination", {}).get("type") if enabled else None
+             ),
+             "Destination Id": (
+                 d.get("destination", {}).get("lakehouse", {}).get("itemId")
+                 if enabled
+                 else None
+             ),
+             "Destination Workspace Id": (
+                 d.get("destination", {}).get("lakehouse", {}).get("workspaceId")
+                 if enabled
+                 else None
+             ),
+         }
+     ]
+
+     return pd.DataFrame(rows)
+
+
+ @log
+ def modify_onelake_diagnostics(
+     workspace: Optional[str | UUID] = None,
+     enabled: bool = True,
+     destination_lakehouse: Optional[str | UUID] = None,
+     destination_workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Modifies the workspace OneLake diagnostics settings.
+
+     This is a wrapper function for the following API: `OneLake Settings - Modify Diagnostics <https://learn.microsoft.com/rest/api/fabric/core/onelake-settings/modify-diagnostics>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     enabled : bool, default=True
+         Whether to enable or disable OneLake diagnostics.
+     destination_lakehouse : str | uuid.UUID, default=None
+         The name or ID of the destination lakehouse.
+         Defaults to None which resolves to the lakehouse attached to the notebook.
+     destination_workspace : str | uuid.UUID, default=None
+         The name or ID of the destination workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     workspace_id = resolve_workspace_id(workspace)
+
+     if enabled:
+         # The destination is only required (and only resolved) when enabling diagnostics.
+         (destination_workspace_name, destination_workspace_id) = (
+             resolve_workspace_name_and_id(destination_workspace)
+         )
+         (destination_lakehouse_name, destination_lakehouse_id) = (
+             resolve_lakehouse_name_and_id(destination_lakehouse, destination_workspace_id)
+         )
+         payload = {
+             "status": "Enabled",
+             "destination": {
+                 "type": "Lakehouse",
+                 "lakehouse": {
+                     "referenceType": "ById",
+                     "itemId": destination_lakehouse_id,
+                     "workspaceId": destination_workspace_id,
+                 },
+             },
+         }
+     else:
+         payload = {"status": "Disabled"}
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/onelake/settings/modifyDiagnostics",
+         client="fabric_sp",
+         method="post",
+         payload=payload,
+     )
+
+     if enabled:
+         print(
+             f"{icons.green_dot} OneLake diagnostics have been enabled and updated to use the '{destination_lakehouse_name}' lakehouse in the '{destination_workspace_name}' workspace as the destination."
+         )
+     else:
+         print(f"{icons.green_dot} OneLake diagnostics have been disabled.")
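A minimal usage sketch for the two functions above, assuming a Fabric notebook session and that both functions are re-exported from the top-level sempy_labs namespace (as this package does for most modules); the 'Analytics' workspace and 'Diag Lakehouse' names are hypothetical:

    import sempy_labs as labs

    # Route OneLake diagnostic events to a lakehouse in the same workspace.
    labs.modify_onelake_diagnostics(
        workspace="Analytics",                   # hypothetical workspace name
        enabled=True,
        destination_lakehouse="Diag Lakehouse",  # hypothetical lakehouse name
        destination_workspace="Analytics",
    )

    # Confirm the change; returns a one-row DataFrame with the destination details.
    print(labs.get_onelake_settings(workspace="Analytics"))
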
sempy_labs/_query_scale_out.py ADDED
@@ -0,0 +1,433 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     resolve_dataset_name_and_id,
+     _update_dataframe_datatypes,
+     _base_api,
+     _create_dataframe,
+     resolve_workspace_id,
+ )
+ from sempy._utils._log import log
+ from typing import Optional, Tuple
+ import sempy_labs._icons as icons
+ from uuid import UUID
+
+
+ @log
+ def qso_sync(dataset: str | UUID, workspace: Optional[str | UUID] = None):
+     """
+     Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace.
+
+     This is a wrapper function for the following API: `Datasets - Trigger Query Scale Out Sync In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group>`_.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync",
+         method="post",
+     )
+     print(
+         f"{icons.green_dot} QSO sync initiated for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+     )
+
+
+ @log
+ def qso_sync_status(
+     dataset: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> Tuple[pd.DataFrame, pd.DataFrame]:
+     """
+     Returns the query scale-out sync status for the specified dataset from the specified workspace.
+
+     This is a wrapper function for the following API: `Datasets - Get Query Scale Out Sync Status In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-query-scale-out-sync-status-in-group>`_.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     Tuple[pandas.DataFrame, pandas.DataFrame]
+         Two pandas dataframes showing the query scale-out sync status: the overall status and the per-replica details.
+     """
+
+     columns = {
+         "Scale Out Status": "string",
+         "Sync Start Time": "datetime",
+         "Sync End Time": "datetime",
+         "Commit Version": "int",
+         "Commit Timestamp": "datetime",
+         "Target Sync Version": "int",
+         "Target Sync Timestamp": "datetime",
+         "Trigger Reason": "string",
+         "Min Active Read Version": "int",
+         "Min Active Read Timestamp": "datetime",
+     }
+     df = _create_dataframe(columns=columns)
+
+     columns_rep = {
+         "Replica ID": "string",
+         "Replica Type": "string",
+         "Replica Version": "string",
+         "Replica Timestamp": "datetime",
+     }
+     dfRep = _create_dataframe(columns=columns_rep)
+
+     workspace_id = resolve_workspace_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     response = _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus"
+     )
+
+     o = response.json()
+     sos = o.get("scaleOutStatus")
+
+     if sos == "Enabled":
+         new_data = {
+             "Scale Out Status": o.get("scaleOutStatus"),
+             "Sync Start Time": o.get("syncStartTime"),
+             "Sync End Time": o.get("syncEndTime"),
+             "Commit Version": o.get("commitVersion"),
+             "Commit Timestamp": o.get("commitTimestamp"),
+             "Target Sync Version": o.get("targetSyncVersion"),
+             "Target Sync Timestamp": o.get("targetSyncTimestamp"),
+             "Trigger Reason": o.get("triggerReason"),
+             "Min Active Read Version": o.get("minActiveReadVersion"),
+             "Min Active Read Timestamp": o.get("minActiveReadTimestamp"),
+         }
+         df = pd.DataFrame([new_data])
+
+         # Collect every replica row, then build the dataframe once.
+         replica_rows = []
+         for r in o.get("scaleOutReplicas", []):
+             replica_rows.append(
+                 {
+                     "Replica ID": r.get("replicaId"),
+                     "Replica Type": r.get("replicaType"),
+                     "Replica Version": str(r.get("replicaVersion")),
+                     "Replica Timestamp": r.get("replicaTimestamp"),
+                 }
+             )
+         if replica_rows:
+             dfRep = pd.DataFrame(replica_rows)
+
+         _update_dataframe_datatypes(dataframe=df, column_map=columns)
+         _update_dataframe_datatypes(dataframe=dfRep, column_map=columns_rep)
+
+         return df, dfRep
+     else:
+         print(f"{icons.info} Query scale-out status: {sos}")
+         return df, dfRep
+
+
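A hedged sketch combining the two wrappers above: trigger a replica sync, then poll the status until the sync window closes (the model and workspace names, retry count, and sleep interval are illustrative, not part of the package):

    import time
    import sempy_labs as labs

    labs.qso_sync(dataset="Sales Model", workspace="Analytics")

    # df holds the overall sync status; df_rep has one row per read-only replica.
    for _ in range(10):
        df, df_rep = labs.qso_sync_status(dataset="Sales Model", workspace="Analytics")
        if not df.empty and df["Sync End Time"].notna().all():
            break
        time.sleep(30)
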
+ @log
+ def disable_qso(
+     dataset: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Sets the max read-only replicas to 0, disabling query scale out.
+
+     This is a wrapper function for the following API: `Datasets - Update Dataset In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-dataset-in-group>`_.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the current query scale out settings.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     # A replica count of 0 turns query scale-out off.
+     payload = {"queryScaleOutSettings": {"maxReadOnlyReplicas": 0}}
+
+     _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+         method="patch",
+         payload=payload,
+     )
+
+     df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
+
+     print(
+         f"{icons.green_dot} Query scale out has been disabled for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+     )
+
+     return df
+
+
+ @log
+ def set_qso(
+     dataset: str | UUID,
+     auto_sync: bool = True,
+     max_read_only_replicas: int = -1,
+     workspace: Optional[str | UUID] = None,
+ ) -> pd.DataFrame:
+     """
+     Sets the query scale out settings for a semantic model.
+
+     This is a wrapper function for the following API: `Datasets - Update Dataset In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-dataset-in-group>`_.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     auto_sync : bool, default=True
+         Whether the semantic model automatically syncs read-only replicas.
+     max_read_only_replicas : int, default=-1
+         To enable semantic model scale-out, set max_read_only_replicas to -1 or any non-zero value.
+         A value of -1 (recommended) allows Power BI to create as many read-only replicas as the
+         capacity supports; a lower value caps the replica count explicitly.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the current query scale-out settings.
+     """
+
+     from sempy_labs._helper_functions import is_default_semantic_model
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     if is_default_semantic_model(dataset=dataset_id, workspace=workspace_id):
+         raise ValueError(
+             f"{icons.red_dot} The 'set_qso' function does not run against default semantic models."
+         )
+
+     if max_read_only_replicas == 0:
+         return disable_qso(dataset=dataset_id, workspace=workspace_id)
+
+     payload = {
+         "queryScaleOutSettings": {
+             "autoSyncReadOnlyReplicas": auto_sync,
+             "maxReadOnlyReplicas": max_read_only_replicas,
+         }
+     }
+
+     # Query scale-out requires the large storage format; upgrade first if needed.
+     dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
+     storage_mode = dfL["Storage Mode"].iloc[0]
+
+     if storage_mode == "Small":
+         set_semantic_model_storage_format(
+             dataset=dataset_id, storage_format="Large", workspace=workspace_id
+         )
+
+     _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+         method="patch",
+         payload=payload,
+     )
+
+     df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
+     print(
+         f"{icons.green_dot} Query scale out has been set on the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+     )
+
+     return df
+
+
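A hedged usage sketch for enabling and later disabling scale-out (names are illustrative; note that set_qso upgrades a 'Small' model to the large storage format before patching, as shown above):

    import sempy_labs as labs

    # -1 lets Power BI create as many read-only replicas as the capacity supports.
    labs.set_qso(
        dataset="Sales Model",
        auto_sync=True,
        max_read_only_replicas=-1,
        workspace="Analytics",
    )

    # Equivalent to passing max_read_only_replicas=0.
    labs.disable_qso(dataset="Sales Model", workspace="Analytics")
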
+ @log
+ def set_semantic_model_storage_format(
+     dataset: str | UUID, storage_format: str, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Sets the semantic model storage format.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     storage_format : str
+         The storage format for the semantic model. Valid options: 'Large', 'Small'.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     storage_format = storage_format.capitalize()
+
+     # Accept the REST API's storage mode names ('Abf', 'PremiumFiles') as aliases.
+     if storage_format == "Abf":
+         storage_format = "Small"
+     elif storage_format.startswith("Premium"):
+         storage_format = "Large"
+
+     storageFormats = ["Small", "Large"]
+
+     if storage_format == "Large":
+         payload = {"targetStorageMode": "PremiumFiles"}
+     elif storage_format == "Small":
+         payload = {"targetStorageMode": "Abf"}
+     else:
+         raise ValueError(
+             f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
+         )
+
+     dfL = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
+     current_storage_format = dfL["Storage Mode"].iloc[0]
+
+     if current_storage_format == storage_format:
+         print(
+             f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is already set to '{storage_format.lower()}' storage format."
+         )
+         return
+
+     _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+         method="patch",
+         payload=payload,
+     )
+     print(
+         f"{icons.green_dot} The semantic model storage format for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been set to '{storage_format}'."
+     )
+
+
+ @log
+ def list_qso_settings(
+     dataset: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Shows the query scale out settings for a semantic model (or all semantic models within a workspace).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID, default=None
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the query scale out settings.
+     """
+
+     workspace_id = resolve_workspace_id(workspace)
+
+     if dataset is not None:
+         (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     columns = {
+         "Dataset Id": "string",
+         "Dataset Name": "string",
+         "Storage Mode": "string",
+         "QSO Auto Sync Enabled": "bool",
+         "QSO Max Read Only Replicas": "int",
+     }
+     df = _create_dataframe(columns=columns)
+
+     response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/datasets")
+
+     rows = []
+     for v in response.json().get("value", []):
+         tsm = v.get("targetStorageMode")
+         sm = "Small" if tsm == "Abf" else "Large"
+         rows.append(
+             {
+                 "Dataset Id": v.get("id"),
+                 "Dataset Name": v.get("name"),
+                 "Storage Mode": sm,
+                 "QSO Auto Sync Enabled": v.get("queryScaleOutSettings", {}).get(
+                     "autoSyncReadOnlyReplicas"
+                 ),
+                 "QSO Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
+                     "maxReadOnlyReplicas"
+                 ),
+             }
+         )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+         _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     if dataset is not None:
+         df = df[df["Dataset Id"] == dataset_id]
+
+     return df
+
+
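A short sketch of auditing scale-out settings across a workspace with the function above (the workspace name is illustrative):

    import sempy_labs as labs

    df = labs.list_qso_settings(workspace="Analytics")
    # Keep models that allow read-only replicas; -1 means capacity-defined maximum.
    print(df[df["QSO Max Read Only Replicas"] != 0])
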
+ @log
+ def set_workspace_default_storage_format(
+     storage_format: str, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Sets the default storage format for semantic models within a workspace.
+
+     Parameters
+     ----------
+     storage_format : str
+         The storage format for the semantic model. Valid options: 'Large', 'Small'.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     # https://learn.microsoft.com/en-us/rest/api/power-bi/groups/update-group#defaultdatasetstorageformat
+
+     storageFormats = ["Small", "Large"]
+     storage_format = storage_format.capitalize()
+
+     if storage_format not in storageFormats:
+         raise ValueError(
+             f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
+         )
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     # Check current storage format
+     dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
+     if len(dfW) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{workspace_name}' workspace was not found."
+         )
+     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]
+
+     if current_storage_format == storage_format:
+         print(
+             f"{icons.info} The '{workspace_name}' workspace is already set to a default storage format of '{current_storage_format}'."
+         )
+         return
+
+     payload = {
+         "name": workspace_name,
+         "defaultDatasetStorageFormat": storage_format,
+     }
+
+     _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}", method="patch", payload=payload
+     )
+
+     print(
+         f"{icons.green_dot} The default storage format for the '{workspace_name}' workspace has been updated to '{storage_format}'."
+     )
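
Finally, a hedged sketch of setting the workspace-level default so that new semantic models are created in the large storage format (the workspace name is illustrative):

    import sempy_labs as labs

    labs.set_workspace_default_storage_format(storage_format="Large", workspace="Analytics")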