semantic_link_labs-0.12.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_semantic_models.py
@@ -0,0 +1,468 @@
+ from uuid import UUID
+ from typing import Optional, List
+ import pandas as pd
+ from sempy_labs._helper_functions import (
+     _create_dataframe,
+     _base_api,
+     _update_dataframe_datatypes,
+     resolve_workspace_name_and_id,
+     resolve_dataset_name_and_id,
+     delete_item,
+     resolve_dataset_id,
+     resolve_workspace_id,
+ )
+ import sempy_labs._icons as icons
+ import re
+ from sempy._utils._log import log
+
+
+ @log
+ def get_semantic_model_refresh_schedule(
+     dataset: str | UUID, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Gets the refresh schedule for the specified dataset from the specified workspace.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         Shows the refresh schedule for the specified dataset from the specified workspace.
+     """
+
+     workspace_id = resolve_workspace_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+     columns = {
+         "Days": "str",
+         "Times": "str",
+         "Enabled": "bool",
+         "Local Time Zone Id": "str",
+         "Notify Option": "str",
+     }
+
+     column_map = {
+         "days": "Days",
+         "times": "Times",
+         "enabled": "Enabled",
+         "localTimeZoneId": "Local Time Zone Id",
+         "notifyOption": "Notify Option",
+     }
+
+     df = _create_dataframe(columns)
+
+     result = _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+         client="fabric_sp",
+     ).json()
+
+     df = (
+         pd.json_normalize(result)
+         .drop(columns=["@odata.context"], errors="ignore")
+         .rename(columns=column_map)
+     )
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
+ @log
+ def enable_semantic_model_scheduled_refresh(
+     dataset: str | UUID,
+     workspace: Optional[str | UUID] = None,
+     enable: bool = True,
+ ):
+     """
+     Enables or disables the scheduled refresh for the specified dataset from the specified workspace.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     enable : bool, default=True
+         If True, enables the scheduled refresh.
+         If False, disables the scheduled refresh.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+     df = get_semantic_model_refresh_schedule(dataset=dataset, workspace=workspace)
+     status = df["Enabled"].iloc[0]
+
+     if enable and status:
+         print(
+             f"{icons.info} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace is already enabled."
+         )
+     elif not enable and not status:
+         print(
+             f"{icons.info} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace is already disabled."
+         )
+     else:
+         payload = {"value": {"enabled": enable}}
+
+         _base_api(
+             request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+             method="patch",
+             payload=payload,
+             client="fabric_sp",
+         )
+
+         print(
+             f"{icons.green_dot} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace has been {'enabled' if enable else 'disabled'}."
+         )
+
+
+ @log
+ def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] = None):
+     """
+     Deletes a semantic model.
+
+     This is a wrapper function for the following API: `Items - Delete Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/delete-semantic-model>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset: str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     delete_item(item=dataset, type="SemanticModel", workspace=workspace)
+
+
+ @log
+ def update_semantic_model_refresh_schedule(
+     dataset: str | UUID,
+     days: Optional[str | List[str]] = None,
+     times: Optional[str | List[str]] = None,
+     time_zone: Optional[str] = None,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Updates the refresh schedule for the specified dataset from the specified workspace.
+
+     This is a wrapper function for the following API: `Datasets - Update Refresh Schedule In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-refresh-schedule-in-group>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     days : str | list[str], default=None
+         The days of the week to refresh the dataset.
+         Valid values are: "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday".
+         Defaults to None which means the refresh schedule will not be updated.
+     times : str | list[str], default=None
+         The times of the day to refresh the dataset.
+         Valid format is "HH:MM" (24-hour format).
+         Defaults to None which means the refresh schedule will not be updated.
+     time_zone : str, default=None
+         The time zone to use for the refresh schedule.
+         Defaults to None which means the refresh schedule will not be updated.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace)
+
+     payload = {"value": {}}
+
+     def is_valid_time_format(time_str):
+         pattern = r"^(?:[01]\d|2[0-3]):[0-5]\d$"
+         return re.match(pattern, time_str) is not None
+
+     weekdays = [
+         "Monday",
+         "Tuesday",
+         "Wednesday",
+         "Thursday",
+         "Friday",
+         "Sunday",
+         "Saturday",
+     ]
+     if days:
+         if isinstance(days, str):
+             days = [days]
+         for i in range(len(days)):
+             days[i] = days[i].capitalize()
+             if days[i] not in weekdays:
+                 raise ValueError(
+                     f"{icons.red_dot} Invalid day '{days[i]}'. Valid days are: {weekdays}"
+                 )
+         payload["value"]["days"] = days
+     if times:
+         if isinstance(times, str):
+             times = [times]
+         for i in range(len(times)):
+             if not is_valid_time_format(times[i]):
+                 raise ValueError(
+                     f"{icons.red_dot} Invalid time '{times[i]}'. Valid time format is 'HH:MM' (24-hour format)."
+                 )
+         payload["value"]["times"] = times
+     if time_zone:
+         payload["value"]["localTimeZoneId"] = time_zone
+
+     if not payload.get("value"):
+         print(
+             f"{icons.info} No changes were made to the refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace."
+         )
+         return
+
+     _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshSchedule",
+         method="patch",
+         client="fabric_sp",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} Refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace has been updated."
+     )
+
+
+ @log
+ def list_semantic_model_datasources(
+     dataset: str | UUID,
+     workspace: Optional[str | UUID] = None,
+     expand_details: bool = True,
+ ) -> pd.DataFrame:
+     """
+     Lists the data sources for the specified semantic model.
+
+     This is a wrapper function for the following API: `Datasets - Get Datasources In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-datasources-in-group>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     expand_details : bool, default=True
+         If True, expands the connection details for each data source.
+
+     Returns
+     -------
+     pandas.DataFrame
+         DataFrame containing the data sources for the specified semantic model.
+     """
+
+     workspace_id = resolve_workspace_id(workspace)
+     dataset_id = resolve_dataset_id(dataset, workspace_id)
+
+     if expand_details:
+         columns = {
+             "Datasource Type": "str",
+             "Connection Server": "str",
+             "Connection Database": "str",
+             "Connection Path": "str",
+             "Connection Account": "str",
+             "Connection Domain": "str",
+             "Connection Kind": "str",
+             "Connection Email Address": "str",
+             "Connection URL": "str",
+             "Connection Class Info": "str",
+             "Connection Login Server": "str",
+             "Datasource Id": "str",
+             "Gateway Id": "str",
+         }
+     else:
+         columns = {
+             "Datasource Type": "str",
+             "Connection Details": "str",
+             "Datasource Id": "str",
+             "Gateway Id": "str",
+         }
+
+     df = _create_dataframe(columns)
+
+     response = _base_api(
+         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/datasources",
+         client="fabric_sp",
+     )
+
+     rows = []
+     for item in response.json().get("value", []):
+         ds_type = item.get("datasourceType")
+         conn_details = item.get("connectionDetails", {})
+         ds_id = item.get("datasourceId")
+         gateway_id = item.get("gatewayId")
+         if expand_details:
+             rows.append(
+                 {
+                     "Datasource Type": ds_type,
+                     "Connection Server": conn_details.get("server"),
+                     "Connection Database": conn_details.get("database"),
+                     "Connection Path": conn_details.get("path"),
+                     "Connection Account": conn_details.get("account"),
+                     "Connection Domain": conn_details.get("domain"),
+                     "Connection Kind": conn_details.get("kind"),
+                     "Connection Email Address": conn_details.get("emailAddress"),
+                     "Connection URL": conn_details.get("url"),
+                     "Connection Class Info": conn_details.get("classInfo"),
+                     "Connection Login Server": conn_details.get("loginServer"),
+                     "Datasource Id": ds_id,
+                     "Gateway Id": gateway_id,
+                 }
+             )
+         else:
+             rows.append(
+                 {
+                     "Datasource Type": ds_type,
+                     "Connection Details": conn_details,
+                     "Datasource Id": ds_id,
+                     "Gateway Id": gateway_id,
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+     return df
+
+
+ @log
+ def bind_semantic_model_connection(
+     dataset: str | UUID,
+     connection_id: UUID,
+     connectivity_type: str,
+     connection_type: str,
+     connection_path: str,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Binds a semantic model data source reference to a data connection.
+     This API can also be used to unbind data source references.
+
+     This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     connection_id : uuid.UUID
+         The object ID of the connection.
+     connectivity_type : str
+         The connectivity type of the connection. Additional connectivity types may be added over time.
+     connection_type : str
+         The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
+     connection_path : str
+         The path of the connection.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+         dataset=dataset, workspace=workspace_id
+     )
+
+     payload = {
+         "connectionBinding": {
+             "id": str(connection_id),
+             "connectivityType": connectivity_type,
+             "connectionDetails": {
+                 "type": connection_type,
+                 "path": connection_path,
+             },
+         }
+     }
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
+         method="post",
+         client="fabric_sp",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} Connection '{connection_id}' has been bound to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+     )
+
+
+ @log
+ def unbind_semantic_model_connection(
+     dataset: str | UUID,
+     connection_type: str,
+     connection_path: str,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Unbinds a semantic model data source reference from a data connection.
+
+     This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     connection_type : str
+         The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
+     connection_path : str
+         The path of the connection.
+     workspace : str | uuid.UUID, default=None
+         The workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+         dataset=dataset, workspace=workspace_id
+     )
+
+     payload = {
+         "connectionBinding": {
+             "connectivityType": "None",
+             "connectionDetails": {
+                 "type": connection_type,
+                 "path": connection_path,
+             },
+         }
+     }
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
+         method="post",
+         client="fabric_sp",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been unbound from its connection."
+     )
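
Usage sketch (not part of the package contents above): a minimal example of how the functions in the new sempy_labs/_semantic_models.py module might be called from a Microsoft Fabric notebook with semantic-link-labs 0.12.8 installed. It assumes these functions are re-exported from the sempy_labs package root (the library's usual pattern); the model name "Sales Model" and workspace name "Sales Workspace" are placeholders.

import sempy_labs as labs

# Inspect the current refresh schedule for the model.
schedule = labs.get_semantic_model_refresh_schedule(
    dataset="Sales Model", workspace="Sales Workspace"
)
print(schedule)

# Move the schedule to weekday refreshes at 06:00 and 18:00 in the given time zone.
labs.update_semantic_model_refresh_schedule(
    dataset="Sales Model",
    days=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"],
    times=["06:00", "18:00"],
    time_zone="UTC",
    workspace="Sales Workspace",
)

# Ensure scheduled refresh is turned on (pass enable=False to turn it off).
labs.enable_semantic_model_scheduled_refresh(
    dataset="Sales Model", workspace="Sales Workspace", enable=True
)

# List the model's data sources with expanded connection details.
datasources = labs.list_semantic_model_datasources(
    dataset="Sales Model", workspace="Sales Workspace", expand_details=True
)
print(datasources)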