semantic_link_labs-0.12.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_refresh_semantic_model.py
@@ -0,0 +1,435 @@
import sempy.fabric as fabric
import time
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
    _get_partition_map,
    _process_and_display_chart,
    resolve_dataset_name_and_id,
    _update_dataframe_datatypes,
    _base_api,
)
from typing import Any, List, Optional, Union
from sempy._utils._log import log
import sempy_labs._icons as icons
import pandas as pd
import warnings
import ipywidgets as widgets
import json
from uuid import UUID


@log
def refresh_semantic_model(
    dataset: str | UUID,
    tables: Optional[Union[str, List[str]]] = None,
    partitions: Optional[Union[str, List[str]]] = None,
    refresh_type: str = "full",
    retry_count: int = 0,
    apply_refresh_policy: bool = True,
    max_parallelism: int = 10,
    workspace: Optional[str | UUID] = None,
    visualize: bool = False,
    commit_mode: str = "transactional",
) -> pd.DataFrame | None:
    """
    Refreshes a semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    tables : str | List[str], default=None
        A table or list of tables to refresh.
    partitions : str | List[str], default=None
        A partition or list of partitions to refresh. Partitions must be formatted as such: 'Table Name'[Partition Name].
    refresh_type : str, default="full"
        The type of processing to perform. Types align with the TMSL refresh command types: full, clearValues, calculate, dataOnly, automatic, and defragment. The add type isn't supported.
    retry_count : int, default=0
        Number of times the operation retries before failing.
    apply_refresh_policy : bool, default=True
        If an incremental refresh policy is defined, determines whether to apply the policy. If the policy isn't applied, a full process leaves partition definitions unchanged and fully refreshes all partitions in the table. If commit_mode is "transactional", apply_refresh_policy can be True or False. If commit_mode is "partialBatch", apply_refresh_policy of True isn't supported and must be set to False.
    max_parallelism : int, default=10
        The maximum number of threads that can run the processing commands in parallel.
        This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    visualize : bool, default=False
        If True, displays a Gantt chart showing the refresh statistics for each table/partition.
    commit_mode : str, default="transactional"
        Determines whether to commit objects in batches or only when complete. Modes are "transactional" and "partialBatch".

    Returns
    -------
    pandas.DataFrame | None
        If 'visualize' is set to True, returns a pandas dataframe showing the SSAS trace output used to generate the visualization.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

    if isinstance(tables, str):
        tables = [tables]
    if isinstance(partitions, str):
        partitions = [partitions]

    objects: List[Any] = []

    if tables is not None:
        objects = objects + [{"table": table} for table in tables]
    if partitions is not None:

        def extract_names(partition):
            # Split "'Table Name'[Partition Name]" into its table and partition parts.
            parts = partition.split("[")
            table_name = parts[0].strip("'")
            partition_name = parts[1].strip("]")
            return {"table": table_name, "partition": partition_name}

        objects = objects + [extract_names(partition) for partition in partitions]

    # Normalize abbreviated refresh types (e.g. 'calc' -> 'calculate') and validate.
    refresh_type = refresh_type.lower()
    for prefix, mapped_value in icons.refresh_type_mapping.items():
        if refresh_type.startswith(prefix):
            refresh_type = mapped_value
            break

    valid_refresh_types = list(icons.refresh_type_mapping.values())
    if refresh_type not in valid_refresh_types:
        raise ValueError(
            f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {valid_refresh_types}."
        )

    def refresh_and_trace_dataset(
        dataset,
        workspace,
        refresh_type,
        retry_count,
        apply_refresh_policy,
        max_parallelism,
        objects,
        visualize,
        commit_mode,
    ):
        # Ignore specific warnings
        warnings.filterwarnings(
            "ignore",
            message="No trace logs have been recorded. Try starting the trace with a larger 'delay'",
        )

        def extract_failure_error():
            error_messages = []
            final_message = f"{icons.red_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has failed."
            for _, r in fabric.get_refresh_execution_details(
                refresh_request_id=request_id,
                dataset=dataset_id,
                workspace=workspace_id,
            ).messages.iterrows():
                error_messages.append(f"{r['Type']}: {r['Message']}")

            if error_messages:
                combined_messages = "\n".join(error_messages)
                final_message += f"\n{combined_messages}"

            return final_message

        # Function to perform the dataset refresh
        def refresh_dataset():
            return fabric.refresh_dataset(
                dataset=dataset_id,
                workspace=workspace_id,
                refresh_type=refresh_type,
                retry_count=retry_count,
                apply_refresh_policy=apply_refresh_policy,
                max_parallelism=max_parallelism,
                commit_mode=commit_mode,
                objects=objects if objects else None,
            )

        def check_refresh_status(request_id):
            request_details = fabric.get_refresh_execution_details(
                dataset=dataset_id,
                refresh_request_id=request_id,
                workspace=workspace_id,
            )
            return request_details.status

        def display_trace_logs(trace, partition_map, widget, title, stop=False):
            # Stopping the trace returns the final logs; otherwise poll the live logs.
            if stop:
                df = trace.stop()
            else:
                df = trace.get_trace_logs()
            if not df.empty:
                df = df[
                    df["Event Subclass"].isin(["ExecuteSql", "Process"])
                ].reset_index(drop=True)
                df = pd.merge(
                    df,
                    partition_map[
                        ["PartitionID", "Object Name", "TableName", "PartitionName"]
                    ],
                    left_on="Object ID",
                    right_on="PartitionID",
                    how="left",
                )
                if not df.empty:
                    _process_and_display_chart(df, title=title, widget=widget)
            if stop:
                df.drop(["Object Name", "PartitionID"], axis=1, inplace=True)
                df.rename(columns={"TableName": "Table Name"}, inplace=True)
                df.rename(columns={"PartitionName": "Partition Name"}, inplace=True)
                return df

        # Start the refresh process
        if not visualize:
            request_id = refresh_dataset()
            print(
                f"{icons.in_progress} Refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is in progress..."
            )

        # Monitor refresh progress and handle tracing if visualize is enabled
        if visualize:
            partition_map = _get_partition_map(dataset, workspace)
            widget = widgets.Output()

            with fabric.create_trace_connection(
                dataset=dataset_id, workspace=workspace_id
            ) as trace_connection:
                with trace_connection.create_trace(icons.refresh_event_schema) as trace:
                    trace.start()
                    request_id = refresh_dataset()

                    while True:
                        status = check_refresh_status(request_id)
                        # Check if the refresh has completed
                        if status == "Completed":
                            break
                        elif status == "Failed":
                            raise ValueError(extract_failure_error())
                        elif status == "Cancelled":
                            print(
                                f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
                            )
                            return

                        # Capture and display logs in real time
                        display_trace_logs(
                            trace,
                            partition_map,
                            widget,
                            title="Refresh in progress...",
                        )

                        time.sleep(3)  # Wait before the next check

                    # Allow the final trace events to arrive after completion
                    time.sleep(5)

                    # Stop the trace and display the final chart
                    final_df = display_trace_logs(
                        trace,
                        partition_map,
                        widget,
                        title="Refresh Completed",
                        stop=True,
                    )

            print(
                f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
            )
            return final_df

        # For the non-visualize case, only poll the refresh status
        else:
            while True:
                status = check_refresh_status(request_id)
                if status == "Completed":
                    break
                elif status == "Failed":
                    raise ValueError(extract_failure_error())
                elif status == "Cancelled":
                    print(
                        f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
                    )
                    return

                time.sleep(3)

            print(
                f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
            )

    final_output = refresh_and_trace_dataset(
        dataset=dataset,
        workspace=workspace,
        refresh_type=refresh_type,
        retry_count=retry_count,
        apply_refresh_policy=apply_refresh_policy,
        max_parallelism=max_parallelism,
        objects=objects,
        visualize=visualize,
        commit_mode=commit_mode,
    )

    return final_output

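For reference, a minimal usage sketch of the function above (editorial example, not part of the module; the model name 'Sales Model' and the table/partition names are hypothetical):

import sempy_labs as labs

# Full refresh of two specific tables (hypothetical model/table names).
labs.refresh_semantic_model(
    dataset="Sales Model",
    tables=["Sales", "Geography"],
    refresh_type="full",
)

# Refresh one partition and visualize the refresh as a Gantt chart;
# with visualize=True the SSAS trace output is returned as a dataframe.
trace_df = labs.refresh_semantic_model(
    dataset="Sales Model",
    partitions="'Sales'[Sales 2024]",
    visualize=True,
)
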
@log
def cancel_dataset_refresh(
    dataset: str | UUID,
    request_id: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Cancels the refresh of a semantic model which was executed via the `Enhanced Refresh API <https://learn.microsoft.com/power-bi/connect-data/asynchronous-refresh>`_.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    request_id : str, default=None
        The request ID of a semantic model refresh.
        Defaults to finding the latest active refresh of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

    if request_id is None:
        # In-flight enhanced refreshes report a status of "Unknown".
        rr = fabric.list_refresh_requests(dataset=dataset_id, workspace=workspace_id)
        rr_filt = rr[rr["Status"] == "Unknown"]

        if len(rr_filt) == 0:
            raise ValueError(
                f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
            )

        request_id = rr_filt["Request Id"].iloc[0]

    _base_api(
        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes/{request_id}",
        method="delete",
    )
    print(
        f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
    )

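A usage sketch for cancelling an in-flight refresh (editorial example; the model name is hypothetical). Omitting request_id cancels the latest active Enhanced Refresh API request, per the docstring above:

import sempy_labs as labs

# Cancel the most recent active enhanced refresh of the model.
labs.cancel_dataset_refresh(dataset="Sales Model")
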
@log
def get_semantic_model_refresh_history(
    dataset: str | UUID,
    request_id: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Obtains the semantic model refresh history (refreshes executed via the Enhanced Refresh API).

    This is a wrapper function for the following API: `Datasets - Get Refresh History In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-refresh-history-in-group>`_.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    request_id : str, default=None
        The request ID of a semantic model refresh.
        Defaults to None, which shows all refresh requests for the given semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the semantic model refresh history.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
    df = pd.DataFrame(
        columns=[
            "Request Id",
            "Refresh Type",
            "Start Time",
            "End Time",
            "Status",
            "Extended Status",
        ]
    )

    response = _base_api(
        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes"
    )
    data = []

    for i in response.json().get("value", []):
        error = i.get("serviceExceptionJson")
        error_json = json.loads(error) if error else None
        if request_id is None:
            # One summary row per refresh request.
            new_data = {
                "Request Id": i.get("requestId"),
                "Refresh Type": i.get("refreshType"),
                "Start Time": i.get("startTime"),
                "End Time": i.get("endTime"),
                "Error Code": error_json.get("errorCode") if error_json else None,
                "Error Description": (
                    error_json.get("errorDescription") if error_json else None
                ),
                "Status": i.get("status"),
                "Extended Status": i.get("extendedStatus"),
                "Attempts": i.get("refreshAttempts"),
            }
            data.append(new_data)

        elif request_id == i.get("requestId"):
            # One detail row per refresh attempt of the requested refresh.
            for attempt in i.get("refreshAttempts", []):
                attempt_error = attempt.get("serviceExceptionJson")
                attempt_error_json = (
                    json.loads(attempt_error) if attempt_error else None
                )
                new_data = {
                    "Request Id": i.get("requestId"),
                    "Refresh Type": i.get("refreshType"),
                    "Start Time": i.get("startTime"),
                    "End Time": i.get("endTime"),
                    "Error Code": error_json.get("errorCode") if error_json else None,
                    "Error Description": (
                        error_json.get("errorDescription") if error_json else None
                    ),
                    "Status": i.get("status"),
                    "Extended Status": i.get("extendedStatus"),
                    "Attempt Id": attempt.get("attemptId"),
                    "Attempt Start Time": attempt.get("startTime"),
                    "Attempt End Time": attempt.get("endTime"),
                    "Attempt Error Code": (
                        attempt_error_json.get("errorCode")
                        if attempt_error_json
                        else None
                    ),
                    "Attempt Error Description": (
                        attempt_error_json.get("errorDescription")
                        if attempt_error_json
                        else None
                    ),
                    "Type": attempt.get("type"),
                }
                data.append(new_data)

    if data:
        df = pd.DataFrame(data)

    if "Attempt Id" in df.columns:
        column_map = {
            "Attempt Id": "int",
        }

        _update_dataframe_datatypes(dataframe=df, column_map=column_map)

    return df
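
Finally, a sketch querying the refresh history (editorial example; names and the request ID are hypothetical). Passing a request_id switches the output from one summary row per refresh to one row per refresh attempt:

import sempy_labs as labs

# All enhanced-refresh requests for the model.
history_df = labs.get_semantic_model_refresh_history(dataset="Sales Model")

# Attempt-level detail for a single refresh request (hypothetical ID).
detail_df = labs.get_semantic_model_refresh_history(
    dataset="Sales Model",
    request_id="00000000-0000-0000-0000-000000000000",
)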