semantic_link_labs-0.12.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
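sempy_labs/_model_bpa.py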
@@ -0,0 +1,557 @@
import sempy.fabric as fabric
import pandas as pd
import warnings
import datetime
from IPython.display import display, HTML
from sempy_labs._model_dependencies import get_model_calc_dependencies
from sempy_labs._helper_functions import (
    format_dax_object_name,
    create_relationship_name,
    save_as_delta_table,
    resolve_workspace_capacity,
    resolve_dataset_name_and_id,
    get_language_codes,
    _get_column_aggregate,
    resolve_workspace_name_and_id,
    _create_spark_session,
)
from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
from sempy_labs.tom import connect_semantic_model
from sempy_labs._model_bpa_rules import model_bpa_rules
from typing import Optional
from sempy._utils._log import log
import sempy_labs._icons as icons
from pyspark.sql.functions import col, flatten
from pyspark.sql.types import StructType, StructField, StringType
import os
from uuid import UUID


@log
def run_model_bpa(
    dataset: str | UUID,
    rules: Optional[pd.DataFrame] = None,
    workspace: Optional[str] = None,
    export: bool = False,
    return_dataframe: bool = False,
    extended: bool = False,
    language: Optional[str] = None,
    check_dependencies: bool = True,
    **kwargs,
):
    """
    Displays an HTML visualization of the results of the Best Practice Analyzer scan for a semantic model.

    The Best Practice Analyzer rules are based on the rules defined `here <https://github.com/microsoft/Analysis-Services/tree/master/BestPracticeRules>`_. The framework for the Best Practice Analyzer and rules are based on the foundation set by `Tabular Editor <https://github.com/TabularEditor/TabularEditor>`_.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    rules : pandas.DataFrame, default=None
        A pandas dataframe containing rules to be evaluated.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    export : bool, default=False
        If True, exports the resulting dataframe to a delta table in the lakehouse attached to the notebook.
    return_dataframe : bool, default=False
        If True, returns a pandas dataframe instead of the visualization.
    extended : bool, default=False
        If True, runs the set_vertipaq_annotations function to collect Vertipaq Analyzer statistics to be used in the analysis of the semantic model.
    language : str, default=None
        Specifying a language name or code (e.g. 'it-IT' for Italian) will auto-translate the Category, Rule Name and Description into the specified language.
        Defaults to None which resolves to English.
    check_dependencies : bool, default=True
        If True, leverages the model dependencies from get_model_calc_dependencies to evaluate the rules. Set this parameter to False if running the rules against a semantic model in a shared capacity.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the semantic model objects which violated the best practice analyzer rules.
    """

    import polib

    if "extend" in kwargs:
        print(
            "The 'extend' parameter has been deprecated. Please remove this parameter from the function going forward."
        )
        del kwargs["extend"]

    warnings.filterwarnings(
        "ignore",
        message="This pattern is interpreted as a regular expression, and has match groups.",
    )
    warnings.filterwarnings(
        "ignore", category=UserWarning, message=".*Arrow optimization.*"
    )

    language_list = list(icons.language_map.keys())
    if language is not None:
        language = get_language_codes(languages=language)[0]

    # Map languages to the closest language (first 2 letters matching)
    def map_language(language, language_list):

        mapped = False

        if language in language_list:
            mapped = True
            return language

        language_prefix = language[:2]
        for lang_code in language_list:
            if lang_code.startswith(language_prefix):
                mapped = True
                return lang_code
        if not mapped:
            return language

    if language is not None:
        language = map_language(language, language_list)

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
        dataset, workspace=workspace_id
    )

    if language is not None and language not in language_list:
        print(
            f"{icons.yellow_dot} The '{language}' language code is not in our predefined language list. Please file an issue and let us know which language code you are using: https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=."
        )

    with connect_semantic_model(
        dataset=dataset_id, workspace=workspace_id, readonly=True
    ) as tom:

        if extended:
            tom.set_vertipaq_annotations()

        # Do not run BPA for models with no tables
        if tom.model.Tables.Count == 0:
            print(
                f"{icons.warning} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no tables and therefore there are no valid BPA results."
            )
            return

        if check_dependencies:
            dep = get_model_calc_dependencies(
                dataset=dataset_id, workspace=workspace_id
            )
        else:
            dep = pd.DataFrame(
                columns=[
                    "Table Name",
                    "Object Name",
                    "Object Type",
                    "Expression",
                    "Referenced Table",
                    "Referenced Object",
                    "Referenced Object Type",
                    "Full Object Name",
                    "Referenced Full Object Name",
                    "Parent Node",
                ]
            )

        def translate_using_po(rule_file):
            current_dir = os.path.dirname(os.path.abspath(__file__))
            translation_file = (
                f"{current_dir}/_bpa_translation/_model/_translations_{language}.po"
            )
            for c in ["Category", "Description", "Rule Name"]:
                po = polib.pofile(translation_file)
                for entry in po:
                    if entry.tcomment == c.lower().replace(" ", "_"):
                        rule_file.loc[rule_file["Rule Name"] == entry.msgid, c] = (
                            entry.msgstr
                        )

        translated = False

        # Translations
        if language is not None and rules is None and language in language_list:
            rules = model_bpa_rules(dependencies=dep)
            translate_using_po(rules)
            translated = True
        if rules is None:
            rules = model_bpa_rules(dependencies=dep)
        if language is not None and not translated:

            def translate_using_spark(rule_file):

                from synapse.ml.services import Translate

                rules_temp = rule_file.copy()
                rules_temp = rules_temp.drop(["Expression", "URL", "Severity"], axis=1)

                schema = StructType(
                    [
                        StructField("Category", StringType(), True),
                        StructField("Scope", StringType(), True),
                        StructField("Rule Name", StringType(), True),
                        StructField("Description", StringType(), True),
                    ]
                )

                spark = _create_spark_session()
                dfRules = spark.createDataFrame(rules_temp, schema)

                columns = ["Category", "Rule Name", "Description"]
                for clm in columns:
                    translate = (
                        Translate()
                        .setTextCol(clm)
                        .setToLanguage(language)
                        .setOutputCol("translation")
                        .setConcurrency(5)
                    )

                    if clm == "Rule Name":
                        transDF = (
                            translate.transform(dfRules)
                            .withColumn(
                                "translation",
                                flatten(col("translation.translations")),
                            )
                            .withColumn("translation", col("translation.text"))
                            .select(clm, "translation")
                        )
                    else:
                        transDF = (
                            translate.transform(dfRules)
                            .withColumn(
                                "translation",
                                flatten(col("translation.translations")),
                            )
                            .withColumn("translation", col("translation.text"))
                            .select("Rule Name", clm, "translation")
                        )

                    df_panda = transDF.toPandas()
                    rule_file = pd.merge(
                        rule_file,
                        df_panda[["Rule Name", "translation"]],
                        on="Rule Name",
                        how="left",
                    )

                    rule_file = rule_file.rename(
                        columns={"translation": f"{clm}Translated"}
                    )
                    rule_file[f"{clm}Translated"] = rule_file[f"{clm}Translated"].apply(
                        lambda x: x[0] if x is not None else None
                    )

                for clm in columns:
                    rule_file = rule_file.drop([clm], axis=1)
                    rule_file = rule_file.rename(columns={f"{clm}Translated": clm})

                return rule_file

            rules = translate_using_spark(rules)

        rules.loc[rules["Severity"] == "Warning", "Severity"] = icons.warning
        rules.loc[rules["Severity"] == "Error", "Severity"] = icons.error
        rules.loc[rules["Severity"] == "Info", "Severity"] = icons.info

        pd.set_option("display.max_colwidth", 1000)

        violations = pd.DataFrame(columns=["Object Name", "Scope", "Rule Name"])

        scope_to_dataframe = {
            "Relationship": (
                tom.model.Relationships,
                lambda obj: create_relationship_name(
                    obj.FromTable.Name,
                    obj.FromColumn.Name,
                    obj.ToTable.Name,
                    obj.ToColumn.Name,
                ),
            ),
            "Column": (
                tom.all_columns(),
                lambda obj: format_dax_object_name(obj.Parent.Name, obj.Name),
            ),
            "Calculated Column": (
                tom.all_calculated_columns(),
                lambda obj: format_dax_object_name(obj.Parent.Name, obj.Name),
            ),
            "Measure": (tom.all_measures(), lambda obj: obj.Name),
            "Hierarchy": (
                tom.all_hierarchies(),
                lambda obj: format_dax_object_name(obj.Parent.Name, obj.Name),
            ),
            "Table": (tom.model.Tables, lambda obj: obj.Name),
            "Calculated Table": (tom.all_calculated_tables(), lambda obj: obj.Name),
            "Role": (tom.model.Roles, lambda obj: obj.Name),
            "Model": (tom.model, lambda obj: obj.Model.Name),
            "Calculation Item": (
                tom.all_calculation_items(),
                lambda obj: format_dax_object_name(obj.Parent.Table.Name, obj.Name),
            ),
            "Row Level Security": (
                tom.all_rls(),
                lambda obj: format_dax_object_name(obj.Parent.Name, obj.Name),
            ),
            "Partition": (
                tom.all_partitions(),
                lambda obj: format_dax_object_name(obj.Parent.Name, obj.Name),
            ),
            "Function": (
                tom.all_functions(),
                lambda obj: obj.Name,
            ),
        }

        for i, r in rules.iterrows():
            ruleName = r["Rule Name"]
            expr = r["Expression"]
            scopes = r["Scope"]

            if isinstance(scopes, str):
                scopes = [scopes]

            for scope in scopes:
                func = scope_to_dataframe[scope][0]
                nm = scope_to_dataframe[scope][1]

                if scope == "Model":
                    x = []
                    if expr(func, tom):
                        x = ["Model"]
                elif scope == "Measure":
                    x = [nm(obj) for obj in tom.all_measures() if expr(obj, tom)]
                elif scope == "Function":
                    x = [nm(obj) for obj in tom.all_functions() if expr(obj, tom)]
                elif scope == "Column":
                    x = [nm(obj) for obj in tom.all_columns() if expr(obj, tom)]
                elif scope == "Partition":
                    x = [nm(obj) for obj in tom.all_partitions() if expr(obj, tom)]
                elif scope == "Hierarchy":
                    x = [nm(obj) for obj in tom.all_hierarchies() if expr(obj, tom)]
                elif scope == "Table":
                    x = [nm(obj) for obj in tom.model.Tables if expr(obj, tom)]
                elif scope == "Calculated Table":
                    x = [
                        nm(obj) for obj in tom.all_calculated_tables() if expr(obj, tom)
                    ]
                elif scope == "Relationship":
                    x = [nm(obj) for obj in tom.model.Relationships if expr(obj, tom)]
                elif scope == "Role":
                    x = [nm(obj) for obj in tom.model.Roles if expr(obj, tom)]
                elif scope == "Row Level Security":
                    x = [nm(obj) for obj in tom.all_rls() if expr(obj, tom)]
                elif scope == "Calculation Item":
                    x = [
                        nm(obj) for obj in tom.all_calculation_items() if expr(obj, tom)
                    ]
                elif scope == "Calculated Column":
                    x = [
                        nm(obj)
                        for obj in tom.all_calculated_columns()
                        if expr(obj, tom)
                    ]

                if len(x) > 0:
                    new_data = {
                        "Object Name": x,
                        "Scope": scope,
                        "Rule Name": ruleName,
                    }
                    violations = pd.concat(
                        [violations, pd.DataFrame(new_data)], ignore_index=True
                    )

        prepDF = pd.merge(
            violations,
            rules[["Rule Name", "Category", "Severity", "Description", "URL"]],
            left_on="Rule Name",
            right_on="Rule Name",
            how="left",
        )
        prepDF.rename(columns={"Scope": "Object Type"}, inplace=True)
        finalDF = prepDF[
            [
                "Category",
                "Rule Name",
                "Severity",
                "Object Type",
                "Object Name",
                "Description",
                "URL",
            ]
        ]

    if export:
        if not lakehouse_attached():
            raise ValueError(
                f"{icons.red_dot} In order to save the Best Practice Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
            )

        dfExport = finalDF.copy()
        delta_table_name = "modelbparesults"

        lakeT = get_lakehouse_tables()
        lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

        dfExport["Severity"] = dfExport["Severity"].replace(icons.severity_mapping)

        if len(lakeT_filt) == 0:
            runId = 1
        else:
            max_run_id = _get_column_aggregate(table_name=delta_table_name)
            runId = max_run_id + 1

        now = datetime.datetime.now()
        dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
        dfD_filt = dfD[dfD["Dataset Id"] == dataset_id]
        configured_by = dfD_filt["Configured By"].iloc[0]
        capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id)
        dfExport["Capacity Name"] = capacity_name
        dfExport["Capacity Id"] = capacity_id
        dfExport["Workspace Name"] = workspace_name
        dfExport["Workspace Id"] = workspace_id
        dfExport["Dataset Name"] = dataset_name
        dfExport["Dataset Id"] = dataset_id
        dfExport["Configured By"] = configured_by
        dfExport["Timestamp"] = now
        dfExport["RunId"] = runId
        dfExport["RunId"] = dfExport["RunId"].astype("int")

        dfExport = dfExport[list(icons.bpa_schema.keys())]
        dfExport["RunId"] = dfExport["RunId"].astype("int")
        schema = {
            key.replace(" ", "_"): value for key, value in icons.bpa_schema.items()
        }
        save_as_delta_table(
            dataframe=dfExport,
            delta_table_name=delta_table_name,
            write_mode="append",
            schema=schema,
            merge_schema=True,
        )

    if return_dataframe:
        return finalDF

    pd.set_option("display.max_colwidth", 100)

    finalDF = (
        finalDF[
            [
                "Category",
                "Rule Name",
                "Object Type",
                "Object Name",
                "Severity",
                "Description",
                "URL",
            ]
        ]
        .sort_values(["Category", "Rule Name", "Object Type", "Object Name"])
        .set_index(["Category", "Rule Name"])
    )

    bpa2 = finalDF.reset_index()
    bpa_dict = {
        cat: bpa2[bpa2["Category"] == cat].drop("Category", axis=1)
        for cat in bpa2["Category"].drop_duplicates().values
    }

    styles = """
    <style>
    .tab { overflow: hidden; border: 1px solid #ccc; background-color: #f1f1f1; }
    .tab button { background-color: inherit; float: left; border: none; outline: none; cursor: pointer; padding: 14px 16px; transition: 0.3s; }
    .tab button:hover { background-color: #ddd; }
    .tab button.active { background-color: #ccc; }
    .tabcontent { display: none; padding: 6px 12px; border: 1px solid #ccc; border-top: none; }
    .tabcontent.active { display: block; }
    .tooltip { position: relative; display: inline-block; }
    .tooltip .tooltiptext { visibility: hidden; width: 300px; background-color: #555; color: #fff; text-align: center; border-radius: 6px; padding: 5px; position: absolute; z-index: 1; bottom: 125%; left: 50%; margin-left: -110px; opacity: 0; transition: opacity 0.3s; }
    .tooltip:hover .tooltiptext { visibility: visible; opacity: 1; }
    </style>
    """

    # JavaScript for tab functionality
    script = """
    <script>
    function openTab(evt, tabName) {
    var i, tabcontent, tablinks;
    tabcontent = document.getElementsByClassName("tabcontent");
    for (i = 0; i < tabcontent.length; i++) {
    tabcontent[i].style.display = "none";
    }
    tablinks = document.getElementsByClassName("tablinks");
    for (i = 0; i < tablinks.length; i++) {
    tablinks[i].className = tablinks[i].className.replace(" active", "");
    }
    document.getElementById(tabName).style.display = "block";
    evt.currentTarget.className += " active";
    }
    </script>
    """

    # JavaScript for dynamic tooltip positioning
    dynamic_script = """
    <script>
    function adjustTooltipPosition(event) {
    var tooltip = event.target.querySelector('.tooltiptext');
    var rect = tooltip.getBoundingClientRect();
    var topSpace = rect.top;
    var bottomSpace = window.innerHeight - rect.bottom;

    if (topSpace < bottomSpace) {
    tooltip.style.bottom = '125%';
    } else {
    tooltip.style.bottom = 'auto';
    tooltip.style.top = '125%';
    }
    }
    </script>
    """

    # HTML for tabs
    tab_html = '<div class="tab">'
    content_html = ""
    for i, (title, df) in enumerate(bpa_dict.items()):
        if df.shape[0] == 0:
            continue

        tab_id = f"tab{i}"
        active_class = ""
        if i == 0:
            active_class = "active"

        summary = " + ".join(
            [f"{idx} ({v})" for idx, v in df["Severity"].value_counts().items()]
        )
        tab_html += f'<button class="tablinks {active_class}" onclick="openTab(event, \'{tab_id}\')"><b>{title}</b><br/>{summary}</button>'
        content_html += f'<div id="{tab_id}" class="tabcontent {active_class}">'

        # Adding tooltip for Rule Name using Description column
        content_html += '<table border="1">'
        content_html += "<tr><th>Rule Name</th><th>Object Type</th><th>Object Name</th><th>Severity</th></tr>"
        for _, row in df.iterrows():
            content_html += "<tr>"
            if pd.notnull(row["URL"]):
                content_html += f'<td class="tooltip" onmouseover="adjustTooltipPosition(event)"><a href="{row["URL"]}">{row["Rule Name"]}</a><span class="tooltiptext">{row["Description"]}</span></td>'
            elif pd.notnull(row["Description"]):
                content_html += f'<td class="tooltip" onmouseover="adjustTooltipPosition(event)">{row["Rule Name"]}<span class="tooltiptext">{row["Description"]}</span></td>'
            else:
                content_html += f'<td>{row["Rule Name"]}</td>'
            content_html += f'<td>{row["Object Type"]}</td>'
            content_html += f'<td>{row["Object Name"]}</td>'
            content_html += f'<td style="text-align: center;">{row["Severity"]}</td>'
            # content_html += f'<td>{row["Severity"]}</td>'
            content_html += "</tr>"
        content_html += "</table>"

        content_html += "</div>"
    tab_html += "</div>"

    # Display the tabs, tab contents, and run the script
    if not export:
        return display(HTML(styles + tab_html + content_html + script))
+ return display(HTML(styles + tab_html + content_html + script))