semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
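
The wheel ships a single top-level import package, sempy_labs (see top_level.txt above), with the Power BI / Fabric report helpers under sempy_labs/report/. Below is a minimal getting-started sketch for a Microsoft Fabric notebook; the install line and the assumption that the report helpers are re-exported from sempy_labs.report follow from the package layout above, and any names in the snippet are placeholders:

    # %pip install semantic-link-labs==0.12.8   # version shown in this diff
    import sempy_labs
    from sempy_labs import report as rep

    # The two report modules diffed below provide, among others:
    #   rep.create_report_from_reportjson(...), rep.update_report_from_reportjson(...),
    #   rep.get_report_definition(...), rep.create_model_bpa_report(...),
    #   rep.get_report_datasources(...)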
sempy_labs/report/_generate_report.py (new file)
@@ -0,0 +1,427 @@
import sempy.fabric as fabric
import pandas as pd
import json
import os
from typing import Optional
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
    _conv_b64,
    resolve_dataset_name_and_id,
    resolve_item_name_and_id,
    _update_dataframe_datatypes,
    _base_api,
    resolve_item_id,
    _get_item_definition,
)
import sempy_labs._icons as icons
from sempy._utils._log import log
from uuid import UUID


@log
def create_report_from_reportjson(
    report: str,
    dataset: str | UUID,
    report_json: dict,
    theme_json: Optional[dict] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Creates a report based on a report.json file (and an optional themes.json file).

    This is a wrapper function for the following API: `Items - Create Report <https://learn.microsoft.com/rest/api/fabric/report/items/create-report>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    report : str
        Name of the report.
    dataset : str | uuid.UUID
        Name or ID of the semantic model to connect to the report.
    report_json : dict
        The report.json file to be used to create the report.
    theme_json : dict, default=None
        The theme.json file to be used for the theme of the report.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

    dfI = fabric.list_items(workspace=workspace, type="Report")
    dfI_rpt = dfI[dfI["Display Name"] == report]

    if not dfI_rpt.empty:
        print(
            f"{icons.yellow_dot} '{report}' report already exists in the '{workspace_name}' workspace."
        )
        return

    defPBIR = {
        "version": "1.0",
        "datasetReference": {
            "byPath": None,
            "byConnection": {
                "connectionString": None,
                "pbiServiceModelId": None,
                "pbiModelVirtualServerName": "sobe_wowvirtualserver",
                "pbiModelDatabaseName": dataset_id,
                "name": "EntityDataSource",
                "connectionType": "pbiServiceXmlaStyleLive",
            },
        },
    }

    definitionPBIR = _conv_b64(defPBIR)
    payloadReportJson = _conv_b64(report_json)

    request_body = {
        "displayName": report,
        "definition": {
            "parts": [
                {
                    "path": "report.json",
                    "payload": payloadReportJson,
                    "payloadType": "InlineBase64",
                },
                {
                    "path": "definition.pbir",
                    "payload": definitionPBIR,
                    "payloadType": "InlineBase64",
                },
            ]
        },
    }

    if theme_json is not None:
        theme_payload = _conv_b64(theme_json)
        theme_id = theme_json["payload"]["blob"]["displayName"]
        theme_path = f"StaticResources/SharedResources/BaseThemes/{theme_id}.json"

        part = {
            "path": theme_path,
            "payload": theme_payload,
            "payloadType": "InlineBase64",
        }
        request_body["definition"]["parts"].append(part)

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/reports",
        method="post",
        payload=request_body,
        lro_return_status_code=True,
        status_codes=[201, 202],
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} Successfully created the '{report}' report within the '{workspace_name}' workspace."
    )


@log
def update_report_from_reportjson(
    report: str | UUID, report_json: dict, workspace: Optional[str | UUID] = None
):
    """
    Updates a report based on a report.json file.

    This is a wrapper function for the following API: `Items - Update Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/update-report-definition>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    report : str | uuid.UUID
        Name or ID of the report.
    report_json : dict
        The report.json file to be used to update the report.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    report_id = resolve_item_id(item=report, type="Report", workspace=workspace)

    # Get the existing PBIR file
    df_items = get_report_definition(report=report, workspace=workspace_id)
    df_items_filt = df_items[df_items["path"] == "definition.pbir"]
    rptDefFile = df_items_filt["payload"].iloc[0]
    payloadReportJson = _conv_b64(report_json)

    payload = {
        "definition": {
            "parts": [
                {
                    "path": "report.json",
                    "payload": payloadReportJson,
                    "payloadType": "InlineBase64",
                },
                {
                    "path": "definition.pbir",
                    "payload": rptDefFile,
                    "payloadType": "InlineBase64",
                },
            ]
        }
    }

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/updateDefinition",
        method="post",
        payload=payload,
        lro_return_status_code=True,
        status_codes=None,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The '{report}' report within the '{workspace_name}' workspace has been successfully updated."
    )


@log
def get_report_definition(
    report: str | UUID,
    workspace: Optional[str | UUID] = None,
    return_dataframe: bool = True,
) -> pd.DataFrame | dict:
    """
    Gets the collection of definition files of a report.

    This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    report : str | uuid.UUID
        Name or ID of the report.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    return_dataframe : bool, default=True
        If True, returns a dataframe. If False, returns a json dictionary.

    Returns
    -------
    pandas.DataFrame | dict
        The collection of report definition files as a pandas dataframe (or as a json dictionary if return_dataframe=False).
    """

    return _get_item_definition(
        item=report,
        type="Report",
        workspace=workspace,
        return_dataframe=return_dataframe,
    )


@log
def create_model_bpa_report(
    report: Optional[str] = icons.model_bpa_name,
    dataset: Optional[str] = icons.model_bpa_name,
    dataset_workspace: Optional[str | UUID] = None,
):
    """
    Dynamically generates a Best Practice Analyzer report for analyzing semantic models.

    Parameters
    ----------
    report : str, default='ModelBPA'
        Name of the report.
        Defaults to 'ModelBPA'.
    dataset : str, default='ModelBPA'
        Name of the semantic model which feeds this report.
        Defaults to 'ModelBPA'.
    dataset_workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID in which the semantic model resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (dataset_workspace_name, dataset_workspace_id) = resolve_workspace_name_and_id(
        dataset_workspace
    )

    dfI = fabric.list_items(workspace=dataset_workspace_id, type="SemanticModel")
    dfI_filt = dfI[dfI["Display Name"] == dataset]

    if len(dfI_filt) == 0:
        raise ValueError(
            f"The '{dataset}' semantic model does not exist within the '{dataset_workspace_name}' workspace."
        )

    dfR = fabric.list_reports(workspace=dataset_workspace_id)
    dfR_filt = dfR[dfR["Name"] == report]
    # dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)

    current_dir = os.path.dirname(__file__)
    # directory_path = os.path.join(current_dir, "_bpareporttemplate")
    # len_dir_path = len(directory_path) + 1

    # request_body = {"displayName": report, "definition": {"parts": []}}

    # def get_all_file_paths(directory):
    #     file_paths = []

    #     for root, directories, files in os.walk(directory):
    #         for filename in files:
    #             full_path = os.path.join(root, filename)
    #             file_paths.append(full_path)

    #     return file_paths

    # all_files = get_all_file_paths(directory_path)

    # for file_path in all_files:
    #     fp = file_path[len_dir_path:]
    #     with open(file_path, "r") as file:
    #         json_file = json.load(file)
    #         if fp == 'definition.pbir':
    #             conn_string = f"Data Source=powerbi://api.powerbi.com/v1.0/myorg/{dataset_workspace};Initial Catalog={dataset};Integrated Security=ClaimsToken"
    #             json_file['datasetReference']['byConnection']['connectionString'] = conn_string
    #             json_file['datasetReference']['byConnection']['pbiModelDatabaseName'] = dataset_id
    #         part = {
    #             "path": fp,
    #             "payload": _conv_b64(json_file),
    #             "payloadType": "InlineBase64",
    #         }

    #         request_body["definition"]["parts"].append(part)

    # _create_report(
    #     report=report,
    #     request_body=request_body,
    #     dataset=dataset,
    #     report_workspace=dataset_workspace,
    #     dataset_workspace=dataset_workspace,
    # )

    json_file_path = os.path.join(current_dir, "_BPAReportTemplate.json")
    with open(json_file_path, "r") as file:
        report_json = json.load(file)

    if len(dfR_filt) > 0:
        update_report_from_reportjson(
            report=report, report_json=report_json, workspace=dataset_workspace_id
        )
    else:
        create_report_from_reportjson(
            report=report,
            dataset=dataset,
            report_json=report_json,
            workspace=dataset_workspace_id,
        )


def _create_report(
    report: str,
    request_body: dict,
    dataset: str,
    dataset_workspace: Optional[str] = None,
    report_workspace: Optional[str] = None,
    overwrite: bool = False,
):

    from sempy_labs.report import report_rebind

    (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
        workspace=report_workspace
    )

    dfR = fabric.list_reports(workspace=report_workspace)
    dfR_filt = dfR[dfR["Name"] == report]

    updated_report = False
    # Create the report if it does not exist
    if dfR_filt.empty:
        _base_api(
            request=f"/v1/workspaces/{report_workspace_id}/reports",
            method="post",
            payload=request_body,
            lro_return_status_code=True,
            status_codes=[201, 202],
        )

        print(
            f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace_name}' workspace."
        )
        updated_report = True
    # Update the report if it exists
    elif not dfR_filt.empty and overwrite:
        report_id = dfR_filt["Id"].iloc[0]
        _base_api(
            request=f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
            method="post",
            payload=request_body,
            lro_return_status_code=True,
            status_codes=None,
        )
        print(
            f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace_name}' workspace."
        )
        updated_report = True
    else:
        raise ValueError(
            f"{icons.red_dot} The '{report}' report within the '{report_workspace_name}' workspace already exists and the 'overwrite' parameter was set to False."
        )

    # Rebind the report to the semantic model to make sure it points at the correct semantic model
    if updated_report:
        report_rebind(
            report=report,
            dataset=dataset,
            report_workspace=report_workspace,
            dataset_workspace=dataset_workspace,
        )


def _get_report(
    report: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (report_name, report_id) = resolve_item_name_and_id(
        item=report, type="Report", workspace=workspace
    )

    response = _base_api(
        request=f"v1.0/myorg/groups/{workspace_id}/reports/{report_id}"
    )
    result = response.json()

    new_data = {
        "Id": result.get("id"),
        "Report Type": result.get("reportType"),
        "Name": result.get("name"),
        "Web Url": result.get("webUrl"),
        "Embed Url": result.get("embedUrl"),
        "Is From Pbix": result.get("isFromPbix"),
        "Is Owned By Me": result.get("isOwnedByMe"),
        "Dataset Id": result.get("datasetId"),
        "Dataset Workspace Id": result.get("datasetWorkspaceId"),
        "Users": result.get("users") if result.get("users") is not None else [],
        "Subscriptions": (
            result.get("subscriptions")
            if result.get("subscriptions") is not None
            else []
        ),
    }

    df = pd.DataFrame([new_data])

    column_map = {
        "Is From Pbix": "bool",
        "Is Owned By Me": "bool",
    }
    _update_dataframe_datatypes(dataframe=df, column_map=column_map)

    return df
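
A short usage sketch for the public functions in this module, assuming they are re-exported by sempy_labs/report/__init__.py and that the code runs inside a Fabric notebook; the workspace and report names are placeholders:

    from sempy_labs import report as rep

    # Generate (or refresh) the Best Practice Analyzer report; the 'ModelBPA'
    # semantic model must already exist in the target workspace, otherwise the
    # function raises ValueError as shown above.
    rep.create_model_bpa_report(dataset="ModelBPA", dataset_workspace="My Workspace")

    # Fetch a report's definition parts, tweak report.json, then push the change back.
    parts = rep.get_report_definition(
        report="Sales Report", workspace="My Workspace", return_dataframe=False
    )
    # ... edit the decoded report.json payload into `new_report_json` (a dict) ...
    # rep.update_report_from_reportjson(
    #     report="Sales Report", report_json=new_report_json, workspace="My Workspace"
    # )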
sempy_labs/report/_paginated.py (new file)
@@ -0,0 +1,76 @@
from typing import Optional
import pandas as pd
from uuid import UUID
from sempy_labs._helper_functions import (
    resolve_workspace_id,
    _base_api,
    resolve_item_id,
    _create_dataframe,
)
from sempy._utils._log import log


@log
def get_report_datasources(
    report: str | UUID,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    report : str | uuid.UUID
        Name or ID of the Power BI report.
    workspace : str | uuid.UUID, default=None
        The name or ID of the Fabric workspace in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of data sources for the specified paginated report (RDL) from the specified workspace.
    """

    columns = {
        "Report Name": "str",
        "Report Id": "str",
        "Datasource Id": "str",
        "Datasource Type": "str",
        "Gateway Id": "str",
        "Server": "str",
        "Database": "str",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)
    report_id = resolve_item_id(
        item=report, type="PaginatedReport", workspace=workspace
    )

    response = _base_api(
        request=f"v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources",
        client="fabric_sp",
    )

    rows = []
    for i in response.json().get("value", []):
        conn = i.get("connectionDetails", {})
        rows.append(
            {
                "Report Name": report,
                "Report Id": report_id,
                "Datasource Id": i.get("datasourceId"),
                "Datasource Type": i.get("datasourceType"),
                "Gateway Id": i.get("gatewayId"),
                "Server": conn.get("server") if conn else None,
                "Database": conn.get("database") if conn else None,
            }
        )
    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
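
A usage sketch for the paginated-report helper above, again assuming it is re-exported from sempy_labs.report; the report and workspace names are placeholders:

    from sempy_labs import report as rep

    # List the data sources (type, gateway, server, database) behind an RDL report.
    df = rep.get_report_datasources(report="Monthly Invoices", workspace="My Workspace")
    print(df[["Datasource Type", "Server", "Database"]])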