semantic-link-labs 0.6.0__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (104)
  1. semantic_link_labs-0.7.1.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.1.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +361 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +539 -260
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +367 -0
  39. sempy_labs/_model_bpa_rules.py +19 -8
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +61 -96
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/__init__.py +2 -0
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  57. sempy_labs/lakehouse/_lakehouse.py +66 -9
  58. sempy_labs/lakehouse/_shortcuts.py +1 -1
  59. sempy_labs/migration/_create_pqt_file.py +174 -182
  60. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  61. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  62. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  63. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  64. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  65. sempy_labs/report/_BPAReportTemplate.json +232 -0
  66. sempy_labs/report/__init__.py +6 -2
  67. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  68. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  69. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  92. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  93. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  94. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  95. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  96. sempy_labs/report/_generate_report.py +255 -139
  97. sempy_labs/report/_report_functions.py +26 -33
  98. sempy_labs/report/_report_rebind.py +31 -26
  99. sempy_labs/tom/_model.py +75 -58
  100. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  101. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  102. sempy_labs/directlake/_fallback.py +0 -60
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/LICENSE +0 -0
  104. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/top_level.txt +0 -0
@@ -1,11 +1,18 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  import json
4
- import base64
4
+ import os
5
5
  import time
6
6
  from typing import Optional
7
- from sempy_labs._helper_functions import resolve_workspace_name_and_id
7
+ from sempy_labs._helper_functions import (
8
+ resolve_workspace_name_and_id,
9
+ _conv_b64,
10
+ resolve_report_id,
11
+ lro,
12
+ )
8
13
  import sempy_labs._icons as icons
14
+ from sempy._utils._log import log
15
+ from sempy.fabric.exceptions import FabricHTTPException
9
16
 
10
17
 
11
18
  def create_report_from_reportjson(
@@ -36,10 +43,9 @@ def create_report_from_reportjson(
36
43
 
37
44
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
38
45
 
39
- objectType = "Report"
46
+ dfI = fabric.list_items(workspace=workspace)
40
47
 
41
- dfI_m = fabric.list_items(workspace=workspace, type="SemanticModel")
42
- dfI_model = dfI_m[(dfI_m["Display Name"] == dataset)]
48
+ dfI_model = dfI[(dfI["Display Name"] == dataset) & (dfI["Type"] == "SemanticModel")]
43
49
 
44
50
  if len(dfI_model) == 0:
45
51
  raise ValueError(
@@ -48,8 +54,7 @@ def create_report_from_reportjson(
48
54
 
49
55
  datasetId = dfI_model["Id"].iloc[0]
50
56
 
51
- dfI_r = fabric.list_items(workspace=workspace, type="Report")
52
- dfI_rpt = dfI_r[(dfI_r["Display Name"] == report)]
57
+ dfI_rpt = dfI[(dfI["Display Name"] == report) & (dfI["Type"] == "Report")]
53
58
 
54
59
  if len(dfI_rpt) > 0:
55
60
  print(
@@ -73,83 +78,50 @@ def create_report_from_reportjson(
73
78
  },
74
79
  }
75
80
 
76
- def conv_b64(file):
77
-
78
- loadJson = json.dumps(file)
79
- f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
80
-
81
- return f
82
-
83
- definitionPBIR = conv_b64(defPBIR)
84
- payloadReportJson = conv_b64(report_json)
85
-
86
- if theme_json is None:
87
- request_body = {
88
- "displayName": report,
89
- "type": objectType,
90
- "definition": {
91
- "parts": [
92
- {
93
- "path": "report.json",
94
- "payload": payloadReportJson,
95
- "payloadType": "InlineBase64",
96
- },
97
- {
98
- "path": "definition.pbir",
99
- "payload": definitionPBIR,
100
- "payloadType": "InlineBase64",
101
- },
102
- ]
103
- },
104
- }
105
- else:
106
- payloadThemeJson = conv_b64(theme_json)
107
- themeID = theme_json["payload"]["blob"]["displayName"]
108
- themePath = "StaticResources/SharedResources/BaseThemes/" + themeID + ".json"
109
- request_body = {
110
- "displayName": report,
111
- "type": objectType,
112
- "definition": {
113
- "parts": [
114
- {
115
- "path": "report.json",
116
- "payload": payloadReportJson,
117
- "payloadType": "InlineBase64",
118
- },
119
- {
120
- "path": themePath,
121
- "payload": payloadThemeJson,
122
- "payloadType": "InlineBase64",
123
- },
124
- {
125
- "path": "definition.pbir",
126
- "payload": definitionPBIR,
127
- "payloadType": "InlineBase64",
128
- },
129
- ]
130
- },
81
+ definitionPBIR = _conv_b64(defPBIR)
82
+ payloadReportJson = _conv_b64(report_json)
83
+
84
+ request_body = {
85
+ "displayName": report,
86
+ "definition": {
87
+ "parts": [
88
+ {
89
+ "path": "report.json",
90
+ "payload": payloadReportJson,
91
+ "payloadType": "InlineBase64",
92
+ },
93
+ {
94
+ "path": "definition.pbir",
95
+ "payload": definitionPBIR,
96
+ "payloadType": "InlineBase64",
97
+ },
98
+ ]
99
+ },
100
+ }
101
+
102
+ if theme_json is not None:
103
+ theme_payload = _conv_b64(theme_json)
104
+ theme_id = theme_json["payload"]["blob"]["displayName"]
105
+ theme_path = f"StaticResources/SharedResources/BaseThemes/{theme_id}.json"
106
+
107
+ part = {
108
+ "path": theme_path,
109
+ "payload": theme_payload,
110
+ "payloadType": "InlineBase64",
131
111
  }
112
+ request_body["definition"]["parts"].append(part)
113
+
114
+ response = client.post(f"/v1/workspaces/{workspace_id}/reports", json=request_body)
132
115
 
133
- response = client.post(f"/v1/workspaces/{workspace_id}/items", json=request_body)
116
+ lro(client, response, status_codes=[201, 202])
134
117
 
135
- if response.status_code == 201:
136
- print(f"{icons.green_dot} Report creation succeeded")
137
- print(response.json())
138
- elif response.status_code == 202:
139
- operationId = response.headers["x-ms-operation-id"]
140
- response = client.get(f"/v1/operations/{operationId}")
141
- response_body = json.loads(response.content)
142
- while response_body["status"] != "Succeeded":
143
- time.sleep(3)
144
- response = client.get(f"/v1/operations/{operationId}")
145
- response_body = json.loads(response.content)
146
- response = client.get(f"/v1/operations/{operationId}/result")
147
- print(f"{icons.green_dot} Report creation succeeded")
148
- print(response.json())
118
+ print(
119
+ f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace}' workspace."
120
+ )
149
121
 
150
122
 
151
123
  def update_report_from_reportjson(
152
- report: str, report_json: str, workspace: Optional[str] = None
124
+ report: str, report_json: dict, workspace: Optional[str] = None
153
125
  ):
154
126
  """
155
127
  Updates a report based on a report.json file.
@@ -158,7 +130,7 @@ def update_report_from_reportjson(
158
130
  ----------
159
131
  report : str
160
132
  Name of the report.
161
- report_json : str
133
+ report_json : dict
162
134
  The report.json file to be used to update the report.
163
135
  workspace : str, default=None
164
136
  The Fabric workspace name in which the report resides.
@@ -167,55 +139,15 @@ def update_report_from_reportjson(
167
139
  """
168
140
 
169
141
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
142
+ report_id = resolve_report_id(report=report, workspace=workspace)
170
143
 
171
- dfR = fabric.list_reports(workspace=workspace)
172
- dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]
173
-
174
- if len(dfR_filt) == 0:
175
- raise ValueError(
176
- f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
177
- )
178
-
179
- reportId = dfR_filt["Id"].iloc[0]
180
- client = fabric.FabricRestClient()
181
-
182
- response = client.post(
183
- f"/v1/workspaces/{workspace_id}/items/{reportId}/getDefinition"
184
- )
185
- df_items = pd.json_normalize(response.json()["definition"]["parts"])
144
+ # Get the existing PBIR file
145
+ df_items = get_report_definition(report=report, workspace=workspace)
186
146
  df_items_filt = df_items[df_items["path"] == "definition.pbir"]
187
147
  rptDefFile = df_items_filt["payload"].iloc[0]
188
- # datasetId = dfR_filt['Dataset Id'].iloc[0]
189
- # datasetWorkspaceId = dfR_filt['Dataset Workspace Id'].iloc[0]
190
-
191
- # defPBIR = {
192
- # "version": "1.0",
193
- # "datasetReference": {
194
- # "byPath": None,
195
- # "byConnection": {
196
- # "connectionString": None,
197
- # "pbiServiceModelId": None,
198
- # "pbiModelVirtualServerName": "sobe_wowvirtualserver",
199
- # "pbiModelDatabaseName": datasetId,
200
- # "name": "EntityDataSource",
201
- # "connectionType": "pbiServiceXmlaStyleLive"
202
- # }
203
- # }
204
- # }
205
-
206
- def conv_b64(file):
207
-
208
- loadJson = json.dumps(file)
209
- f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
210
-
211
- return f
212
-
213
- # definitionPBIR = conv_b64(defPBIR)
214
- payloadReportJson = conv_b64(report_json)
148
+ payloadReportJson = _conv_b64(report_json)
215
149
 
216
150
  request_body = {
217
- "displayName": report,
218
- "type": "Report",
219
151
  "definition": {
220
152
  "parts": [
221
153
  {
@@ -229,25 +161,209 @@ def update_report_from_reportjson(
229
161
  "payloadType": "InlineBase64",
230
162
  },
231
163
  ]
232
- },
164
+ }
233
165
  }
234
166
 
167
+ client = fabric.FabricRestClient()
235
168
  response = client.post(
236
- f"/v1/workspaces/{workspace_id}/reports/{reportId}/updateDefinition",
169
+ f"/v1/workspaces/{workspace_id}/reports/{report_id}/updateDefinition",
237
170
  json=request_body,
238
171
  )
239
172
 
240
- if response.status_code == 201:
241
- print(f"{icons.green_dot} The '{report}' report has been successfully updated.")
242
- # print(response.json())
243
- elif response.status_code == 202:
244
- operationId = response.headers["x-ms-operation-id"]
245
- response = client.get(f"/v1/operations/{operationId}")
246
- response_body = json.loads(response.content)
247
- while response_body["status"] != "Succeeded":
248
- time.sleep(3)
249
- response = client.get(f"/v1/operations/{operationId}")
250
- response_body = json.loads(response.content)
251
- response = client.get(f"/v1/operations/{operationId}/result")
252
- print(f"{icons.green_dot} The '{report}' report has been successfully updated.")
253
- # print(response.json())
173
+ lro(client, response, return_status_code=True)
174
+
175
+ print(
176
+ f"{icons.green_dot} The '{report}' report within the '{workspace}' workspace has been successfully updated."
177
+ )
178
+
179
+
180
+ def get_report_definition(report: str, workspace: Optional[str] = None) -> pd.DataFrame:
181
+ """
182
+ Gets the collection of definition files of a report.
183
+
184
+ Parameters
185
+ ----------
186
+ report : str
187
+ Name of the report.
188
+ workspace : str, default=None
189
+ The Fabric workspace name in which the report resides.
190
+ Defaults to None which resolves to the workspace of the attached lakehouse
191
+ or if no lakehouse attached, resolves to the workspace of the notebook.
192
+
193
+ Returns
194
+ -------
195
+ pandas.DataFrame
196
+ The collection of report definition files within a pandas dataframe.
197
+ """
198
+
199
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
200
+
201
+ report_id = resolve_report_id(report=report, workspace=workspace)
202
+ client = fabric.FabricRestClient()
203
+ response = client.post(
204
+ f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
205
+ )
206
+
207
+ result = lro(client, response).json()
208
+ rdef = pd.json_normalize(result["definition"]["parts"])
209
+
210
+ return rdef
211
+
212
+
213
+ @log
214
+ def create_model_bpa_report(
215
+ report: Optional[str] = icons.model_bpa_name,
216
+ dataset: Optional[str] = icons.model_bpa_name,
217
+ dataset_workspace: Optional[str] = None,
218
+ ):
219
+ """
220
+ Dynamically generates a Best Practice Analyzer report for analyzing semantic models.
221
+
222
+ Parameters
223
+ ----------
224
+ report : str, default='ModelBPA'
225
+ Name of the report.
226
+ Defaults to 'ModelBPA'.
227
+ dataset : str, default='ModelBPA'
228
+ Name of the semantic model which feeds this report.
229
+ Defaults to 'ModelBPA'
230
+ dataset_workspace : str, default=None
231
+ The Fabric workspace name in which the semantic model resides.
232
+ Defaults to None which resolves to the workspace of the attached lakehouse
233
+ or if no lakehouse attached, resolves to the workspace of the notebook.
234
+
235
+ """
236
+
237
+ # from sempy_labs._helper_functions import resolve_dataset_id
238
+
239
+ dfI = fabric.list_items(workspace=dataset_workspace, type="SemanticModel")
240
+ dfI_filt = dfI[dfI["Display Name"] == dataset]
241
+
242
+ if len(dfI_filt) == 0:
243
+ raise ValueError(
244
+ f"The '{dataset}' semantic model does not exist within the '{dataset_workspace}' workspace."
245
+ )
246
+
247
+ dfR = fabric.list_reports(workspace=dataset_workspace)
248
+ dfR_filt = dfR[dfR["Name"] == report]
249
+ # dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
250
+
251
+ current_dir = os.path.dirname(__file__)
252
+ # directory_path = os.path.join(current_dir, "_bpareporttemplate")
253
+ # len_dir_path = len(directory_path) + 1
254
+
255
+ # request_body = {"displayName": report, "definition": {"parts": []}}
256
+
257
+ # def get_all_file_paths(directory):
258
+ # file_paths = []
259
+
260
+ # for root, directories, files in os.walk(directory):
261
+ # for filename in files:
262
+ # full_path = os.path.join(root, filename)
263
+ # file_paths.append(full_path)
264
+
265
+ # return file_paths
266
+
267
+ # all_files = get_all_file_paths(directory_path)
268
+
269
+ # for file_path in all_files:
270
+ # fp = file_path[len_dir_path:]
271
+ # with open(file_path, "r") as file:
272
+ # json_file = json.load(file)
273
+ # if fp == 'definition.pbir':
274
+ # conn_string = f"Data Source=powerbi://api.powerbi.com/v1.0/myorg/{dataset_workspace};Initial Catalog={dataset};Integrated Security=ClaimsToken"
275
+ # json_file['datasetReference']['byConnection']['connectionString'] = conn_string
276
+ # json_file['datasetReference']['byConnection']['pbiModelDatabaseName'] = dataset_id
277
+ # part = {
278
+ # "path": fp,
279
+ # "payload": _conv_b64(json_file),
280
+ # "payloadType": "InlineBase64",
281
+ # }
282
+
283
+ # request_body["definition"]["parts"].append(part)
284
+
285
+ # _create_report(
286
+ # report=report,
287
+ # request_body=request_body,
288
+ # dataset=dataset,
289
+ # report_workspace=dataset_workspace,
290
+ # dataset_workspace=dataset_workspace,
291
+ # )
292
+
293
+ json_file_path = os.path.join(current_dir, "_BPAReportTemplate.json")
294
+ with open(json_file_path, "r") as file:
295
+ report_json = json.load(file)
296
+
297
+ if len(dfR_filt) > 0:
298
+ update_report_from_reportjson(
299
+ report=report, report_json=report_json, workspace=dataset_workspace
300
+ )
301
+ else:
302
+ create_report_from_reportjson(
303
+ report=report,
304
+ dataset=dataset,
305
+ report_json=report_json,
306
+ workspace=dataset_workspace,
307
+ )
308
+
309
+
310
+ def _create_report(
311
+ report: str,
312
+ request_body: dict,
313
+ dataset: str,
314
+ dataset_workspace: Optional[str] = None,
315
+ report_workspace: Optional[str] = None,
316
+ update_if_exists: Optional[bool] = False,
317
+ ):
318
+
319
+ from sempy_labs.report import report_rebind
320
+
321
+ report_workspace = fabric.resolve_workspace_name(report_workspace)
322
+ report_workspace_id = fabric.resolve_workspace_id(report_workspace)
323
+ client = fabric.FabricRestClient()
324
+
325
+ dfR = fabric.list_reports(workspace=report_workspace)
326
+ dfR_filt = dfR[dfR["Name"] == report]
327
+
328
+ updated_report = False
329
+
330
+ # Create report if it does not exist
331
+ if len(dfR_filt) == 0:
332
+ response = client.post(
333
+ f"/v1/workspaces/{report_workspace_id}/reports",
334
+ json=request_body,
335
+ lro_wait=True,
336
+ )
337
+ if response.status_code not in [200, 201]:
338
+ raise FabricHTTPException(response)
339
+ print(
340
+ f"{icons.green_dot} The '{report}' report has been created within the '{report_workspace}'"
341
+ )
342
+ updated_report = True
343
+ # Update the report if it exists
344
+ elif len(dfR_filt) > 0 and update_if_exists:
345
+ report_id = dfR_filt["Id"].iloc[0]
346
+ response = client.post(
347
+ f"/v1/workspaces/{report_workspace_id}/reports/{report_id}/updateDefinition",
348
+ json=request_body,
349
+ lro_wait=True,
350
+ )
351
+ if response.status_code not in [200, 201]:
352
+ raise FabricHTTPException(response)
353
+ print(
354
+ f"{icons.green_dot} The '{report}' report has been updated within the '{report_workspace}'"
355
+ )
356
+ updated_report = True
357
+ else:
358
+ raise ValueError(
359
+ f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace already exists and it was selected not to update it if the report already exists."
360
+ )
361
+
362
+ # Rebind the report to the semantic model to make sure it is pointed at the correct semantic model
363
+ if updated_report:
364
+ report_rebind(
365
+ report=report,
366
+ dataset=dataset,
367
+ report_workspace=report_workspace,
368
+ dataset_workspace=dataset_workspace,
369
+ )
@@ -19,8 +19,10 @@ from sempy_labs._helper_functions import (
19
19
  resolve_lakehouse_name,
20
20
  language_validate,
21
21
  resolve_workspace_name_and_id,
22
+ lro,
23
+ _decode_b64,
22
24
  )
23
- from typing import Any, List, Optional, Union
25
+ from typing import List, Optional, Union
24
26
  from sempy._utils._log import log
25
27
  import sempy_labs._icons as icons
26
28
  from sempy.fabric.exceptions import FabricHTTPException
@@ -30,7 +32,7 @@ def get_report_json(
30
32
  report: str,
31
33
  workspace: Optional[str] = None,
32
34
  save_to_file_name: Optional[str] = None,
33
- ) -> Any:
35
+ ) -> dict:
34
36
  """
35
37
  Gets the report.json file content of a Power BI report.
36
38
 
@@ -39,7 +41,7 @@ def get_report_json(
39
41
  report : str
40
42
  Name of the Power BI report.
41
43
  workspace : str, default=None
42
- The Fabric workspace name.
44
+ The Fabric workspace name in which the report exists.
43
45
  Defaults to None which resolves to the workspace of the attached lakehouse
44
46
  or if no lakehouse attached, resolves to the workspace of the notebook.
45
47
  save_to_file_name : str, default=None
@@ -47,54 +49,47 @@ def get_report_json(
47
49
 
48
50
  Returns
49
51
  -------
50
- Any
52
+ dict
51
53
  The report.json file for a given Power BI report.
52
54
  """
53
55
 
54
56
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
57
+ report_id = resolve_report_id(report=report, workspace=workspace)
58
+ fmt = "PBIR-Legacy"
55
59
 
56
60
  client = fabric.FabricRestClient()
57
-
58
- dfI = fabric.list_items(workspace=workspace, type="Report")
59
- dfI_filt = dfI[(dfI["Display Name"] == report)]
60
-
61
- if len(dfI_filt) == 0:
62
- raise ValueError(
63
- f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
64
- )
65
-
66
- itemId = dfI_filt["Id"].iloc[0]
67
61
  response = client.post(
68
- f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition"
62
+ f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition?format={fmt}"
69
63
  )
70
- df_items = pd.json_normalize(response.json()["definition"]["parts"])
64
+
65
+ result = lro(client, response).json()
66
+ df_items = pd.json_normalize(result["definition"]["parts"])
71
67
  df_items_filt = df_items[df_items["path"] == "report.json"]
72
68
  payload = df_items_filt["payload"].iloc[0]
73
-
74
- reportFile = base64.b64decode(payload).decode("utf-8")
75
- reportJson = json.loads(reportFile)
69
+ report_file = _decode_b64(payload)
70
+ report_json = json.loads(report_file)
76
71
 
77
72
  if save_to_file_name is not None:
78
- lakeAttach = lakehouse_attached()
79
- if lakeAttach is False:
73
+ if not lakehouse_attached():
80
74
  raise ValueError(
81
75
  f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
82
76
  )
83
77
 
84
78
  lakehouse_id = fabric.get_lakehouse_id()
85
- lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
79
+ lake_workspace = fabric.resolve_workspace_name()
80
+ lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
86
81
  folderPath = "/lakehouse/default/Files"
87
82
  fileExt = ".json"
88
83
  if not save_to_file_name.endswith(fileExt):
89
- save_to_file_name = save_to_file_name + fileExt
84
+ save_to_file_name = f"{save_to_file_name}{fileExt}"
90
85
  filePath = os.path.join(folderPath, save_to_file_name)
91
86
  with open(filePath, "w") as json_file:
92
- json.dump(reportJson, json_file, indent=4)
87
+ json.dump(report_json, json_file, indent=4)
93
88
  print(
94
89
  f"{icons.green_dot} The report.json file for the '{report}' report has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
95
90
  )
96
91
 
97
- return reportJson
92
+ return report_json
98
93
 
99
94
 
100
95
  def report_dependency_tree(workspace: Optional[str] = None):
@@ -132,7 +127,7 @@ def report_dependency_tree(workspace: Optional[str] = None):
132
127
  node_dict = {}
133
128
  rootNode = Node(workspace)
134
129
  node_dict[workspace] = rootNode
135
- rootNode.custom_property = workspace_icon + " "
130
+ rootNode.custom_property = f"{workspace_icon} "
136
131
 
137
132
  for i, r in dfR.iterrows():
138
133
  datasetName = r["Dataset Name"]
@@ -141,10 +136,10 @@ def report_dependency_tree(workspace: Optional[str] = None):
141
136
  if parentNode is None:
142
137
  parentNode = Node(datasetName, parent=rootNode)
143
138
  node_dict[datasetName] = parentNode
144
- parentNode.custom_property = dataset_icon + " "
139
+ parentNode.custom_property = f"{dataset_icon} "
145
140
 
146
141
  child_node = Node(reportName, parent=parentNode)
147
- child_node.custom_property = report_icon + " "
142
+ child_node.custom_property = f"{report_icon} "
148
143
 
149
144
  # Print the tree structure
150
145
  for pre, _, node in RenderTree(node_dict[workspace]):
@@ -190,9 +185,7 @@ def export_report(
190
185
 
191
186
  # https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group
192
187
 
193
- lakeAttach = lakehouse_attached()
194
-
195
- if lakeAttach is False:
188
+ if not lakehouse_attached():
196
189
  raise ValueError(
197
190
  f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
198
191
  )
@@ -240,9 +233,9 @@ def export_report(
240
233
  )
241
234
 
242
235
  if file_name is None:
243
- file_name = report + fileExt
236
+ file_name = f"{report}{fileExt}"
244
237
  else:
245
- file_name = file_name + fileExt
238
+ file_name = f"{file_name}{fileExt}"
246
239
 
247
240
  folderPath = "/lakehouse/default/Files"
248
241
  filePath = os.path.join(folderPath, file_name)
@@ -77,12 +77,10 @@ def report_rebind_all(
77
77
  report_workspace: Optional[str | List[str]] = None,
78
78
  ):
79
79
  """
80
- Rebinds all reports in a workspace which are bound to a specific semantic model to a new semantic model.
80
+ Rebinds all reports across all workspaces which are bound to a specific semantic model to a new semantic model.
81
81
 
82
82
  Parameters
83
83
  ----------
84
- report : str
85
- Name of the Power BI report.
86
84
  dataset : str
87
85
  Name of the semantic model currently binded to the reports.
88
86
  new_dataset : str
@@ -97,35 +95,42 @@ def report_rebind_all(
97
95
  or if no lakehouse attached, resolves to the workspace of the notebook.
98
96
  report_workspace : str | List[str], default=None
99
97
  The name(s) of the Fabric workspace(s) in which the report(s) reside(s).
100
- Defaults to None which resolves to the workspace of the attached lakehouse
101
- or if no lakehouse attached, resolves to the workspace of the notebook.
102
-
103
- Returns
104
- -------
105
-
98
+ Defaults to None which finds all reports in all workspaces which use the semantic model and rebinds them to
99
+ the new semantic model.
106
100
  """
107
101
 
108
- dataset_workspace = fabric.resolve_workspace_name()
102
+ from sempy_labs._list_functions import list_reports_using_semantic_model
103
+
104
+ dataset_workspace = fabric.resolve_workspace_name(dataset_workspace)
109
105
 
110
106
  if new_dataset_workpace is None:
111
107
  new_dataset_workpace = dataset_workspace
112
108
 
113
- if report_workspace is None:
114
- report_workspace = dataset_workspace
115
-
116
109
  if isinstance(report_workspace, str):
117
110
  report_workspace = [report_workspace]
118
111
 
119
- datasetId = resolve_dataset_id(dataset, dataset_workspace)
120
-
121
- for rw in report_workspace:
122
- dfRep = fabric.list_reports(workspace=rw)
123
- dfRep_filt = dfRep[dfRep["Dataset Id"] == datasetId]
124
- for i, r in dfRep_filt.iterrows():
125
- rptName = r["Name"]
126
- report_rebind(
127
- report=rptName,
128
- dataset=new_dataset,
129
- report_workspace=rw,
130
- dataset_workspace=new_dataset_workpace,
131
- )
112
+ dfR = list_reports_using_semantic_model(
113
+ dataset=dataset, workspace=dataset_workspace
114
+ )
115
+
116
+ if len(dfR) == 0:
117
+ print(
118
+ f"{icons.info} The '{dataset}' semantic model within the '{dataset_workspace}' workspace has no dependent reports."
119
+ )
120
+ return
121
+
122
+ if report_workspace is None:
123
+ dfR_filt = dfR.copy()
124
+ else:
125
+ dfR_filt = dfR[dfR["Report Workspace Name"].isin(report_workspace)]
126
+
127
+ for i, r in dfR_filt.iterrows():
128
+ rpt_name = r["Report Name"]
129
+ rpt_wksp = r["Report Workspace Name"]
130
+
131
+ report_rebind(
132
+ report=rpt_name,
133
+ dataset=new_dataset,
134
+ report_workspace=rpt_wksp,
135
+ dataset_workspace=new_dataset_workpace,
136
+ )