semantic-link-labs 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (53)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.5.0.dist-info/RECORD +53 -0
  3. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +51 -27
  5. sempy_labs/_ai.py +32 -51
  6. sempy_labs/_clear_cache.py +2 -3
  7. sempy_labs/_connections.py +39 -38
  8. sempy_labs/_dax.py +5 -9
  9. sempy_labs/_generate_semantic_model.py +15 -21
  10. sempy_labs/_helper_functions.py +20 -25
  11. sempy_labs/_icons.py +6 -0
  12. sempy_labs/_list_functions.py +1172 -392
  13. sempy_labs/_model_auto_build.py +3 -5
  14. sempy_labs/_model_bpa.py +20 -24
  15. sempy_labs/_model_dependencies.py +7 -14
  16. sempy_labs/_one_lake_integration.py +14 -24
  17. sempy_labs/_query_scale_out.py +13 -31
  18. sempy_labs/_refresh_semantic_model.py +8 -18
  19. sempy_labs/_translations.py +5 -5
  20. sempy_labs/_vertipaq.py +11 -18
  21. sempy_labs/directlake/_directlake_schema_compare.py +11 -15
  22. sempy_labs/directlake/_directlake_schema_sync.py +35 -40
  23. sempy_labs/directlake/_fallback.py +3 -7
  24. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  25. sempy_labs/directlake/_get_shared_expression.py +5 -11
  26. sempy_labs/directlake/_guardrails.py +5 -7
  27. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -26
  28. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -4
  29. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +11 -16
  30. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -15
  31. sempy_labs/directlake/_warm_cache.py +10 -15
  32. sempy_labs/lakehouse/__init__.py +0 -2
  33. sempy_labs/lakehouse/_get_lakehouse_columns.py +4 -3
  34. sempy_labs/lakehouse/_get_lakehouse_tables.py +12 -11
  35. sempy_labs/lakehouse/_lakehouse.py +6 -7
  36. sempy_labs/lakehouse/_shortcuts.py +10 -111
  37. sempy_labs/migration/__init__.py +4 -2
  38. sempy_labs/migration/_create_pqt_file.py +5 -14
  39. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  40. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  41. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +3 -8
  42. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  43. sempy_labs/migration/_migration_validation.py +5 -164
  44. sempy_labs/migration/_refresh_calc_tables.py +5 -5
  45. sempy_labs/report/__init__.py +2 -2
  46. sempy_labs/report/_generate_report.py +14 -19
  47. sempy_labs/report/_report_functions.py +41 -83
  48. sempy_labs/report/_report_rebind.py +43 -44
  49. sempy_labs/tom/__init__.py +6 -0
  50. sempy_labs/{_tom.py → tom/_model.py} +274 -337
  51. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/LICENSE +0 -0
  53. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/top_level.txt +0 -0
@@ -1,173 +1,10 @@
1
1
  import sempy
2
2
  import sempy.fabric as fabric
3
3
  import pandas as pd
4
- from sempy_labs._helper_functions import create_relationship_name
5
- from sempy_labs._tom import connect_semantic_model
6
4
  from typing import List, Optional, Union
5
+ from sempy_labs._list_functions import list_semantic_model_objects
7
6
  from sempy._utils._log import log
8
7
 
9
-
10
- def list_semantic_model_objects(dataset: str, workspace: Optional[str] = None):
11
- """
12
- Shows a list of semantic model objects.
13
-
14
- Parameters
15
- ----------
16
- dataset : str
17
- Name of the semantic model.
18
- workspace : str, default=None
19
- The Fabric workspace name.
20
- Defaults to None which resolves to the workspace of the attached lakehouse
21
- or if no lakehouse attached, resolves to the workspace of the notebook.
22
-
23
-
24
- Returns
25
- -------
26
- pandas.DataFrame
27
- A pandas dataframe showing a list of objects in the semantic model
28
- """
29
-
30
- df = pd.DataFrame(columns=["Parent Name", "Object Name", "Object Type"])
31
- with connect_semantic_model(
32
- dataset=dataset, workspace=workspace, readonly=True
33
- ) as tom:
34
- for t in tom.model.Tables:
35
- if t.CalculationGroup is not None:
36
- new_data = {
37
- "Parent Name": t.Parent.Name,
38
- "Object Name": t.Name,
39
- "Object Type": "Calculation Group",
40
- }
41
- df = pd.concat(
42
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
43
- )
44
- for ci in t.CalculationGroup.CalculationItems:
45
- new_data = {
46
- "Parent Name": t.Name,
47
- "Object Name": ci.Name,
48
- "Object Type": str(ci.ObjectType),
49
- }
50
- df = pd.concat(
51
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
52
- )
53
- elif any(str(p.SourceType) == "Calculated" for p in t.Partitions):
54
- new_data = {
55
- "Parent Name": t.Parent.Name,
56
- "Object Name": t.Name,
57
- "Object Type": "Calculated Table",
58
- }
59
- df = pd.concat(
60
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
61
- )
62
- else:
63
- new_data = {
64
- "Parent Name": t.Parent.Name,
65
- "Object Name": t.Name,
66
- "Object Type": str(t.ObjectType),
67
- }
68
- df = pd.concat(
69
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
70
- )
71
- for c in t.Columns:
72
- if str(c.Type) != "RowNumber":
73
- if str(c.Type) == "Calculated":
74
- new_data = {
75
- "Parent Name": c.Parent.Name,
76
- "Object Name": c.Name,
77
- "Object Type": "Calculated Column",
78
- }
79
- df = pd.concat(
80
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
81
- )
82
- else:
83
- new_data = {
84
- "Parent Name": c.Parent.Name,
85
- "Object Name": c.Name,
86
- "Object Type": str(c.ObjectType),
87
- }
88
- df = pd.concat(
89
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
90
- )
91
- for m in t.Measures:
92
- new_data = {
93
- "Parent Name": m.Parent.Name,
94
- "Object Name": m.Name,
95
- "Object Type": str(m.ObjectType),
96
- }
97
- df = pd.concat(
98
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
99
- )
100
- for h in t.Hierarchies:
101
- new_data = {
102
- "Parent Name": h.Parent.Name,
103
- "Object Name": h.Name,
104
- "Object Type": str(h.ObjectType),
105
- }
106
- df = pd.concat(
107
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
108
- )
109
- for l in h.Levels:
110
- new_data = {
111
- "Parent Name": l.Parent.Name,
112
- "Object Name": l.Name,
113
- "Object Type": str(l.ObjectType),
114
- }
115
- df = pd.concat(
116
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
117
- )
118
- for p in t.Partitions:
119
- new_data = {
120
- "Parent Name": p.Parent.Name,
121
- "Object Name": p.Name,
122
- "Object Type": str(p.ObjectType),
123
- }
124
- df = pd.concat(
125
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
126
- )
127
- for r in tom.model.Relationships:
128
- rName = create_relationship_name(
129
- r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name
130
- )
131
- new_data = {
132
- "Parent Name": r.Parent.Name,
133
- "Object Name": rName,
134
- "Object Type": str(r.ObjectType),
135
- }
136
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
137
- for role in tom.model.Roles:
138
- new_data = {
139
- "Parent Name": role.Parent.Name,
140
- "Object Name": role.Name,
141
- "Object Type": str(role.ObjectType),
142
- }
143
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
144
- for rls in role.TablePermissions:
145
- new_data = {
146
- "Parent Name": role.Name,
147
- "Object Name": rls.Name,
148
- "Object Type": str(rls.ObjectType),
149
- }
150
- df = pd.concat(
151
- [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
152
- )
153
- for tr in tom.model.Cultures:
154
- new_data = {
155
- "Parent Name": tr.Parent.Name,
156
- "Object Name": tr.Name,
157
- "Object Type": str(tr.ObjectType),
158
- }
159
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
160
- for per in tom.model.Perspectives:
161
- new_data = {
162
- "Parent Name": per.Parent.Name,
163
- "Object Name": per.Name,
164
- "Object Type": str(per.ObjectType),
165
- }
166
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
167
-
168
- return df
169
-
170
-
171
8
  @log
172
9
  def migration_validation(
173
10
  dataset: str,
@@ -199,6 +36,10 @@ def migration_validation(
199
36
  A pandas dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully.
200
37
  """
201
38
 
39
+ workspace = fabric.resolve_workspace_name(workspace)
40
+ if new_dataset_workspace is None:
41
+ new_dataset_workspace = workspace
42
+
202
43
  dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
203
44
  dfB = list_semantic_model_objects(
204
45
  dataset=new_dataset, workspace=new_dataset_workspace
@@ -3,7 +3,7 @@ import sempy.fabric as fabric
3
3
  import pandas as pd
4
4
  import re, datetime, time
5
5
  from pyspark.sql import SparkSession
6
- from sempy_labs._tom import connect_semantic_model
6
+ from sempy_labs.tom import connect_semantic_model
7
7
  from typing import List, Optional, Union
8
8
  from sempy._utils._log import log
9
9
  import sempy_labs._icons as icons
@@ -30,6 +30,8 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
30
30
  timeout = datetime.timedelta(minutes=1)
31
31
  success = False
32
32
 
33
+ workspace = fabric.resolve_workspace_name(workspace)
34
+
33
35
  while not success:
34
36
  try:
35
37
  with connect_semantic_model(
@@ -118,10 +120,8 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
118
120
  print(
119
121
  f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
120
122
  )
121
- except:
122
- print(
123
- f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
124
- )
123
+ except Exception as e:
124
+ raise ValueError(f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse.") from e
125
125
 
126
126
  except Exception as e:
127
127
  if datetime.datetime.now() - start_time > timeout:
@@ -1,6 +1,6 @@
1
1
  from sempy_labs.report._generate_report import (
2
2
  create_report_from_reportjson,
3
- update_report_from_reportjson,
3
+ #update_report_from_reportjson,
4
4
  )
5
5
  from sempy_labs.report._report_functions import (
6
6
  get_report_json,
@@ -20,7 +20,7 @@ from sempy_labs.report._report_rebind import (
20
20
 
21
21
  __all__ = [
22
22
  "create_report_from_reportjson",
23
- "update_report_from_reportjson",
23
+ #"update_report_from_reportjson",
24
24
  "get_report_json",
25
25
  # report_dependency_tree,
26
26
  "export_report",
@@ -4,13 +4,14 @@ import pandas as pd
4
4
  import json, base64, time
5
5
  from typing import Optional
6
6
  from sempy_labs._helper_functions import resolve_workspace_name_and_id
7
+ import sempy_labs._icons as icons
7
8
 
8
9
 
9
10
  def create_report_from_reportjson(
10
11
  report: str,
11
12
  dataset: str,
12
- report_json: str,
13
- theme_json: Optional[str] = None,
13
+ report_json: dict,
14
+ theme_json: Optional[dict] = None,
14
15
  workspace: Optional[str] = None,
15
16
  ):
16
17
  """
@@ -22,9 +23,9 @@ def create_report_from_reportjson(
22
23
  Name of the report.
23
24
  dataset : str
24
25
  Name of the semantic model to connect to the report.
25
- report_json : str
26
+ report_json : dict
26
27
  The report.json file to be used to create the report.
27
- theme_json : str, default=None
28
+ theme_json : dict, default=None
28
29
  The theme.json file to be used for the theme of the report.
29
30
  workspace : str, default=None
30
31
  The Fabric workspace name.
@@ -40,10 +41,7 @@ def create_report_from_reportjson(
40
41
  dfI_model = dfI_m[(dfI_m["Display Name"] == dataset)]
41
42
 
42
43
  if len(dfI_model) == 0:
43
- print(
44
- f"ERROR: The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
45
- )
46
- return
44
+ raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace.")
47
45
 
48
46
  datasetId = dfI_model["Id"].iloc[0]
49
47
 
@@ -52,7 +50,7 @@ def create_report_from_reportjson(
52
50
 
53
51
  if len(dfI_rpt) > 0:
54
52
  print(
55
- f"WARNING: '{report}' already exists as a report in the '{workspace}' workspace."
53
+ f"{icons.yellow_dot} '{report}' already exists as a report in the '{workspace}' workspace."
56
54
  )
57
55
  return
58
56
 
@@ -82,7 +80,7 @@ def create_report_from_reportjson(
82
80
  definitionPBIR = conv_b64(defPBIR)
83
81
  payloadReportJson = conv_b64(report_json)
84
82
 
85
- if theme_json == None:
83
+ if theme_json is None:
86
84
  request_body = {
87
85
  "displayName": report,
88
86
  "type": objectType,
@@ -132,7 +130,7 @@ def create_report_from_reportjson(
132
130
  response = client.post(f"/v1/workspaces/{workspace_id}/items", json=request_body)
133
131
 
134
132
  if response.status_code == 201:
135
- print("Report creation succeeded")
133
+ print(f"{icons.green_dot} Report creation succeeded")
136
134
  print(response.json())
137
135
  elif response.status_code == 202:
138
136
  operationId = response.headers["x-ms-operation-id"]
@@ -143,7 +141,7 @@ def create_report_from_reportjson(
143
141
  response = client.get(f"/v1/operations/{operationId}")
144
142
  response_body = json.loads(response.content)
145
143
  response = client.get(f"/v1/operations/{operationId}/result")
146
- print("Report creation succeeded")
144
+ print(f"{icons.green_dot} Report creation succeeded")
147
145
  print(response.json())
148
146
 
149
147
 
@@ -167,14 +165,11 @@ def update_report_from_reportjson(
167
165
 
168
166
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
169
167
 
170
- objectType = "Report"
171
-
172
168
  dfR = fabric.list_reports(workspace=workspace)
173
169
  dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]
174
170
 
175
171
  if len(dfR_filt) == 0:
176
- print(f"The '{report}' report does not exist in the '{workspace}' workspace.")
177
- return
172
+ raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
178
173
 
179
174
  reportId = dfR_filt["Id"].iloc[0]
180
175
  client = fabric.FabricRestClient()
@@ -215,7 +210,7 @@ def update_report_from_reportjson(
215
210
 
216
211
  request_body = {
217
212
  "displayName": report,
218
- "type": objectType,
213
+ "type": 'Report',
219
214
  "definition": {
220
215
  "parts": [
221
216
  {
@@ -238,7 +233,7 @@ def update_report_from_reportjson(
238
233
  )
239
234
 
240
235
  if response.status_code == 201:
241
- print(f"The '{report}' report has been successfully updated.")
236
+ print(f"{icons.green_dot} The '{report}' report has been successfully updated.")
242
237
  # print(response.json())
243
238
  elif response.status_code == 202:
244
239
  operationId = response.headers["x-ms-operation-id"]
@@ -249,5 +244,5 @@ def update_report_from_reportjson(
249
244
  response = client.get(f"/v1/operations/{operationId}")
250
245
  response_body = json.loads(response.content)
251
246
  response = client.get(f"/v1/operations/{operationId}/result")
252
- print(f"The '{report}' report has been successfully updated.")
247
+ print(f"{icons.green_dot} The '{report}' report has been successfully updated.")
253
248
  # print(response.json())
@@ -1,7 +1,7 @@
1
1
  import sempy
2
2
  import sempy.fabric as fabric
3
3
  import pandas as pd
4
- import json, os, time, base64, copy, re
4
+ import json, os, time, base64, copy
5
5
  from anytree import Node, RenderTree
6
6
  from powerbiclient import Report
7
7
  from synapse.ml.services import Translate
@@ -55,10 +55,7 @@ def get_report_json(
55
55
  dfI_filt = dfI[(dfI["Display Name"] == report)]
56
56
 
57
57
  if len(dfI_filt) == 0:
58
- print(
59
- f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
60
- )
61
- return
58
+ raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
62
59
 
63
60
  itemId = dfI_filt["Id"].iloc[0]
64
61
  response = client.post(
@@ -73,11 +70,8 @@ def get_report_json(
73
70
 
74
71
  if save_to_file_name is not None:
75
72
  lakeAttach = lakehouse_attached()
76
- if lakeAttach == False:
77
- print(
78
- f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
79
- )
80
- return
73
+ if lakeAttach is False:
74
+ raise ValueError(f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
81
75
 
82
76
  lakehouse_id = fabric.get_lakehouse_id()
83
77
  lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
@@ -107,7 +101,7 @@ def report_dependency_tree(workspace: Optional[str] = None):
107
101
  or if no lakehouse attached, resolves to the workspace of the notebook.
108
102
  """
109
103
 
110
- if workspace == None:
104
+ if workspace is None:
111
105
  workspaceId = fabric.get_workspace_id()
112
106
  workspace = fabric.resolve_workspace_name(workspaceId)
113
107
 
@@ -190,11 +184,8 @@ def export_report(
190
184
 
191
185
  lakeAttach = lakehouse_attached()
192
186
 
193
- if lakeAttach == False:
194
- print(
195
- f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
196
- )
197
- return
187
+ if lakeAttach is False:
188
+ raise ValueError(f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
198
189
 
199
190
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
200
191
 
@@ -204,15 +195,10 @@ def export_report(
204
195
  visual_name = [visual_name]
205
196
 
206
197
  if bookmark_name is not None and (page_name is not None or visual_name is not None):
207
- print(
208
- f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
209
- )
210
- return
198
+ raise ValueError(f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set.")
199
+
211
200
  if visual_name is not None and page_name is None:
212
- print(
213
- f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
214
- )
215
- return
201
+ raise ValueError(f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set.")
216
202
 
217
203
  validFormats = {
218
204
  "ACCESSIBLEPDF": ".pdf",
@@ -235,12 +221,9 @@ def export_report(
235
221
 
236
222
  fileExt = validFormats.get(export_format)
237
223
  if fileExt is None:
238
- print(
239
- f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
240
- )
241
- return
224
+ raise ValueError(f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}")
242
225
 
243
- if file_name == None:
226
+ if file_name is None:
244
227
  file_name = report + fileExt
245
228
  else:
246
229
  file_name = file_name + fileExt
@@ -255,10 +238,7 @@ def export_report(
255
238
  ]
256
239
 
257
240
  if len(dfI_filt) == 0:
258
- print(
259
- f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
260
- )
261
- return
241
+ raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
262
242
 
263
243
  reportType = dfI_filt["Type"].iloc[0]
264
244
 
@@ -279,23 +259,15 @@ def export_report(
279
259
  ]
280
260
 
281
261
  if reportType == "Report" and export_format in paginatedOnly:
282
- print(
283
- f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
284
- )
285
- return
262
+ raise ValueError(f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports.")
263
+
286
264
  if reportType == "PaginatedReport" and export_format in pbiOnly:
287
- print(
288
- f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
289
- )
290
- return
265
+ raise ValueError(f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports.")
291
266
 
292
267
  if reportType == "PaginatedReport" and (
293
268
  bookmark_name is not None or page_name is not None or visual_name is not None
294
269
  ):
295
- print(
296
- f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
297
- )
298
- return
270
+ raise ValueError(f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports.")
299
271
 
300
272
  reportId = dfI_filt["Id"].iloc[0]
301
273
  client = fabric.PowerBIRestClient()
@@ -332,19 +304,15 @@ def export_report(
332
304
  for page in page_name:
333
305
  dfPage_filt = dfPage[dfPage["Page ID"] == page]
334
306
  if len(dfPage_filt) == 0:
335
- print(
336
- f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace."
337
- )
338
- return
307
+ raise ValueError(f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace.")
308
+
339
309
  page_dict = {"pageName": page}
340
310
  request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
341
311
 
342
312
  elif page_name is not None and visual_name is not None:
343
313
  if len(page_name) != len(visual_name):
344
- print(
345
- f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
346
- )
347
- return
314
+ raise ValueError(f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'.")
315
+
348
316
  if reportType == "Report":
349
317
  request_body = {"format": export_format, "powerBIReportConfiguration": {}}
350
318
 
@@ -356,10 +324,8 @@ def export_report(
356
324
  (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
357
325
  ]
358
326
  if len(dfVisual_filt) == 0:
359
- print(
360
- f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace."
361
- )
362
- return
327
+ raise ValueError(f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace.")
328
+
363
329
  page_dict = {"pageName": page, "visualName": visual}
364
330
  request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
365
331
  a += 1
@@ -393,9 +359,7 @@ def export_report(
393
359
  )
394
360
  response_body = json.loads(response.content)
395
361
  if response_body["status"] == "Failed":
396
- print(
397
- f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
398
- )
362
+ raise ValueError(f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed.")
399
363
  else:
400
364
  response = client.get(
401
365
  f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
@@ -447,10 +411,7 @@ def clone_report(
447
411
  dfI_filt = dfI[(dfI["Display Name"] == report)]
448
412
 
449
413
  if len(dfI_filt) == 0:
450
- print(
451
- f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
452
- )
453
- return
414
+ raise ValueError(f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace.")
454
415
 
455
416
  reportId = resolve_report_id(report, workspace)
456
417
 
@@ -462,11 +423,11 @@ def clone_report(
462
423
  dfW_filt = dfW[dfW["Name"] == target_workspace]
463
424
 
464
425
  if len(dfW_filt) == 0:
465
- print(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
466
- return
426
+ raise ValueError(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
427
+
467
428
  target_workspace_id = dfW_filt["Id"].iloc[0]
468
429
 
469
- if target_dataset == None:
430
+ if target_dataset is None:
470
431
  dfR = fabric.list_reports(workspace=target_workspace)
471
432
  dfR_filt = dfR[dfR["Name"] == report]
472
433
  target_dataset_id = dfR_filt["Dataset Id"].iloc[0]
@@ -478,10 +439,8 @@ def clone_report(
478
439
  dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
479
440
 
480
441
  if len(dfD_filt) == 0:
481
- print(
482
- f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace."
483
- )
484
- return
442
+ raise ValueError(f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace.")
443
+
485
444
  target_dataset_id = dfD_filt["Dataset Id"].iloc[0]
486
445
 
487
446
  client = fabric.PowerBIRestClient()
@@ -508,9 +467,7 @@ def clone_report(
508
467
  f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the '{target_workspace}' workspace using the '{target_dataset}' semantic model."
509
468
  )
510
469
  else:
511
- print(
512
- f"{icons.red_dot} POST request failed with status code: {response.status_code}"
513
- )
470
+ raise ValueError(f"{icons.red_dot} POST request failed with status code: {response.status_code}")
514
471
 
515
472
 
516
473
  def launch_report(report: str, workspace: Optional[str] = None):
@@ -532,7 +489,7 @@ def launch_report(report: str, workspace: Optional[str] = None):
532
489
  An embedded Power BI report within the notebook.
533
490
  """
534
491
 
535
- from .HelperFunctions import resolve_report_id
492
+ from sempy_labs import resolve_report_id
536
493
 
537
494
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
538
495
 
@@ -562,7 +519,7 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
562
519
  A pandas dataframe showing the pages within a Power BI report and their properties.
563
520
  """
564
521
 
565
- if workspace == None:
522
+ if workspace is None:
566
523
  workspace_id = fabric.get_workspace_id()
567
524
  workspace = fabric.resolve_workspace_name(workspace_id)
568
525
 
@@ -573,14 +530,14 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
573
530
  reportJson = get_report_json(report=report, workspace=workspace)
574
531
 
575
532
  for section in reportJson["sections"]:
576
- pageID = section["name"]
577
- pageName = section["displayName"]
533
+ pageID = section.get("name")
534
+ pageName = section.get("displayName")
578
535
  # pageFilters = section['filters']
579
- pageWidth = section["width"]
580
- pageHeight = section["height"]
536
+ pageWidth = section.get("width")
537
+ pageHeight = section.get("height")
581
538
  visualCount = len(section["visualContainers"])
582
539
  pageHidden = False
583
- pageConfig = section["config"]
540
+ pageConfig = section.get("config")
584
541
  pageConfigJson = json.loads(pageConfig)
585
542
 
586
543
  try:
@@ -626,7 +583,7 @@ def list_report_visuals(report: str, workspace: Optional[str] = None):
626
583
  A pandas dataframe showing the visuals within a Power BI report and their properties.
627
584
  """
628
585
 
629
- if workspace == None:
586
+ if workspace is None:
630
587
  workspace_id = fabric.get_workspace_id()
631
588
  workspace = fabric.resolve_workspace_name(workspace_id)
632
589
 
@@ -681,7 +638,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
681
638
  A pandas dataframe showing the bookmarks within a Power BI report and their properties.
682
639
  """
683
640
 
684
- if workspace == None:
641
+ if workspace is None:
685
642
  workspace_id = fabric.get_workspace_id()
686
643
  workspace = fabric.resolve_workspace_name(workspace_id)
687
644
 
@@ -750,6 +707,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
750
707
  )
751
708
 
752
709
 
710
+ @log
753
711
  def translate_report_titles(
754
712
  report: str, languages: Union[str, List[str]], workspace: Optional[str] = None
755
713
  ):