semantic-link-labs 0.5.0-py3-none-any.whl → 0.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic. See the registry listing for details.

Files changed (54)
  1. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -13
  5. sempy_labs/_ai.py +43 -24
  6. sempy_labs/_clear_cache.py +4 -5
  7. sempy_labs/_connections.py +77 -70
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +55 -44
  10. sempy_labs/_helper_functions.py +13 -6
  11. sempy_labs/_icons.py +14 -0
  12. sempy_labs/_list_functions.py +491 -304
  13. sempy_labs/_model_auto_build.py +4 -3
  14. sempy_labs/_model_bpa.py +131 -1118
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +14 -12
  17. sempy_labs/_one_lake_integration.py +11 -5
  18. sempy_labs/_query_scale_out.py +89 -81
  19. sempy_labs/_refresh_semantic_model.py +16 -10
  20. sempy_labs/_translations.py +213 -287
  21. sempy_labs/_vertipaq.py +53 -37
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  25. sempy_labs/directlake/_fallback.py +5 -3
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -2
  28. sempy_labs/directlake/_guardrails.py +3 -3
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +132 -9
  33. sempy_labs/directlake/_warm_cache.py +6 -3
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  36. sempy_labs/lakehouse/_lakehouse.py +2 -1
  37. sempy_labs/lakehouse/_shortcuts.py +19 -12
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -15
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +2 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +8 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +10 -5
  48. sempy_labs/report/_report_functions.py +67 -29
  49. sempy_labs/report/_report_rebind.py +9 -8
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +555 -152
  52. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0

sempy_labs/migration/_migrate_model_objects_to_semantic_model.py

@@ -1,6 +1,8 @@
  import sempy
  import sempy.fabric as fabric
- import re, datetime, time
+ import re
+ import datetime
+ import time
  from sempy_labs._list_functions import list_tables
  from sempy_labs._helper_functions import create_relationship_name
  from sempy_labs.tom import connect_semantic_model
@@ -8,6 +10,7 @@ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons

+
  @log
  def migrate_model_objects_to_semantic_model(
  dataset: str,
@@ -59,7 +62,6 @@ def migrate_model_objects_to_semantic_model(

  dfP_cc = dfPar[(dfPar["Source Type"] == "Calculated")]
  dfP_fp = dfP_cc[dfP_cc["Query"].str.contains("NAMEOF")]
- dfC_fp = dfC[dfC["Table Name"].isin(dfP_fp["Table Name"].values)]

  print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
  start_time = datetime.datetime.now()
@@ -124,12 +126,15 @@ def migrate_model_objects_to_semantic_model(
  c.IsKey = bool(dfC_filt["Key"].iloc[0])
  sbc = dfC_filt["Sort By Column"].iloc[0]

- if sbc != None:
- try:
+ if sbc is not None:
+ if any(
+ o.Name == sbc and o.Parent.Name == c.Parent.Name
+ for o in tom.all_columns()
+ ):
  c.SortByColumn = tom.model.Tables[
  t.Name
  ].Columns[sbc]
- except:
+ else:
  print(
  f"{icons.red_dot} Failed to create '{sbc}' as a Sort By Column for the '{c.Name}' in the '{t.Name}' table."
  )
@@ -159,9 +164,11 @@ def migrate_model_objects_to_semantic_model(
  cols = r["Column Name"]
  lvls = r["Level Name"]

- try:
- tom.model.Tables[tName].Hierarchies[hName]
- except:
+ if not any(
+ t.Name == tName and h.Name == hName
+ for t in tom.model.Tables
+ for h in t.Hierarchies
+ ):
  tom.add_hierarchy(
  table_name=tName,
  hierarchy_name=hName,
@@ -184,9 +191,7 @@ def migrate_model_objects_to_semantic_model(
  mDesc = r["Measure Description"]
  mFS = r["Format String"]

- try:
- tom.model.Tables[tName].Measures[mName]
- except:
+ if not any(m.Name == mName for m in tom.all_measures()):
  tom.add_measure(
  table_name=tName,
  measure_name=mName,
@@ -199,7 +204,7 @@ def migrate_model_objects_to_semantic_model(
  print(
  f"{icons.green_dot} The '{mName}' measure has been added."
  )
-
+ print(f"\n{icons.in_progress} Creating calculation groups...")
  for cgName in dfCI["Calculation Group Name"].unique():

  isHidden = bool(
@@ -216,9 +221,7 @@ def migrate_model_objects_to_semantic_model(
  (dfCI["Calculation Group Name"] == cgName), "Description"
  ].iloc[0]

- try:
- tom.model.Tables[cgName]
- except:
+ if not any(t.Name == cgName for t in tom.model.Tables):
  tom.add_calculation_group(
  name=cgName,
  description=desc,
@@ -230,9 +233,9 @@ def migrate_model_objects_to_semantic_model(
  )
  tom.model.DiscourageImplicitMeasures = True

- print(
- f"\n{icons.in_progress} Updating calculation group column name..."
- )
+ # print(
+ # f"\n{icons.in_progress} Updating calculation group column names..."
+ # )
  dfC_filt = dfC[
  (dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)
  ]
@@ -263,11 +266,12 @@ def migrate_model_objects_to_semantic_model(
  & (dfCI["Calculation Item Name"] == calcItem),
  "Format String Expression",
  ].iloc[0]
- try:
- tom.model.Tables[cgName].CalculationGroup.CalculationItems[
- calcItem
- ]
- except:
+
+ if not any(
+ ci.CalculationGroup.Parent.Name == cgName
+ and ci.Name == calcItem
+ for ci in tom.all_calculation_items()
+ ):
  tom.add_calculation_item(
  table_name=cgName,
  calculation_item_name=calcItem,
@@ -336,7 +340,13 @@ def migrate_model_objects_to_semantic_model(
  f"{icons.yellow_dot} {relName} was not created since columns used in a relationship must have the same data type."
  )
  else:
- try:
+ if not any(
+ r.FromTable.Name == fromTable
+ and r.FromColumn.Name == fromColumn
+ and r.ToTable.Name == toTable
+ and r.ToColumn.Name == toColumn
+ for r in tom.model.Relationships
+ ):
  tom.add_relationship(
  from_table=fromTable,
  from_column=fromColumn,
@@ -349,11 +359,10 @@ def migrate_model_objects_to_semantic_model(
  rely_on_referential_integrity=rori,
  is_active=isActive,
  )
-
  print(
  f"{icons.green_dot} The {relName} relationship has been added."
  )
- except:
+ else:
  print(
  f"{icons.red_dot} The {relName} relationship was not added."
  )
@@ -364,9 +373,7 @@ def migrate_model_objects_to_semantic_model(
  roleDesc = row["Description"]
  modPerm = row["Model Permission"]

- try:
- tom.model.Roles[roleName]
- except:
+ if not any(r.Name == roleName for r in tom.model.Roles):
  tom.add_role(
  role_name=roleName,
  model_permission=modPerm,
@@ -382,14 +389,14 @@ def migrate_model_objects_to_semantic_model(
  tName = row["Table"]
  expr = row["Filter Expression"]

- try:
+ if any(t.Name == tName for t in tom.model.Tables):
  tom.set_rls(
  role_name=roleName, table_name=tName, filter_expression=expr
  )
  print(
  f"{icons.green_dot} Row level security for the '{tName}' table within the '{roleName}' role has been set."
  )
- except:
+ else:
  print(
  f"{icons.red_dot} Row level security for the '{tName}' table within the '{roleName}' role was not set."
  )
@@ -397,9 +404,7 @@ def migrate_model_objects_to_semantic_model(
  print(f"\n{icons.in_progress} Creating perspectives...")
  for pName in dfP["Perspective Name"].unique():

- try:
- tom.model.Perspectives[pName]
- except:
+ if not any(p.Name == pName for p in tom.model.Perspectives):
  tom.add_perspective(perspective_name=pName)
  print(
  f"{icons.green_dot} The '{pName}' perspective has been added."
@@ -433,14 +438,12 @@ def migrate_model_objects_to_semantic_model(
  object=tom.model.Tables[tName].Hierarchies[oName],
  perspective_name=pName,
  )
- except:
+ except Exception:
  pass

  print(f"\n{icons.in_progress} Creating translation languages...")
  for trName in dfTranslation["Culture Name"].unique():
- try:
- tom.model.Cultures[trName]
- except:
+ if not any(c.Name == trName for c in tom.model.Cultures):
  tom.add_translation(trName)
  print(
  f"{icons.green_dot} The '{trName}' translation language has been added."
@@ -506,14 +509,14 @@ def migrate_model_objects_to_semantic_model(
  property=prop,
  value=translation,
  )
- except:
+ except Exception:
  pass

  print(
  f"\n{icons.green_dot} Migration of objects from '{dataset}' -> '{new_dataset}' is complete."
  )

- except Exception as e:
+ except Exception:
  if datetime.datetime.now() - start_time > timeout:
  break
  time.sleep(1)
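A pattern worth calling out in this file: every `try: collection[name] ... except:` membership probe is replaced with an explicit `any(...)` existence check over the TOM collection, so control flow no longer relies on bare `except:` clauses that would also swallow unrelated errors. A minimal sketch of the idea, using toy stand-ins rather than the actual TOM classes:

```python
# Illustrative sketch only; Table and Model stand in for the real TOM
# classes, whose collections index by name and raise on a miss.
from dataclasses import dataclass

@dataclass
class Table:
    Name: str

class Model:
    def __init__(self, tables):
        self.Tables = tables

model = Model([Table("Sales"), Table("Date")])

# Old style probed by indexing and treated any exception as "not found";
# a bare `except:` there also hid typos, network faults, etc.
# New style (as in 0.6.0): an explicit existence check, then create.
if not any(t.Name == "Customer" for t in model.Tables):
    model.Tables.append(Table("Customer"))  # stands in for tom.add_table(...)
```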

sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py

@@ -1,13 +1,13 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import datetime, time
+ import datetime
+ import time
  from sempy_labs._list_functions import list_tables
  from sempy_labs.directlake._get_shared_expression import get_shared_expression
  from sempy_labs._helper_functions import resolve_lakehouse_name
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
  from sempy_labs.tom import connect_semantic_model
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons

@@ -95,9 +95,9 @@ def migrate_tables_columns_to_semantic_model(
  dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
  ) as tom:
  success = True
- try:
- tom.model.Expressions["DatabaseQuery"]
- except:
+ if not any(
+ e.Name == "DatabaseQuery" for e in tom.model.Expressions
+ ):
  tom.add_expression("DatabaseQuery", expression=shEx)
  print(
  f"{icons.green_dot} The 'DatabaseQuery' expression has been added."
@@ -109,9 +109,7 @@ def migrate_tables_columns_to_semantic_model(
  tHid = bool(r["Hidden"])
  tDesc = r["Description"]

- try:
- tom.model.Tables[tName]
- except:
+ if not any(t.Name == tName for t in tom.model.Tables):
  tom.add_table(
  name=tName,
  description=tDesc,
@@ -132,9 +130,10 @@ def migrate_tables_columns_to_semantic_model(
  cHid = bool(r["Hidden"])
  cDataType = r["Data Type"]

- try:
- tom.model.Tables[tName].Columns[cName]
- except:
+ if not any(
+ c.Name == cName and c.Parent.Name == tName
+ for c in tom.all_columns()
+ ):
  tom.add_data_column(
  table_name=tName,
  column_name=cName,
@@ -149,13 +148,14 @@ def migrate_tables_columns_to_semantic_model(
  print(
  f"\n{icons.green_dot} All regular tables and columns have been added to the '{new_dataset}' semantic model."
  )
- except Exception as e:
+ except Exception:
  if datetime.datetime.now() - start_time > timeout:
  break
  time.sleep(1)
  else:
  print(
- f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook or specify the lakehouse/lakehouse_workspace parameters."
+ f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook"
+ f" or specify the lakehouse/lakehouse_workspace parameters."
  )
  print(
  "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"

sempy_labs/migration/_migration_validation.py

@@ -1,10 +1,10 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy_labs._list_functions import list_semantic_model_objects
  from sempy._utils._log import log

+
  @log
  def migration_validation(
  dataset: str,

sempy_labs/migration/_refresh_calc_tables.py

@@ -1,10 +1,11 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import re, datetime, time
+ import re
+ import datetime
+ import time
  from pyspark.sql import SparkSession
  from sempy_labs.tom import connect_semantic_model
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons

@@ -121,9 +122,11 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
  f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
  )
  except Exception as e:
- raise ValueError(f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse.") from e
+ raise ValueError(
+ f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
+ ) from e

- except Exception as e:
+ except Exception:
  if datetime.datetime.now() - start_time > timeout:
  break
  time.sleep(1)
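The outer `except Exception:` blocks in these migration modules implement a retry-until-timeout loop (sleep one second and try again, giving up once `timeout` has elapsed), and the inner failure now re-raises with `from e` so the original error stays chained in the traceback. A minimal sketch of that retry pattern, with `do_work` as a hypothetical stand-in for one migration attempt:

```python
import datetime
import time

def do_work():
    """Hypothetical stand-in for one migration attempt."""
    raise RuntimeError("transient failure")

timeout = datetime.timedelta(seconds=5)
start_time = datetime.datetime.now()

while True:
    try:
        do_work()
        break  # success: leave the retry loop
    except Exception:
        # Give up once the timeout has elapsed; otherwise wait and retry.
        if datetime.datetime.now() - start_time > timeout:
            break
        time.sleep(1)
```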

sempy_labs/report/__init__.py

@@ -1,6 +1,6 @@
  from sempy_labs.report._generate_report import (
  create_report_from_reportjson,
- #update_report_from_reportjson,
+ # update_report_from_reportjson,
  )
  from sempy_labs.report._report_functions import (
  get_report_json,
@@ -20,7 +20,7 @@ from sempy_labs.report._report_rebind import (

  __all__ = [
  "create_report_from_reportjson",
- #"update_report_from_reportjson",
+ # "update_report_from_reportjson",
  "get_report_json",
  # report_dependency_tree,
  "export_report",

sempy_labs/report/_generate_report.py

@@ -1,7 +1,8 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import json, base64, time
+ import json
+ import base64
+ import time
  from typing import Optional
  from sempy_labs._helper_functions import resolve_workspace_name_and_id
  import sempy_labs._icons as icons
@@ -41,7 +42,9 @@ def create_report_from_reportjson(
  dfI_model = dfI_m[(dfI_m["Display Name"] == dataset)]

  if len(dfI_model) == 0:
- raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
+ )

  datasetId = dfI_model["Id"].iloc[0]

@@ -169,7 +172,9 @@ def update_report_from_reportjson(
  dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]

  if len(dfR_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
+ )

  reportId = dfR_filt["Id"].iloc[0]
  client = fabric.FabricRestClient()
@@ -210,7 +215,7 @@ def update_report_from_reportjson(

  request_body = {
  "displayName": report,
- "type": 'Report',
+ "type": "Report",
  "definition": {
  "parts": [
  {

sempy_labs/report/_report_functions.py

@@ -1,7 +1,10 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import json, os, time, base64, copy
+ import json
+ import os
+ import time
+ import base64
+ import copy
  from anytree import Node, RenderTree
  from powerbiclient import Report
  from synapse.ml.services import Translate
@@ -20,6 +23,7 @@ from sempy_labs._helper_functions import (
  from typing import Any, List, Optional, Union
  from sempy._utils._log import log
  import sempy_labs._icons as icons
+ from sempy.fabric.exceptions import FabricHTTPException


  def get_report_json(
@@ -55,7 +59,9 @@ def get_report_json(
  dfI_filt = dfI[(dfI["Display Name"] == report)]

  if len(dfI_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
+ )

  itemId = dfI_filt["Id"].iloc[0]
  response = client.post(
@@ -71,7 +77,9 @@ def get_report_json(
  if save_to_file_name is not None:
  lakeAttach = lakehouse_attached()
  if lakeAttach is False:
- raise ValueError(f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+ raise ValueError(
+ f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+ )

  lakehouse_id = fabric.get_lakehouse_id()
  lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
@@ -185,7 +193,9 @@ def export_report(
  lakeAttach = lakehouse_attached()

  if lakeAttach is False:
- raise ValueError(f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook.")
+ raise ValueError(
+ f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+ )

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

@@ -195,10 +205,14 @@ def export_report(
  visual_name = [visual_name]

  if bookmark_name is not None and (page_name is not None or visual_name is not None):
- raise ValueError(f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set.")
+ raise ValueError(
+ f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
+ )

  if visual_name is not None and page_name is None:
- raise ValueError(f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set.")
+ raise ValueError(
+ f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
+ )

  validFormats = {
  "ACCESSIBLEPDF": ".pdf",
@@ -221,7 +235,9 @@ def export_report(

  fileExt = validFormats.get(export_format)
  if fileExt is None:
- raise ValueError(f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}")
+ raise ValueError(
+ f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
+ )

  if file_name is None:
  file_name = report + fileExt
@@ -238,7 +254,9 @@ def export_report(
  ]

  if len(dfI_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
+ )

  reportType = dfI_filt["Type"].iloc[0]

@@ -259,15 +277,21 @@ def export_report(
  ]

  if reportType == "Report" and export_format in paginatedOnly:
- raise ValueError(f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports.")
+ raise ValueError(
+ f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
+ )

  if reportType == "PaginatedReport" and export_format in pbiOnly:
- raise ValueError(f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports.")
+ raise ValueError(
+ f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
+ )

  if reportType == "PaginatedReport" and (
  bookmark_name is not None or page_name is not None or visual_name is not None
  ):
- raise ValueError(f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports.")
+ raise ValueError(
+ f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
+ )

  reportId = dfI_filt["Id"].iloc[0]
  client = fabric.PowerBIRestClient()
@@ -304,14 +328,18 @@ def export_report(
  for page in page_name:
  dfPage_filt = dfPage[dfPage["Page ID"] == page]
  if len(dfPage_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace."
+ )

  page_dict = {"pageName": page}
  request_body["powerBIReportConfiguration"]["pages"].append(page_dict)

  elif page_name is not None and visual_name is not None:
  if len(page_name) != len(visual_name):
- raise ValueError(f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'.")
+ raise ValueError(
+ f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
+ )

  if reportType == "Report":
  request_body = {"format": export_format, "powerBIReportConfiguration": {}}
@@ -324,7 +352,9 @@ def export_report(
  (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
  ]
  if len(dfVisual_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace."
+ )

  page_dict = {"pageName": page, "visualName": visual}
  request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
@@ -359,7 +389,9 @@ def export_report(
  )
  response_body = json.loads(response.content)
  if response_body["status"] == "Failed":
- raise ValueError(f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed.")
+ raise ValueError(
+ f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
+ )
  else:
  response = client.get(
  f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/exports/{exportId}/file"
@@ -411,7 +443,9 @@ def clone_report(
  dfI_filt = dfI[(dfI["Display Name"] == report)]

  if len(dfI_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
+ )

  reportId = resolve_report_id(report, workspace)

@@ -423,7 +457,9 @@ def clone_report(
  dfW_filt = dfW[dfW["Name"] == target_workspace]

  if len(dfW_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{workspace}' is not a valid workspace."
+ )

  target_workspace_id = dfW_filt["Id"].iloc[0]

@@ -439,7 +475,9 @@ def clone_report(
  dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]

  if len(dfD_filt) == 0:
- raise ValueError(f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace."
+ )

  target_dataset_id = dfD_filt["Dataset Id"].iloc[0]

@@ -462,12 +500,12 @@ def clone_report(
  f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone", json=request_body
  )

- if response.status_code == 200:
- print(
- f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the '{target_workspace}' workspace using the '{target_dataset}' semantic model."
- )
- else:
- raise ValueError(f"{icons.red_dot} POST request failed with status code: {response.status_code}")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the"
+ f" '{target_workspace}' workspace using the '{target_dataset}' semantic model."
+ )


  def launch_report(report: str, workspace: Optional[str] = None):
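Note the new failure mode in clone_report: instead of wrapping a bad status code in a generic ValueError, a non-200 response is raised as a FabricHTTPException (imported above from sempy.fabric.exceptions), which carries the full HTTP response. A minimal sketch of that guard-clause shape, where `_post_or_raise` is an illustrative helper name rather than part of the library, assuming an environment where sempy is installed:

```python
# Illustrative helper only; the name _post_or_raise is not part of
# semantic-link-labs. Assumes a Fabric environment with sempy available.
from sempy.fabric.exceptions import FabricHTTPException

def _post_or_raise(client, url: str, body: dict):
    response = client.post(url, json=body)
    if response.status_code != 200:
        # Surfaces the status code and payload instead of a generic ValueError.
        raise FabricHTTPException(response)
    return response
```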
@@ -544,7 +582,7 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
  pageH = pageConfigJson["visibility"]
  if pageH == 1:
  pageHidden = True
- except:
+ except Exception:
  pass

  new_data = {
@@ -605,7 +643,7 @@ def list_report_visuals(report: str, workspace: Optional[str] = None):
  "properties"
  ]["text"]["expr"]["Literal"]["Value"]
  title = title[1:-1]
- except:
+ except Exception:
  title = ""

  new_data = {
@@ -673,7 +711,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
  ][vc]["singleVisual"]["display"]["mode"]
  if hidden == "hidden":
  vHidden = True
- except:
+ except Exception:
  pass

  new_data = {
@@ -701,7 +739,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):

  return df

- except:
+ except Exception:
  print(
  f"The '{report}' report within the '{workspace}' workspace has no bookmarks."
  )