semantic-link-labs 0.4.2__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff shows the content changes between publicly available package versions released to one of the supported registries. The information is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (54) hide show
  1. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +44 -14
  5. sempy_labs/_ai.py +31 -32
  6. sempy_labs/_clear_cache.py +5 -8
  7. sempy_labs/_connections.py +80 -72
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +60 -54
  10. sempy_labs/_helper_functions.py +8 -10
  11. sempy_labs/_icons.py +15 -0
  12. sempy_labs/_list_functions.py +1139 -428
  13. sempy_labs/_model_auto_build.py +5 -6
  14. sempy_labs/_model_bpa.py +134 -1125
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +21 -25
  17. sempy_labs/_one_lake_integration.py +10 -7
  18. sempy_labs/_query_scale_out.py +83 -93
  19. sempy_labs/_refresh_semantic_model.py +12 -16
  20. sempy_labs/_translations.py +214 -288
  21. sempy_labs/_vertipaq.py +51 -42
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -11
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -23
  25. sempy_labs/directlake/_fallback.py +5 -7
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -8
  28. sempy_labs/directlake/_guardrails.py +6 -8
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +18 -12
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +9 -8
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +129 -12
  33. sempy_labs/directlake/_warm_cache.py +5 -5
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +4 -4
  36. sempy_labs/lakehouse/_lakehouse.py +3 -4
  37. sempy_labs/lakehouse/_shortcuts.py +17 -13
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -24
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +45 -46
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +6 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +10 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +8 -7
  48. sempy_labs/report/_report_functions.py +47 -52
  49. sempy_labs/report/_report_rebind.py +38 -37
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +541 -180
  52. semantic_link_labs-0.4.2.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,10 @@
1
- import sempy
2
1
  import sempy.fabric as fabric
3
2
  import pandas as pd
4
- import json, os, time, base64, copy
3
+ import json
4
+ import os
5
+ import time
6
+ import base64
7
+ import copy
5
8
  from anytree import Node, RenderTree
6
9
  from powerbiclient import Report
7
10
  from synapse.ml.services import Translate
@@ -20,6 +23,7 @@ from sempy_labs._helper_functions import (
20
23
  from typing import Any, List, Optional, Union
21
24
  from sempy._utils._log import log
22
25
  import sempy_labs._icons as icons
26
+ from sempy.fabric.exceptions import FabricHTTPException
23
27
 
24
28
 
25
29
  def get_report_json(
@@ -55,10 +59,9 @@ def get_report_json(
55
59
  dfI_filt = dfI[(dfI["Display Name"] == report)]
56
60
 
57
61
  if len(dfI_filt) == 0:
58
- print(
62
+ raise ValueError(
59
63
  f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
60
64
  )
61
- return
62
65
 
63
66
  itemId = dfI_filt["Id"].iloc[0]
64
67
  response = client.post(
@@ -74,10 +77,9 @@ def get_report_json(
74
77
  if save_to_file_name is not None:
75
78
  lakeAttach = lakehouse_attached()
76
79
  if lakeAttach is False:
77
- print(
80
+ raise ValueError(
78
81
  f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
79
82
  )
80
- return
81
83
 
82
84
  lakehouse_id = fabric.get_lakehouse_id()
83
85
  lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
@@ -191,10 +193,9 @@ def export_report(
191
193
  lakeAttach = lakehouse_attached()
192
194
 
193
195
  if lakeAttach is False:
194
- print(
196
+ raise ValueError(
195
197
  f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
196
198
  )
197
- return
198
199
 
199
200
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
200
201
 
@@ -204,15 +205,14 @@ def export_report(
204
205
  visual_name = [visual_name]
205
206
 
206
207
  if bookmark_name is not None and (page_name is not None or visual_name is not None):
207
- print(
208
+ raise ValueError(
208
209
  f"{icons.red_dot} If the 'bookmark_name' parameter is set, the 'page_name' and 'visual_name' parameters must not be set."
209
210
  )
210
- return
211
+
211
212
  if visual_name is not None and page_name is None:
212
- print(
213
+ raise ValueError(
213
214
  f"{icons.red_dot} If the 'visual_name' parameter is set, the 'page_name' parameter must be set."
214
215
  )
215
- return
216
216
 
217
217
  validFormats = {
218
218
  "ACCESSIBLEPDF": ".pdf",
@@ -235,10 +235,9 @@ def export_report(
235
235
 
236
236
  fileExt = validFormats.get(export_format)
237
237
  if fileExt is None:
238
- print(
238
+ raise ValueError(
239
239
  f"{icons.red_dot} The '{export_format}' format is not a valid format for exporting Power BI reports. Please enter a valid format. Options: {validFormats}"
240
240
  )
241
- return
242
241
 
243
242
  if file_name is None:
244
243
  file_name = report + fileExt
@@ -255,10 +254,9 @@ def export_report(
255
254
  ]
256
255
 
257
256
  if len(dfI_filt) == 0:
258
- print(
257
+ raise ValueError(
259
258
  f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
260
259
  )
261
- return
262
260
 
263
261
  reportType = dfI_filt["Type"].iloc[0]
264
262
 
@@ -279,23 +277,21 @@ def export_report(
279
277
  ]
280
278
 
281
279
  if reportType == "Report" and export_format in paginatedOnly:
282
- print(
280
+ raise ValueError(
283
281
  f"{icons.red_dot} The '{export_format}' format is only supported for paginated reports."
284
282
  )
285
- return
283
+
286
284
  if reportType == "PaginatedReport" and export_format in pbiOnly:
287
- print(
285
+ raise ValueError(
288
286
  f"{icons.red_dot} The '{export_format}' format is only supported for Power BI reports."
289
287
  )
290
- return
291
288
 
292
289
  if reportType == "PaginatedReport" and (
293
290
  bookmark_name is not None or page_name is not None or visual_name is not None
294
291
  ):
295
- print(
292
+ raise ValueError(
296
293
  f"{icons.red_dot} Export for paginated reports does not support bookmarks/pages/visuals. Those parameters must not be set for paginated reports."
297
294
  )
298
- return
299
295
 
300
296
  reportId = dfI_filt["Id"].iloc[0]
301
297
  client = fabric.PowerBIRestClient()
@@ -332,19 +328,19 @@ def export_report(
332
328
  for page in page_name:
333
329
  dfPage_filt = dfPage[dfPage["Page ID"] == page]
334
330
  if len(dfPage_filt) == 0:
335
- print(
331
+ raise ValueError(
336
332
  f"{icons.red_dot} The '{page}' page does not exist in the '{report}' report within the '{workspace}' workspace."
337
333
  )
338
- return
334
+
339
335
  page_dict = {"pageName": page}
340
336
  request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
341
337
 
342
338
  elif page_name is not None and visual_name is not None:
343
339
  if len(page_name) != len(visual_name):
344
- print(
340
+ raise ValueError(
345
341
  f"{icons.red_dot} Each 'visual_name' must map to a single 'page_name'."
346
342
  )
347
- return
343
+
348
344
  if reportType == "Report":
349
345
  request_body = {"format": export_format, "powerBIReportConfiguration": {}}
350
346
 
@@ -356,10 +352,10 @@ def export_report(
356
352
  (dfVisual["Page ID"] == page) & (dfVisual["Visual ID"] == visual)
357
353
  ]
358
354
  if len(dfVisual_filt) == 0:
359
- print(
355
+ raise ValueError(
360
356
  f"{icons.red_dot} The '{visual}' visual does not exist on the '{page}' in the '{report}' report within the '{workspace}' workspace."
361
357
  )
362
- return
358
+
363
359
  page_dict = {"pageName": page, "visualName": visual}
364
360
  request_body["powerBIReportConfiguration"]["pages"].append(page_dict)
365
361
  a += 1
@@ -393,7 +389,7 @@ def export_report(
393
389
  )
394
390
  response_body = json.loads(response.content)
395
391
  if response_body["status"] == "Failed":
396
- print(
392
+ raise ValueError(
397
393
  f"{icons.red_dot} The export for the '{report}' report within the '{workspace}' workspace in the '{export_format}' format has failed."
398
394
  )
399
395
  else:
@@ -447,10 +443,9 @@ def clone_report(
447
443
  dfI_filt = dfI[(dfI["Display Name"] == report)]
448
444
 
449
445
  if len(dfI_filt) == 0:
450
- print(
446
+ raise ValueError(
451
447
  f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
452
448
  )
453
- return
454
449
 
455
450
  reportId = resolve_report_id(report, workspace)
456
451
 
@@ -462,8 +457,10 @@ def clone_report(
462
457
  dfW_filt = dfW[dfW["Name"] == target_workspace]
463
458
 
464
459
  if len(dfW_filt) == 0:
465
- print(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
466
- return
460
+ raise ValueError(
461
+ f"{icons.red_dot} The '{workspace}' is not a valid workspace."
462
+ )
463
+
467
464
  target_workspace_id = dfW_filt["Id"].iloc[0]
468
465
 
469
466
  if target_dataset is None:
@@ -478,10 +475,10 @@ def clone_report(
478
475
  dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
479
476
 
480
477
  if len(dfD_filt) == 0:
481
- print(
478
+ raise ValueError(
482
479
  f"{icons.red_dot} The '{target_dataset}' target dataset does not exist in the '{target_workspace}' workspace."
483
480
  )
484
- return
481
+
485
482
  target_dataset_id = dfD_filt["Dataset Id"].iloc[0]
486
483
 
487
484
  client = fabric.PowerBIRestClient()
@@ -503,14 +500,12 @@ def clone_report(
503
500
  f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone", json=request_body
504
501
  )
505
502
 
506
- if response.status_code == 200:
507
- print(
508
- f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the '{target_workspace}' workspace using the '{target_dataset}' semantic model."
509
- )
510
- else:
511
- print(
512
- f"{icons.red_dot} POST request failed with status code: {response.status_code}"
513
- )
503
+ if response.status_code != 200:
504
+ raise FabricHTTPException(response)
505
+ print(
506
+ f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the"
507
+ f" '{target_workspace}' workspace using the '{target_dataset}' semantic model."
508
+ )
514
509
 
515
510
 
516
511
  def launch_report(report: str, workspace: Optional[str] = None):
@@ -573,21 +568,21 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
573
568
  reportJson = get_report_json(report=report, workspace=workspace)
574
569
 
575
570
  for section in reportJson["sections"]:
576
- pageID = section["name"]
577
- pageName = section["displayName"]
571
+ pageID = section.get("name")
572
+ pageName = section.get("displayName")
578
573
  # pageFilters = section['filters']
579
- pageWidth = section["width"]
580
- pageHeight = section["height"]
574
+ pageWidth = section.get("width")
575
+ pageHeight = section.get("height")
581
576
  visualCount = len(section["visualContainers"])
582
577
  pageHidden = False
583
- pageConfig = section["config"]
578
+ pageConfig = section.get("config")
584
579
  pageConfigJson = json.loads(pageConfig)
585
580
 
586
581
  try:
587
582
  pageH = pageConfigJson["visibility"]
588
583
  if pageH == 1:
589
584
  pageHidden = True
590
- except:
585
+ except Exception:
591
586
  pass
592
587
 
593
588
  new_data = {
@@ -648,7 +643,7 @@ def list_report_visuals(report: str, workspace: Optional[str] = None):
648
643
  "properties"
649
644
  ]["text"]["expr"]["Literal"]["Value"]
650
645
  title = title[1:-1]
651
- except:
646
+ except Exception:
652
647
  title = ""
653
648
 
654
649
  new_data = {
@@ -716,7 +711,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
716
711
  ][vc]["singleVisual"]["display"]["mode"]
717
712
  if hidden == "hidden":
718
713
  vHidden = True
719
- except:
714
+ except Exception:
720
715
  pass
721
716
 
722
717
  new_data = {
@@ -744,7 +739,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
744
739
 
745
740
  return df
746
741
 
747
- except:
742
+ except Exception:
748
743
  print(
749
744
  f"The '{report}' report within the '{workspace}' workspace has no bookmarks."
750
745
  )
@@ -1,13 +1,14 @@
1
- import sempy
2
1
  import sempy.fabric as fabric
3
2
  from sempy_labs._helper_functions import resolve_dataset_id, resolve_report_id
4
- from typing import Optional
3
+ from typing import Optional, List
5
4
  from sempy._utils._log import log
6
5
  import sempy_labs._icons as icons
6
+ from sempy.fabric.exceptions import FabricHTTPException
7
+
7
8
 
8
9
  @log
9
10
  def report_rebind(
10
- report: str,
11
+ report: str | List[str],
11
12
  dataset: str,
12
13
  report_workspace: Optional[str] = None,
13
14
  dataset_workspace: Optional[str] = None,
@@ -17,8 +18,8 @@ def report_rebind(
17
18
 
18
19
  Parameters
19
20
  ----------
20
- report : str
21
- Name of the Power BI report.
21
+ report : str | List[str]
22
+ Name(s) of the Power BI report(s).
22
23
  dataset : str
23
24
  Name of the semantic model.
24
25
  report_workspace : str, default=None
@@ -45,24 +46,25 @@ def report_rebind(
45
46
 
46
47
  client = fabric.PowerBIRestClient()
47
48
 
48
- reportId = resolve_report_id(report=report, workspace=report_workspace)
49
- datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
49
+ if isinstance(report, str):
50
+ report = [report]
50
51
 
51
- # Prepare API
52
- request_body = {"datasetId": datasetId}
52
+ for rpt in report:
53
+ reportId = resolve_report_id(report=rpt, workspace=report_workspace)
54
+ datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
53
55
 
54
- response = client.post(
55
- f"/v1.0/myorg/groups/{report_workspace_id}/reports/{reportId}/Rebind",
56
- json=request_body,
57
- )
56
+ # Prepare API
57
+ request_body = {"datasetId": datasetId}
58
58
 
59
- if response.status_code == 200:
60
- print(
61
- f"{icons.green_dot} The '{report}' report has been successfully rebinded to the '{dataset}' semantic model."
59
+ response = client.post(
60
+ f"/v1.0/myorg/groups/{report_workspace_id}/reports/{reportId}/Rebind",
61
+ json=request_body,
62
62
  )
63
- else:
63
+
64
+ if response.status_code != 200:
65
+ raise FabricHTTPException(response)
64
66
  print(
65
- f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace failed to rebind to the '{dataset}' semantic model within the '{dataset_workspace}' workspace."
67
+ f"{icons.green_dot} The '{rpt}' report has been successfully rebinded to the '{dataset}' semantic model."
66
68
  )
67
69
 
68
70
 
@@ -72,7 +74,7 @@ def report_rebind_all(
72
74
  new_dataset: str,
73
75
  dataset_workspace: Optional[str] = None,
74
76
  new_dataset_workpace: Optional[str] = None,
75
- report_workspace: Optional[str] = None,
77
+ report_workspace: Optional[str | List[str]] = None,
76
78
  ):
77
79
  """
78
80
  Rebinds all reports in a workspace which are bound to a specific semantic model to a new semantic model.
@@ -93,8 +95,8 @@ def report_rebind_all(
93
95
  The name of the Fabric workspace in which the new semantic model resides.
94
96
  Defaults to None which resolves to the workspace of the attached lakehouse
95
97
  or if no lakehouse attached, resolves to the workspace of the notebook.
96
- report_workspace : str, default=None
97
- The name of the Fabric workspace in which the report resides.
98
+ report_workspace : str | List[str], default=None
99
+ The name(s) of the Fabric workspace(s) in which the report(s) reside(s).
98
100
  Defaults to None which resolves to the workspace of the attached lakehouse
99
101
  or if no lakehouse attached, resolves to the workspace of the notebook.
100
102
 
@@ -103,11 +105,7 @@ def report_rebind_all(
103
105
 
104
106
  """
105
107
 
106
- if dataset_workspace is None:
107
- dataset_workspace_id = fabric.get_workspace_id()
108
- dataset_workspace = fabric.resolve_workspace_name(dataset_workspace_id)
109
- else:
110
- dataset_workspace_id = fabric.resolve_workspace_id(dataset_workspace)
108
+ dataset_workspace = fabric.resolve_workspace_name()
111
109
 
112
110
  if new_dataset_workpace is None:
113
111
  new_dataset_workpace = dataset_workspace
@@ -115,16 +113,19 @@ def report_rebind_all(
115
113
  if report_workspace is None:
116
114
  report_workspace = dataset_workspace
117
115
 
118
- datasetId = resolve_dataset_id(dataset, dataset_workspace)
116
+ if isinstance(report_workspace, str):
117
+ report_workspace = [report_workspace]
119
118
 
120
- dfRep = fabric.list_reports(workspace=report_workspace)
121
- dfRep_filt = dfRep[dfRep["Dataset Id"] == datasetId]
119
+ datasetId = resolve_dataset_id(dataset, dataset_workspace)
122
120
 
123
- for i, r in dfRep_filt.iterrows():
124
- rptName = r["Name"]
125
- report_rebind(
126
- report=rptName,
127
- dataset=new_dataset,
128
- report_workspace=report_workspace,
129
- dataset_workspace=new_dataset_workpace,
130
- )
121
+ for rw in report_workspace:
122
+ dfRep = fabric.list_reports(workspace=rw)
123
+ dfRep_filt = dfRep[dfRep["Dataset Id"] == datasetId]
124
+ for i, r in dfRep_filt.iterrows():
125
+ rptName = r["Name"]
126
+ report_rebind(
127
+ report=rptName,
128
+ dataset=new_dataset,
129
+ report_workspace=rw,
130
+ dataset_workspace=new_dataset_workpace,
131
+ )
@@ -1,6 +1,3 @@
1
1
  from sempy_labs.tom._model import TOMWrapper, connect_semantic_model
2
2
 
3
- __all__ = [
4
- "TOMWrapper",
5
- "connect_semantic_model"
6
- ]
3
+ __all__ = ["TOMWrapper", "connect_semantic_model"]