semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
sempy_labs/report/_report_list_functions.py CHANGED
@@ -3,13 +3,16 @@ from typing import Optional
 import pandas as pd
 from sempy_labs._helper_functions import (
     format_dax_object_name,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from sempy_labs.report._reportwrapper import ReportWrapper
 from sempy_labs._list_functions import list_reports_using_semantic_model
+from uuid import UUID
 
 
 def list_unused_objects_in_reports(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of all columns in the semantic model which are not used in any related Power BI reports (including dependencies).
@@ -17,10 +20,10 @@ def list_unused_objects_in_reports(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -32,7 +35,12 @@ def list_unused_objects_in_reports(
 
     # TODO: what about relationships/RLS?
 
-    dfR = _list_all_report_semantic_model_objects(dataset=dataset, workspace=workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    dfR = _list_all_report_semantic_model_objects(
+        dataset=dataset_id, workspace=workspace_id
+    )
     dfR_filt = (
         dfR[dfR["Object Type"] == "Column"][["Table Name", "Object Name"]]
         .drop_duplicates()
@@ -42,7 +50,7 @@ def list_unused_objects_in_reports(
         dfR_filt["Table Name"], dfR_filt["Object Name"]
     )
 
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
 
     df = dfC[~(dfC["Column Object"].isin(dfR_filt["Column Object"].values))]
@@ -52,7 +60,7 @@
 
 
 def _list_all_report_semantic_model_objects(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a unique list of all semantic model objects (columns, measures, hierarchies) which are used in all reports which leverage the semantic model.
@@ -60,10 +68,10 @@ def _list_all_report_semantic_model_objects(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -73,7 +81,10 @@
         A pandas dataframe.
     """
 
-    dfR = list_reports_using_semantic_model(dataset=dataset, workspace=workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    dfR = list_reports_using_semantic_model(dataset=dataset_id, workspace=workspace_id)
    dfs = []
 
     for _, r in dfR.iterrows():
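
Both functions in this file now accept a name or a UUID. A minimal usage sketch of the new signature (the dataset ID and workspace name below are placeholders; the import uses the module path shown above):

    from uuid import UUID
    from sempy_labs.report._report_list_functions import list_unused_objects_in_reports

    # Passing an ID skips the name lookup that resolve_dataset_name_and_id
    # would otherwise perform; names remain valid inputs.
    df = list_unused_objects_in_reports(
        dataset=UUID("11111111-2222-3333-4444-555555555555"),  # placeholder ID
        workspace="Sales Analytics",  # placeholder workspace name
    )
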
sempy_labs/report/_report_rebind.py CHANGED
@@ -1,20 +1,22 @@
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_dataset_id,
+    resolve_workspace_name_and_id,
     resolve_report_id,
 )
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 @log
 def report_rebind(
     report: str | List[str],
     dataset: str,
-    report_workspace: Optional[str] = None,
-    dataset_workspace: Optional[str] = None,
+    report_workspace: Optional[str | UUID] = None,
+    dataset_workspace: Optional[str | UUID] = None,
 ):
     """
     Rebinds a report to a semantic model.
@@ -27,23 +29,22 @@
         Name(s) of the Power BI report(s).
     dataset : str
         Name of the semantic model.
-    report_workspace : str, default=None
-        The name of the Fabric workspace in which the report resides.
+    report_workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    dataset_workspace : str, default=None
-        The name of the Fabric workspace in which the semantic model resides.
+    dataset_workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if report_workspace is None:
-        report_workspace_id = fabric.get_workspace_id()
-        report_workspace = fabric.resolve_workspace_name(report_workspace_id)
-    else:
-        report_workspace_id = fabric.resolve_workspace_id(report_workspace)
+    (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
+        report_workspace
+    )
+
     if dataset_workspace is None:
-        dataset_workspace = report_workspace
+        dataset_workspace = report_workspace_name
 
     client = fabric.PowerBIRestClient()
@@ -51,14 +52,14 @@
         report = [report]
 
     for rpt in report:
-        reportId = resolve_report_id(report=rpt, workspace=report_workspace)
-        datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
+        report_id = resolve_report_id(report=rpt, workspace=report_workspace_id)
+        dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
 
         # Prepare API
-        request_body = {"datasetId": datasetId}
+        request_body = {"datasetId": dataset_id}
 
         response = client.post(
-            f"/v1.0/myorg/groups/{report_workspace_id}/reports/{reportId}/Rebind",
+            f"/v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
             json=request_body,
         )
 
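
The rebind loop now resolves the report workspace once via resolve_workspace_name_and_id instead of branching on None. A usage sketch (report, model, and workspace values are placeholders):

    from sempy_labs.report import report_rebind

    # report accepts a single name or a list; workspaces may be names or UUIDs.
    report_rebind(
        report=["Sales Report", "Ops Report"],
        dataset="Sales Model",
        report_workspace="11111111-2222-3333-4444-555555555555",
        dataset_workspace=None,  # falls back to the report's workspace per the code above
    )
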
sempy_labs/report/_reportwrapper.py CHANGED
@@ -8,6 +8,7 @@ from sempy_labs._helper_functions import (
     _add_part,
     lro,
     _decode_b64,
+    resolve_workspace_name_and_id,
 )
 from typing import Optional, List
 import pandas as pd
@@ -32,8 +33,8 @@ class ReportWrapper:
     ----------
     report : str
         The name of the report.
-    workspace : str
-        The name of the workspace in which the report resides.
+    workspace : str | uuid.UUID
+        The name or ID of the workspace in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -50,7 +51,7 @@
     def __init__(
         self,
         report: str,
-        workspace: Optional[str] = None,
+        workspace: Optional[str | UUID] = None,
     ):
         """
         Connects to a Power BI report and retrieves its definition.
@@ -61,8 +62,8 @@
         ----------
         report : str
             The name of the report.
-        workspace : str
-            The name of the workspace in which the report resides.
+        workspace : str | UUID
+            The name or ID of the workspace in which the report resides.
             Defaults to None which resolves to the workspace of the attached lakehouse
             or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -77,11 +78,12 @@
         warnings.simplefilter(action="ignore", category=FutureWarning)
 
         self._report = report
-        self._workspace = workspace
-        self._workspace_id = fabric.resolve_workspace_id(workspace)
-        self._report_id = resolve_report_id(report, workspace)
+        (self._workspace_name, self._workspace_id) = resolve_workspace_name_and_id(
+            workspace
+        )
+        self._report_id = resolve_report_id(report, self._workspace_id)
         self.rdef = get_report_definition(
-            report=self._report, workspace=self._workspace
+            report=self._report, workspace=self._workspace_id
         )
 
         if len(self.rdef[self.rdef["path"] == "definition/report.json"]) == 0:
@@ -95,12 +97,14 @@
 
         from sempy_labs.tom import connect_semantic_model
 
-        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
-            resolve_dataset_from_report(report=self._report, workspace=self._workspace)
+        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
+            resolve_dataset_from_report(
+                report=self._report, workspace=self._workspace_id
+            )
         )
 
         with connect_semantic_model(
-            dataset=dataset_name, readonly=True, workspace=dataset_workspace
+            dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
         ) as tom:
             for index, row in dataframe.iterrows():
                 obj_type = row["Object Type"]
@@ -218,7 +222,7 @@
             theme_collection = rptJson.get("themeCollection", {})
             if theme_type not in theme_collection:
                 raise ValueError(
-                    f"{icons.red_dot} The {self._report} report within the '{self._workspace} workspace has no custom theme."
+                    f"{icons.red_dot} The {self._report} report within the '{self._workspace_name} workspace has no custom theme."
                 )
             ct = theme_collection.get(theme_type)
             theme_name = ct["name"]
@@ -413,7 +417,7 @@
         )
 
         df["Page URL"] = df["Page Name"].apply(
-            lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace)}/{page_name}"
+            lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace_id)}/{page_name}"
        )
 
         bool_cols = ["Hidden", "Locked", "Used"]
@@ -691,7 +695,7 @@
         df[bool_cols] = df[bool_cols].astype(bool)
 
         df["Page URL"] = df["Page Name"].apply(
-            lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace)}/{page_name}"
+            lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace_id)}/{page_name}"
         )
 
         return df
@@ -1170,9 +1174,9 @@
         )
 
         if extended:
-            dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
+            dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
                 resolve_dataset_from_report(
-                    report=self._report, workspace=self._workspace
+                    report=self._report, workspace=self._workspace_id
                 )
             )
 
@@ -1195,7 +1199,7 @@
             return object_validators.get(row["Object Type"], lambda: False)()
 
         with connect_semantic_model(
-            dataset=dataset_name, readonly=True, workspace=dataset_workspace
+            dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
         ) as tom:
             df["Valid Semantic Model Object"] = df.apply(
                 lambda row: check_validity(tom, row), axis=1
@@ -1214,11 +1218,13 @@
             .drop_duplicates()
             .reset_index(drop=True)
         )
-        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
-            resolve_dataset_from_report(report=self._report, workspace=self._workspace)
+        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
+            resolve_dataset_from_report(
+                report=self._report, workspace=self._workspace_id
+            )
         )
         dep = get_measure_dependencies(
-            dataset=dataset_name, workspace=dataset_workspace
+            dataset=dataset_id, workspace=dataset_workspace_id
         )
         rpt_measures = df[df["Object Type"] == "Measure"]["Object Name"].values
         new_rows = dep[dep["Object Name"].isin(rpt_measures)][
@@ -1232,7 +1238,7 @@
         )
 
         result_df["Dataset Name"] = dataset_name
-        result_df["Dataset Workspace Name"] = dataset_workspace
+        result_df["Dataset Workspace Name"] = dataset_workspace_name
         colName = "Dataset Name"
         result_df.insert(0, colName, result_df.pop(colName))
         colName = "Dataset Workspace Name"
@@ -1539,7 +1545,7 @@
 
         self.update_report(request_body=request_body)
         print(
-            f"{icons.green_dot} The '{theme_name}' theme has been set as the theme for the '{self._report}' report within the '{self._workspace}' workspace."
+            f"{icons.green_dot} The '{theme_name}' theme has been set as the theme for the '{self._report}' report within the '{self._workspace_name}' workspace."
        )
 
     def set_active_page(self, page_name: str):
@@ -1567,7 +1573,7 @@
         self._update_single_file(file_name=pages_file, new_payload=file_payload)
 
         print(
-            f"{icons.green_dot} The '{page_display_name}' page has been set as the active page in the '{self._report}' report within the '{self._workspace}' workspace."
+            f"{icons.green_dot} The '{page_display_name}' page has been set as the active page in the '{self._report}' report within the '{self._workspace_name}' workspace."
         )
 
     def set_page_type(self, page_name: str, page_type: str):
@@ -1640,7 +1646,7 @@
                 cv_remove_display.append(cv_display)
         if len(cv_remove) == 0:
             print(
-                f"{icons.green_dot} There are no unnecessary custom visuals in the '{self._report}' report within the '{self._workspace}' workspace."
+                f"{icons.green_dot} There are no unnecessary custom visuals in the '{self._report}' report within the '{self._workspace_name}' workspace."
             )
             return
 
@@ -1662,7 +1668,7 @@
 
         self.update_report(request_body=request_body)
         print(
-            f"{icons.green_dot} The {cv_remove_display} custom visuals have been removed from the '{self._report}' report within the '{self._workspace}' workspace."
+            f"{icons.green_dot} The {cv_remove_display} custom visuals have been removed from the '{self._report}' report within the '{self._workspace_name}' workspace."
         )
 
     def migrate_report_level_measures(self, measures: Optional[str | List[str]] = None):
@@ -1681,12 +1687,14 @@
         rlm = self.list_report_level_measures()
         if len(rlm) == 0:
             print(
-                f"{icons.green_dot} The '{self._report}' report within the '{self._workspace}' workspace has no report-level measures."
+                f"{icons.green_dot} The '{self._report}' report within the '{self._workspace_name}' workspace has no report-level measures."
             )
             return
 
-        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
-            resolve_dataset_from_report(report=self._report, workspace=self._workspace)
+        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
+            resolve_dataset_from_report(
+                report=self._report, workspace=self._workspace_id
+            )
        )
 
         if isinstance(measures, str):
@@ -1703,7 +1711,7 @@
 
         mCount = 0
         with connect_semantic_model(
-            dataset=dataset_name, readonly=False, workspace=dataset_workspace
+            dataset=dataset_id, readonly=False, workspace=dataset_workspace_id
         ) as tom:
             for _, r in rlm.iterrows():
                 tableName = r["Table Name"]
@@ -1748,7 +1756,7 @@
 
         self.update_report(request_body=request_body)
         print(
-            f"{icons.green_dot} The report-level measures have been migrated to the '{dataset_name}' semantic model within the '{dataset_workspace}' workspace."
+            f"{icons.green_dot} The report-level measures have been migrated to the '{dataset_name}' semantic model within the '{dataset_workspace_name}' workspace."
         )
 
     def set_page_visibility(self, page_name: str, hidden: bool):
@@ -1798,7 +1806,7 @@
 
         if len(dfP_filt) == 0:
             print(
-                f"{icons.green_dot} There are no Tooltip or Drillthrough pages in the '{self._report}' report within the '{self._workspace}' workspace."
+                f"{icons.green_dot} There are no Tooltip or Drillthrough pages in the '{self._report}' report within the '{self._workspace_name}' workspace."
             )
             return
 
@@ -1837,7 +1845,7 @@
 
         self.update_report(request_body=request_body)
         print(
-            f"{icons.green_dot} Show items with data has been disabled for all visuals in the '{self._report}' report within the '{self._workspace}' workspace."
+            f"{icons.green_dot} Show items with data has been disabled for all visuals in the '{self._report}' report within the '{self._workspace_name}' workspace."
         )
 
         # Set Annotations
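
Construction now resolves both the workspace name and ID up front, so printed messages keep using the name while REST calls use the ID. A sketch (report and workspace values are placeholders; list_pages is one of the wrapper's existing listing methods):

    from uuid import UUID
    from sempy_labs.report import ReportWrapper

    rpt = ReportWrapper(
        report="Executive Dashboard",
        workspace=UUID("11111111-2222-3333-4444-555555555555"),
    )
    pages = rpt.list_pages()  # downstream calls now pass the resolved workspace ID
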
sempy_labs/tom/_model.py CHANGED
@@ -20,6 +20,8 @@ import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
 import ast
 from uuid import UUID
+import sempy_labs._authentication as auth
+
 
 if TYPE_CHECKING:
     import Microsoft.AnalysisServices.Tabular
@@ -44,19 +46,91 @@ class TOMWrapper:
 
     def __init__(self, dataset, workspace, readonly):
 
-        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
-        self._dataset_id = dataset_id
-        self._dataset_name = dataset_name
-        self._workspace_name = workspace_name
-        self._workspace_id = workspace_id
+        self._is_azure_as = False
+        prefix = "asazure"
+        prefix_full = f"{prefix}://"
+        read_write = ":rw"
+        self._token_provider = auth.token_provider.get()
+
+        # Azure AS workspace logic
+        if workspace is not None and workspace.startswith(prefix_full):
+            # Set read or read/write accordingly
+            if readonly is False and not workspace.endswith(read_write):
+                workspace += read_write
+            elif readonly is True and workspace.endswith(read_write):
+                workspace = workspace[: -len(read_write)]
+            self._workspace_name = workspace
+            self._workspace_id = workspace
+            self._dataset_id = dataset
+            self._dataset_name = dataset
+            self._is_azure_as = True
+            if self._token_provider is None:
+                raise ValueError(
+                    f"{icons.red_dot} A token provider must be provided when connecting to an Azure AS workspace."
+                )
+        else:
+            (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+            (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+                dataset, workspace_id
+            )
+            self._dataset_id = dataset_id
+            self._dataset_name = dataset_name
+            self._workspace_name = workspace_name
+            self._workspace_id = workspace_id
         self._readonly = readonly
         self._tables_added = []
 
-        self._tom_server = fabric.create_tom_server(
-            readonly=readonly, workspace=workspace_id
-        )
-        self.model = self._tom_server.Databases[dataset_id].Model
+        # No token provider (standard authentication)
+        if self._token_provider is None:
+            self._tom_server = fabric.create_tom_server(
+                readonly=readonly, workspace=workspace_id
+            )
+        # Service Principal Authentication for Azure AS via token provider
+        elif self._is_azure_as:
+            import Microsoft.AnalysisServices.Tabular as TOM
+
+            # Extract region from the workspace
+            match = re.search(rf"{prefix_full}(.*?).{prefix}", self._workspace_name)
+            if match:
+                region = match.group(1)
+            if self._token_provider is None:
+                raise ValueError(
+                    f"{icons.red_dot} A token provider must be provided when connecting to Azure Analysis Services."
+                )
+            token = self._token_provider(audience="asazure", region=region)
+            connection_str = f'Provider=MSOLAP;Data Source={self._workspace_name};Password="{token}";Persist Security Info=True;Impersonation Level=Impersonate'
+            self._tom_server = TOM.Server()
+            self._tom_server.Connect(connection_str)
+        # Service Principal Authentication for Power BI via token provider
+        else:
+            from sempy.fabric._client._utils import _build_adomd_connection_string
+            import Microsoft.AnalysisServices.Tabular as TOM
+            from Microsoft.AnalysisServices import AccessToken
+            from sempy.fabric._token_provider import (
+                create_on_access_token_expired_callback,
+                ConstantTokenProvider,
+            )
+            from System import Func
+
+            token = self._token_provider(audience="pbi")
+            self._tom_server = TOM.Server()
+            get_access_token = create_on_access_token_expired_callback(
+                ConstantTokenProvider(token)
+            )
+            self._tom_server.AccessToken = get_access_token(None)
+            self._tom_server.OnAccessTokenExpired = Func[AccessToken, AccessToken](
+                get_access_token
+            )
+            workspace_url = f"powerbi://api.powerbi.com/v1.0/myorg/{workspace}"
+            connection_str = _build_adomd_connection_string(
+                workspace_url, readonly=readonly
+            )
+            self._tom_server.Connect(connection_str)
+
+        if self._is_azure_as:
+            self.model = self._tom_server.Databases.GetByName(self._dataset_name).Model
+        else:
+            self.model = self._tom_server.Databases[dataset_id].Model
 
         self._table_map = {}
         self._column_map = {}
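
TOMWrapper now reads an optional token provider from sempy_labs._authentication and takes one of three paths: the default fabric.create_tom_server, Azure AS via an MSOLAP connection string, or Power BI via an ADOMD connection string with a token-refresh callback. A rough wiring sketch using azure-identity follows; the diff only shows auth.token_provider.get() and calls like provider(audience="pbi"), so the ContextVar-style .set(), the scope URLs, and all credential values below are assumptions:

    from azure.identity import ClientSecretCredential
    import sempy_labs._authentication as auth

    credential = ClientSecretCredential(
        tenant_id="<tenant-id>",       # placeholder
        client_id="<app-id>",          # placeholder
        client_secret="<app-secret>",  # placeholder
    )

    def token_provider(audience: str, region: str | None = None) -> str:
        # Map the audiences used above to AAD scopes (assumed resource URIs).
        scopes = {
            "pbi": "https://analysis.windows.net/powerbi/api/.default",
            "asazure": f"https://{region}.asazure.windows.net/.default",
        }
        return credential.get_token(scopes[audience]).token

    auth.token_provider.set(token_provider)  # assumed ContextVar-style setter
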
@@ -3287,6 +3361,10 @@
         """
         import Microsoft.AnalysisServices.Tabular as TOM
 
+        dependencies = dependencies[
+            dependencies["Object Name"] == dependencies["Parent Node"]
+        ]
+
         for obj in self.depends_on(object=object, dependencies=dependencies):
             if obj.ObjectType == TOM.ObjectType.Measure:
                 if (f"{obj.Parent.Name}[{obj.Name}]" in object.Expression) or (
@@ -3313,12 +3391,16 @@
         """
         import Microsoft.AnalysisServices.Tabular as TOM
 
+        dependencies = dependencies[
+            dependencies["Object Name"] == dependencies["Parent Node"]
+        ]
+
         def create_pattern(tableList, b):
             patterns = [
-                r"(?<!" + re.escape(table) + r"\[)(?<!" + re.escape(table) + r"'\[)"
+                r"(?<!" + re.escape(table) + r")(?<!'" + re.escape(table) + r"')"
                 for table in tableList
             ]
-            combined_pattern = "".join(patterns) + re.escape(b)
+            combined_pattern = "".join(patterns) + re.escape(f"[{b}]")
             return combined_pattern
 
         for obj in self.depends_on(object=object, dependencies=dependencies):
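
The lookbehind fix means a bare [Column] reference is matched while table-qualified references are excluded. A quick standalone illustration of the new create_pattern logic, with made-up names:

    import re

    tables, column = ["Sales"], "Amount"
    patterns = [
        r"(?<!" + re.escape(t) + r")(?<!'" + re.escape(t) + r"')" for t in tables
    ]
    combined = "".join(patterns) + re.escape(f"[{column}]")

    # Matches the unqualified column reference...
    print(bool(re.search(combined, "1 + [Amount]")))          # True
    # ...but not the qualified forms, which the lookbehinds exclude.
    print(bool(re.search(combined, "SUM(Sales[Amount])")))    # False
    print(bool(re.search(combined, "SUM('Sales'[Amount])")))  # False
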
@@ -4396,8 +4478,6 @@
         if isinstance(measure_name, str):
             measure_name = [measure_name]
 
-        client = fabric.FabricRestClient()
-
         if len(measure_name) > max_batch_size:
             measure_lists = [
                 measure_name[i : i + max_batch_size]
@@ -4438,6 +4518,7 @@
                 "modelItems"
             ].append(new_item)
 
+        client = fabric.FabricRestClient()
         response = client.post("/explore/v202304/nl2nl/completions", json=payload)
         if response.status_code != 200:
             raise FabricHTTPException(response)
@@ -4529,7 +4610,7 @@
 
     def add_role_member(self, role_name: str, member: str | List[str]):
         """
-        Adds a external model role member (AzureAD) to a role.
+        Adds an external model role member (AzureAD) to a role.
 
         Parameters
         ----------
@@ -4561,6 +4642,35 @@
                     f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role."
                 )
 
+    def remove_role_member(self, role_name: str, member: str | List[str]):
+        """
+        Removes an external model role member (AzureAD) from a role.
+
+        Parameters
+        ----------
+        role_name : str
+            The role name.
+        member : str | List[str]
+            The email address(es) of the member(s) to remove.
+        """
+
+        if isinstance(member, str):
+            member = [member]
+
+        role = self.model.Roles[role_name]
+        current_members = {m.MemberName: m.Name for m in role.Members}
+        for m in member:
+            name = current_members.get(m)
+            if name is not None:
+                role.Members.Remove(role.Members[name])
+                print(
+                    f"{icons.green_dot} The '{m}' member has been removed from the '{role_name}' role."
+                )
+            else:
+                print(
+                    f"{icons.yellow_dot} '{m}' is not a member of the '{role_name}' role."
+                )
+
     def close(self):
 
         if not self._readonly and self.model is not None:
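
The new remove_role_member mirrors add_role_member. A usage sketch (model, workspace, role, and member values are placeholders); readonly=False is needed so the removal is saved back to the server on close:

    from sempy_labs.tom import connect_semantic_model

    with connect_semantic_model(
        dataset="Sales Model", readonly=False, workspace="Sales Analytics"
    ) as tom:
        tom.remove_role_member(role_name="Reader", member="user@contoso.com")
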
@@ -4630,22 +4740,26 @@
 @log
 @contextmanager
 def connect_semantic_model(
-    dataset: str | UUID, readonly: bool = True, workspace: Optional[str] = None
+    dataset: str | UUID,
+    readonly: bool = True,
+    workspace: Optional[str | UUID] = None,
 ) -> Iterator[TOMWrapper]:
     """
     Connects to the Tabular Object Model (TOM) within a semantic model.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
     readonly: bool, default=True
         Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID. Also supports Azure Analysis Services (Service Principal Authentication required).
+        If connecting to Azure Analysis Services, enter the workspace parameter in the following format: 'asazure://<region>.asazure.windows.net/<server_name>'.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
     Returns
     -------
     typing.Iterator[TOMWrapper]
@@ -4655,7 +4769,11 @@ def connect_semantic_model(
     # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
     sempy.fabric._client._utils._init_analysis_services()
 
-    tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
+    tw = TOMWrapper(
+        dataset=dataset,
+        workspace=workspace,
+        readonly=readonly,
+    )
     try:
         yield tw
     finally:
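
Putting the pieces together for Azure Analysis Services: the workspace string uses the format from the new docstring, and a token provider must already be registered (the constructor raises a ValueError otherwise). Server, region, and model names below are placeholders:

    from sempy_labs.tom import connect_semantic_model

    with connect_semantic_model(
        dataset="AdventureWorks",  # placeholder model name
        readonly=True,
        workspace="asazure://westus.asazure.windows.net/myserver",  # placeholder
    ) as tom:
        for table in tom.model.Tables:
            print(table.Name)
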