semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
@@ -1,20 +1,22 @@
1
1
  import sempy.fabric as fabric
2
2
  from sempy_labs._helper_functions import (
3
3
  resolve_dataset_id,
4
+ resolve_workspace_name_and_id,
4
5
  resolve_report_id,
5
6
  )
6
7
  from typing import Optional, List
7
8
  from sempy._utils._log import log
8
9
  import sempy_labs._icons as icons
9
10
  from sempy.fabric.exceptions import FabricHTTPException
11
+ from uuid import UUID
10
12
 
11
13
 
12
14
  @log
13
15
  def report_rebind(
14
16
  report: str | List[str],
15
17
  dataset: str,
16
- report_workspace: Optional[str] = None,
17
- dataset_workspace: Optional[str] = None,
18
+ report_workspace: Optional[str | UUID] = None,
19
+ dataset_workspace: Optional[str | UUID] = None,
18
20
  ):
19
21
  """
20
22
  Rebinds a report to a semantic model.
@@ -27,23 +29,22 @@ def report_rebind(
27
29
  Name(s) of the Power BI report(s).
28
30
  dataset : str
29
31
  Name of the semantic model.
30
- report_workspace : str, default=None
31
- The name of the Fabric workspace in which the report resides.
32
+ report_workspace : str | uuid.UUID, default=None
33
+ The name or ID of the Fabric workspace in which the report resides.
32
34
  Defaults to None which resolves to the workspace of the attached lakehouse
33
35
  or if no lakehouse attached, resolves to the workspace of the notebook.
34
- dataset_workspace : str, default=None
35
- The name of the Fabric workspace in which the semantic model resides.
36
+ dataset_workspace : str | uuid.UUID, default=None
37
+ The name or ID of the Fabric workspace in which the semantic model resides.
36
38
  Defaults to None which resolves to the workspace of the attached lakehouse
37
39
  or if no lakehouse attached, resolves to the workspace of the notebook.
38
40
  """
39
41
 
40
- if report_workspace is None:
41
- report_workspace_id = fabric.get_workspace_id()
42
- report_workspace = fabric.resolve_workspace_name(report_workspace_id)
43
- else:
44
- report_workspace_id = fabric.resolve_workspace_id(report_workspace)
42
+ (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
43
+ report_workspace
44
+ )
45
+
45
46
  if dataset_workspace is None:
46
- dataset_workspace = report_workspace
47
+ dataset_workspace = report_workspace_name
47
48
 
48
49
  client = fabric.PowerBIRestClient()
49
50
 
@@ -51,14 +52,14 @@ def report_rebind(
51
52
  report = [report]
52
53
 
53
54
  for rpt in report:
54
- reportId = resolve_report_id(report=rpt, workspace=report_workspace)
55
- datasetId = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
55
+ report_id = resolve_report_id(report=rpt, workspace=report_workspace_id)
56
+ dataset_id = resolve_dataset_id(dataset=dataset, workspace=dataset_workspace)
56
57
 
57
58
  # Prepare API
58
- request_body = {"datasetId": datasetId}
59
+ request_body = {"datasetId": dataset_id}
59
60
 
60
61
  response = client.post(
61
- f"/v1.0/myorg/groups/{report_workspace_id}/reports/{reportId}/Rebind",
62
+ f"/v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
62
63
  json=request_body,
63
64
  )
64
65
 
@@ -8,6 +8,7 @@ from sempy_labs._helper_functions import (
8
8
  _add_part,
9
9
  lro,
10
10
  _decode_b64,
11
+ resolve_workspace_name_and_id,
11
12
  )
12
13
  from typing import Optional, List
13
14
  import pandas as pd
@@ -32,8 +33,8 @@ class ReportWrapper:
32
33
  ----------
33
34
  report : str
34
35
  The name of the report.
35
- workspace : str
36
- The name of the workspace in which the report resides.
36
+ workspace : str | uuid.UUID
37
+ The name or ID of the workspace in which the report resides.
37
38
  Defaults to None which resolves to the workspace of the attached lakehouse
38
39
  or if no lakehouse attached, resolves to the workspace of the notebook.
39
40
 
@@ -50,7 +51,7 @@ class ReportWrapper:
50
51
  def __init__(
51
52
  self,
52
53
  report: str,
53
- workspace: Optional[str] = None,
54
+ workspace: Optional[str | UUID] = None,
54
55
  ):
55
56
  """
56
57
  Connects to a Power BI report and retrieves its definition.
@@ -61,8 +62,8 @@ class ReportWrapper:
61
62
  ----------
62
63
  report : str
63
64
  The name of the report.
64
- workspace : str
65
- The name of the workspace in which the report resides.
65
+ workspace : str | UUID
66
+ The name or ID of the workspace in which the report resides.
66
67
  Defaults to None which resolves to the workspace of the attached lakehouse
67
68
  or if no lakehouse attached, resolves to the workspace of the notebook.
68
69
 
@@ -77,11 +78,12 @@ class ReportWrapper:
77
78
  warnings.simplefilter(action="ignore", category=FutureWarning)
78
79
 
79
80
  self._report = report
80
- self._workspace = workspace
81
- self._workspace_id = fabric.resolve_workspace_id(workspace)
82
- self._report_id = resolve_report_id(report, workspace)
81
+ (self._workspace_name, self._workspace_id) = resolve_workspace_name_and_id(
82
+ workspace
83
+ )
84
+ self._report_id = resolve_report_id(report, self._workspace_id)
83
85
  self.rdef = get_report_definition(
84
- report=self._report, workspace=self._workspace
86
+ report=self._report, workspace=self._workspace_id
85
87
  )
86
88
 
87
89
  if len(self.rdef[self.rdef["path"] == "definition/report.json"]) == 0:
@@ -95,12 +97,14 @@ class ReportWrapper:
95
97
 
96
98
  from sempy_labs.tom import connect_semantic_model
97
99
 
98
- dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
99
- resolve_dataset_from_report(report=self._report, workspace=self._workspace)
100
+ dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
101
+ resolve_dataset_from_report(
102
+ report=self._report, workspace=self._workspace_id
103
+ )
100
104
  )
101
105
 
102
106
  with connect_semantic_model(
103
- dataset=dataset_name, readonly=True, workspace=dataset_workspace
107
+ dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
104
108
  ) as tom:
105
109
  for index, row in dataframe.iterrows():
106
110
  obj_type = row["Object Type"]
@@ -218,7 +222,7 @@ class ReportWrapper:
218
222
  theme_collection = rptJson.get("themeCollection", {})
219
223
  if theme_type not in theme_collection:
220
224
  raise ValueError(
221
- f"{icons.red_dot} The {self._report} report within the '{self._workspace} workspace has no custom theme."
225
+ f"{icons.red_dot} The {self._report} report within the '{self._workspace_name} workspace has no custom theme."
222
226
  )
223
227
  ct = theme_collection.get(theme_type)
224
228
  theme_name = ct["name"]
@@ -413,7 +417,7 @@ class ReportWrapper:
413
417
  )
414
418
 
415
419
  df["Page URL"] = df["Page Name"].apply(
416
- lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace)}/{page_name}"
420
+ lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace_id)}/{page_name}"
417
421
  )
418
422
 
419
423
  bool_cols = ["Hidden", "Locked", "Used"]
@@ -691,7 +695,7 @@ class ReportWrapper:
691
695
  df[bool_cols] = df[bool_cols].astype(bool)
692
696
 
693
697
  df["Page URL"] = df["Page Name"].apply(
694
- lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace)}/{page_name}"
698
+ lambda page_name: f"{helper.get_web_url(report=self._report, workspace=self._workspace_id)}/{page_name}"
695
699
  )
696
700
 
697
701
  return df
@@ -1170,9 +1174,9 @@ class ReportWrapper:
1170
1174
  )
1171
1175
 
1172
1176
  if extended:
1173
- dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
1177
+ dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
1174
1178
  resolve_dataset_from_report(
1175
- report=self._report, workspace=self._workspace
1179
+ report=self._report, workspace=self._workspace_id
1176
1180
  )
1177
1181
  )
1178
1182
 
@@ -1195,7 +1199,7 @@ class ReportWrapper:
1195
1199
  return object_validators.get(row["Object Type"], lambda: False)()
1196
1200
 
1197
1201
  with connect_semantic_model(
1198
- dataset=dataset_name, readonly=True, workspace=dataset_workspace
1202
+ dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
1199
1203
  ) as tom:
1200
1204
  df["Valid Semantic Model Object"] = df.apply(
1201
1205
  lambda row: check_validity(tom, row), axis=1
@@ -1214,11 +1218,13 @@ class ReportWrapper:
1214
1218
  .drop_duplicates()
1215
1219
  .reset_index(drop=True)
1216
1220
  )
1217
- dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
1218
- resolve_dataset_from_report(report=self._report, workspace=self._workspace)
1221
+ dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
1222
+ resolve_dataset_from_report(
1223
+ report=self._report, workspace=self._workspace_id
1224
+ )
1219
1225
  )
1220
1226
  dep = get_measure_dependencies(
1221
- dataset=dataset_name, workspace=dataset_workspace
1227
+ dataset=dataset_id, workspace=dataset_workspace_id
1222
1228
  )
1223
1229
  rpt_measures = df[df["Object Type"] == "Measure"]["Object Name"].values
1224
1230
  new_rows = dep[dep["Object Name"].isin(rpt_measures)][
@@ -1232,7 +1238,7 @@ class ReportWrapper:
1232
1238
  )
1233
1239
 
1234
1240
  result_df["Dataset Name"] = dataset_name
1235
- result_df["Dataset Workspace Name"] = dataset_workspace
1241
+ result_df["Dataset Workspace Name"] = dataset_workspace_name
1236
1242
  colName = "Dataset Name"
1237
1243
  result_df.insert(0, colName, result_df.pop(colName))
1238
1244
  colName = "Dataset Workspace Name"
@@ -1539,7 +1545,7 @@ class ReportWrapper:
1539
1545
 
1540
1546
  self.update_report(request_body=request_body)
1541
1547
  print(
1542
- f"{icons.green_dot} The '{theme_name}' theme has been set as the theme for the '{self._report}' report within the '{self._workspace}' workspace."
1548
+ f"{icons.green_dot} The '{theme_name}' theme has been set as the theme for the '{self._report}' report within the '{self._workspace_name}' workspace."
1543
1549
  )
1544
1550
 
1545
1551
  def set_active_page(self, page_name: str):
@@ -1567,7 +1573,7 @@ class ReportWrapper:
1567
1573
  self._update_single_file(file_name=pages_file, new_payload=file_payload)
1568
1574
 
1569
1575
  print(
1570
- f"{icons.green_dot} The '{page_display_name}' page has been set as the active page in the '{self._report}' report within the '{self._workspace}' workspace."
1576
+ f"{icons.green_dot} The '{page_display_name}' page has been set as the active page in the '{self._report}' report within the '{self._workspace_name}' workspace."
1571
1577
  )
1572
1578
 
1573
1579
  def set_page_type(self, page_name: str, page_type: str):
@@ -1640,7 +1646,7 @@ class ReportWrapper:
1640
1646
  cv_remove_display.append(cv_display)
1641
1647
  if len(cv_remove) == 0:
1642
1648
  print(
1643
- f"{icons.green_dot} There are no unnecessary custom visuals in the '{self._report}' report within the '{self._workspace}' workspace."
1649
+ f"{icons.green_dot} There are no unnecessary custom visuals in the '{self._report}' report within the '{self._workspace_name}' workspace."
1644
1650
  )
1645
1651
  return
1646
1652
 
@@ -1662,7 +1668,7 @@ class ReportWrapper:
1662
1668
 
1663
1669
  self.update_report(request_body=request_body)
1664
1670
  print(
1665
- f"{icons.green_dot} The {cv_remove_display} custom visuals have been removed from the '{self._report}' report within the '{self._workspace}' workspace."
1671
+ f"{icons.green_dot} The {cv_remove_display} custom visuals have been removed from the '{self._report}' report within the '{self._workspace_name}' workspace."
1666
1672
  )
1667
1673
 
1668
1674
  def migrate_report_level_measures(self, measures: Optional[str | List[str]] = None):
@@ -1681,12 +1687,14 @@ class ReportWrapper:
1681
1687
  rlm = self.list_report_level_measures()
1682
1688
  if len(rlm) == 0:
1683
1689
  print(
1684
- f"{icons.green_dot} The '{self._report}' report within the '{self._workspace}' workspace has no report-level measures."
1690
+ f"{icons.green_dot} The '{self._report}' report within the '{self._workspace_name}' workspace has no report-level measures."
1685
1691
  )
1686
1692
  return
1687
1693
 
1688
- dataset_id, dataset_name, dataset_workspace_id, dataset_workspace = (
1689
- resolve_dataset_from_report(report=self._report, workspace=self._workspace)
1694
+ dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
1695
+ resolve_dataset_from_report(
1696
+ report=self._report, workspace=self._workspace_id
1697
+ )
1690
1698
  )
1691
1699
 
1692
1700
  if isinstance(measures, str):
@@ -1703,7 +1711,7 @@ class ReportWrapper:
1703
1711
 
1704
1712
  mCount = 0
1705
1713
  with connect_semantic_model(
1706
- dataset=dataset_name, readonly=False, workspace=dataset_workspace
1714
+ dataset=dataset_id, readonly=False, workspace=dataset_workspace_id
1707
1715
  ) as tom:
1708
1716
  for _, r in rlm.iterrows():
1709
1717
  tableName = r["Table Name"]
@@ -1748,7 +1756,7 @@ class ReportWrapper:
1748
1756
 
1749
1757
  self.update_report(request_body=request_body)
1750
1758
  print(
1751
- f"{icons.green_dot} The report-level measures have been migrated to the '{dataset_name}' semantic model within the '{dataset_workspace}' workspace."
1759
+ f"{icons.green_dot} The report-level measures have been migrated to the '{dataset_name}' semantic model within the '{dataset_workspace_name}' workspace."
1752
1760
  )
1753
1761
 
1754
1762
  def set_page_visibility(self, page_name: str, hidden: bool):
@@ -1798,7 +1806,7 @@ class ReportWrapper:
1798
1806
 
1799
1807
  if len(dfP_filt) == 0:
1800
1808
  print(
1801
- f"{icons.green_dot} There are no Tooltip or Drillthrough pages in the '{self._report}' report within the '{self._workspace}' workspace."
1809
+ f"{icons.green_dot} There are no Tooltip or Drillthrough pages in the '{self._report}' report within the '{self._workspace_name}' workspace."
1802
1810
  )
1803
1811
  return
1804
1812
 
@@ -1837,7 +1845,7 @@ class ReportWrapper:
1837
1845
 
1838
1846
  self.update_report(request_body=request_body)
1839
1847
  print(
1840
- f"{icons.green_dot} Show items with data has been disabled for all visuals in the '{self._report}' report within the '{self._workspace}' workspace."
1848
+ f"{icons.green_dot} Show items with data has been disabled for all visuals in the '{self._report}' report within the '{self._workspace_name}' workspace."
1841
1849
  )
1842
1850
 
1843
1851
  # Set Annotations
sempy_labs/tom/_model.py CHANGED
@@ -7,6 +7,8 @@ from sempy_labs._helper_functions import (
7
7
  format_dax_object_name,
8
8
  generate_guid,
9
9
  _make_list_unique,
10
+ resolve_dataset_name_and_id,
11
+ resolve_workspace_name_and_id,
10
12
  )
11
13
  from sempy_labs._list_functions import list_relationships
12
14
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
@@ -17,6 +19,7 @@ from sempy._utils._log import log
17
19
  import sempy_labs._icons as icons
18
20
  from sempy.fabric.exceptions import FabricHTTPException
19
21
  import ast
22
+ from uuid import UUID
20
23
 
21
24
  if TYPE_CHECKING:
22
25
  import Microsoft.AnalysisServices.Tabular
@@ -27,27 +30,33 @@ class TOMWrapper:
27
30
  """
28
31
  Convenience wrapper around the TOM object model for a semantic model. Always use the connect_semantic_model function to make sure the TOM object is initialized correctly.
29
32
 
30
- `XMLA read/write endpoints <https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#to-enable-read-write-for-a-premium-capacity>`_ must
31
- be enabled if setting the readonly parameter to False.
33
+ `XMLA read/write endpoints <https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#to-enable-read-write-for-a-premium-capacity>`_ must be enabled if setting the readonly parameter to False.
32
34
  """
33
35
 
34
- _dataset: str
35
- _workspace: str
36
+ _dataset_id: UUID
37
+ _dataset_name: str
38
+ _workspace_id: UUID
39
+ _workspace_name: str
36
40
  _readonly: bool
37
41
  _tables_added: List[str]
38
42
  _table_map = dict
39
43
  _column_map = dict
40
44
 
41
45
  def __init__(self, dataset, workspace, readonly):
42
- self._dataset = dataset
43
- self._workspace = workspace
46
+
47
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
48
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
49
+ self._dataset_id = dataset_id
50
+ self._dataset_name = dataset_name
51
+ self._workspace_name = workspace_name
52
+ self._workspace_id = workspace_id
44
53
  self._readonly = readonly
45
54
  self._tables_added = []
46
55
 
47
56
  self._tom_server = fabric.create_tom_server(
48
- readonly=readonly, workspace=workspace
57
+ readonly=readonly, workspace=workspace_id
49
58
  )
50
- self.model = self._tom_server.Databases.GetByName(dataset).Model
59
+ self.model = self._tom_server.Databases[dataset_id].Model
51
60
 
52
61
  self._table_map = {}
53
62
  self._column_map = {}
@@ -2160,7 +2169,9 @@ class TOMWrapper:
2160
2169
  )
2161
2170
  """
2162
2171
  df = fabric.evaluate_dax(
2163
- dataset=self._dataset, workspace=self._workspace, dax_string=dax_query
2172
+ dataset=self._dataset_id,
2173
+ workspace=self._workspace_id,
2174
+ dax_string=dax_query,
2164
2175
  )
2165
2176
  value = df["[1]"].iloc[0]
2166
2177
  if value != "1":
@@ -2424,7 +2435,7 @@ class TOMWrapper:
2424
2435
  )
2425
2436
  except Exception:
2426
2437
  raise ValueError(
2427
- f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
2438
+ f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}'."
2428
2439
  )
2429
2440
 
2430
2441
  graphics = [
@@ -2467,7 +2478,7 @@ class TOMWrapper:
2467
2478
  )
2468
2479
  except Exception:
2469
2480
  raise ValueError(
2470
- f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
2481
+ f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}'."
2471
2482
  )
2472
2483
 
2473
2484
  if measure_target:
@@ -2793,7 +2804,7 @@ class TOMWrapper:
2793
2804
  success = True
2794
2805
  if not success:
2795
2806
  raise ValueError(
2796
- f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset}' semantic model."
2807
+ f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset_name}' semantic model."
2797
2808
  )
2798
2809
  else:
2799
2810
  i += 1
@@ -2881,19 +2892,19 @@ class TOMWrapper:
2881
2892
  from sempy_labs._list_functions import list_tables
2882
2893
 
2883
2894
  dfT = list_tables(
2884
- dataset=self._dataset, workspace=self._workspace, extended=True
2895
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2885
2896
  )
2886
2897
  dfC = fabric.list_columns(
2887
- dataset=self._dataset, workspace=self._workspace, extended=True
2898
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2888
2899
  )
2889
2900
  dfP = fabric.list_partitions(
2890
- dataset=self._dataset, workspace=self._workspace, extended=True
2901
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2891
2902
  )
2892
2903
  dfH = fabric.list_hierarchies(
2893
- dataset=self._dataset, workspace=self._workspace, extended=True
2904
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2894
2905
  )
2895
2906
  dfR = list_relationships(
2896
- dataset=self._dataset, workspace=self._workspace, extended=True
2907
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2897
2908
  )
2898
2909
 
2899
2910
  for t in self.model.Tables:
@@ -3276,6 +3287,10 @@ class TOMWrapper:
3276
3287
  """
3277
3288
  import Microsoft.AnalysisServices.Tabular as TOM
3278
3289
 
3290
+ dependencies = dependencies[
3291
+ dependencies["Object Name"] == dependencies["Parent Node"]
3292
+ ]
3293
+
3279
3294
  for obj in self.depends_on(object=object, dependencies=dependencies):
3280
3295
  if obj.ObjectType == TOM.ObjectType.Measure:
3281
3296
  if (f"{obj.Parent.Name}[{obj.Name}]" in object.Expression) or (
@@ -3302,12 +3317,16 @@ class TOMWrapper:
3302
3317
  """
3303
3318
  import Microsoft.AnalysisServices.Tabular as TOM
3304
3319
 
3320
+ dependencies = dependencies[
3321
+ dependencies["Object Name"] == dependencies["Parent Node"]
3322
+ ]
3323
+
3305
3324
  def create_pattern(tableList, b):
3306
3325
  patterns = [
3307
- r"(?<!" + re.escape(table) + r"\[)(?<!" + re.escape(table) + r"'\[)"
3326
+ r"(?<!" + re.escape(table) + r")(?<!'" + re.escape(table) + r"')"
3308
3327
  for table in tableList
3309
3328
  ]
3310
- combined_pattern = "".join(patterns) + re.escape(b)
3329
+ combined_pattern = "".join(patterns) + re.escape(f"[{b}]")
3311
3330
  return combined_pattern
3312
3331
 
3313
3332
  for obj in self.depends_on(object=object, dependencies=dependencies):
@@ -3338,7 +3357,9 @@ class TOMWrapper:
3338
3357
  usingView = False
3339
3358
 
3340
3359
  if self.is_direct_lake():
3341
- df = check_fallback_reason(dataset=self._dataset, workspace=self._workspace)
3360
+ df = check_fallback_reason(
3361
+ dataset=self._dataset_id, workspace=self._workspace_id
3362
+ )
3342
3363
  df_filt = df[df["FallbackReasonID"] == 2]
3343
3364
 
3344
3365
  if len(df_filt) > 0:
@@ -3385,7 +3406,7 @@ class TOMWrapper:
3385
3406
 
3386
3407
  if rp is None:
3387
3408
  print(
3388
- f"{icons.yellow_dot} The '{table_name}' table in the '{self._dataset}' semantic model within the '{self._workspace}' workspace does not have an incremental refresh policy."
3409
+ f"{icons.yellow_dot} The '{table_name}' table in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace does not have an incremental refresh policy."
3389
3410
  )
3390
3411
  else:
3391
3412
  print(f"Table Name: {table_name}")
@@ -3884,14 +3905,14 @@ class TOMWrapper:
3884
3905
 
3885
3906
  if table_name is None:
3886
3907
  raise ValueError(
3887
- f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
3908
+ f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
3888
3909
  )
3889
3910
 
3890
3911
  table_name = matching_measures[0]
3891
3912
  # Validate date table
3892
3913
  if not self.is_date_table(date_table):
3893
3914
  raise ValueError(
3894
- f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset}' wemantic model within the '{self._workspace}' workspace."
3915
+ f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset_name}' wemantic model within the '{self._workspace_name}' workspace."
3895
3916
  )
3896
3917
 
3897
3918
  # Extract date key from date table
@@ -3903,7 +3924,7 @@ class TOMWrapper:
3903
3924
 
3904
3925
  if not matching_columns:
3905
3926
  raise ValueError(
3906
- f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
3927
+ f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
3907
3928
  )
3908
3929
 
3909
3930
  date_key = matching_columns[0]
@@ -4383,7 +4404,6 @@ class TOMWrapper:
4383
4404
  if isinstance(measure_name, str):
4384
4405
  measure_name = [measure_name]
4385
4406
 
4386
- workspace_id = fabric.resolve_workspace_id(self._workspace)
4387
4407
  client = fabric.FabricRestClient()
4388
4408
 
4389
4409
  if len(measure_name) > max_batch_size:
@@ -4402,7 +4422,7 @@ class TOMWrapper:
4402
4422
  "modelItems": [],
4403
4423
  },
4404
4424
  },
4405
- "workspaceId": workspace_id,
4425
+ "workspaceId": self._workspace_id,
4406
4426
  "artifactInfo": {"artifactType": "SemanticModel"},
4407
4427
  }
4408
4428
  for m_name in measure_list:
@@ -4413,7 +4433,7 @@ class TOMWrapper:
4413
4433
  )
4414
4434
  if t_name is None:
4415
4435
  raise ValueError(
4416
- f"{icons.red_dot} The '{m_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
4436
+ f"{icons.red_dot} The '{m_name}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
4417
4437
  )
4418
4438
 
4419
4439
  new_item = {
@@ -4515,6 +4535,69 @@ class TOMWrapper:
4515
4535
  TOM.ValueFilterBehaviorType, value_filter_behavior
4516
4536
  )
4517
4537
 
4538
+ def add_role_member(self, role_name: str, member: str | List[str]):
4539
+ """
4540
+ Adds an external model role member (AzureAD) to a role.
4541
+
4542
+ Parameters
4543
+ ----------
4544
+ role_name : str
4545
+ The role name.
4546
+ member : str | List[str]
4547
+ The email address(es) of the member(s) to add.
4548
+ """
4549
+
4550
+ import Microsoft.AnalysisServices.Tabular as TOM
4551
+
4552
+ if isinstance(member, str):
4553
+ member = [member]
4554
+
4555
+ role = self.model.Roles[role_name]
4556
+ current_members = [m.MemberName for m in role.Members]
4557
+
4558
+ for m in member:
4559
+ if m not in current_members:
4560
+ rm = TOM.ExternalModelRoleMember()
4561
+ rm.IdentityProvider = "AzureAD"
4562
+ rm.MemberName = m
4563
+ role.Members.Add(rm)
4564
+ print(
4565
+ f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role."
4566
+ )
4567
+ else:
4568
+ print(
4569
+ f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role."
4570
+ )
4571
+
4572
+ def remove_role_member(self, role_name: str, member: str | List[str]):
4573
+ """
4574
+ Removes an external model role member (AzureAD) from a role.
4575
+
4576
+ Parameters
4577
+ ----------
4578
+ role_name : str
4579
+ The role name.
4580
+ member : str | List[str]
4581
+ The email address(es) of the member(s) to remove.
4582
+ """
4583
+
4584
+ if isinstance(member, str):
4585
+ member = [member]
4586
+
4587
+ role = self.model.Roles[role_name]
4588
+ current_members = {m.MemberName: m.Name for m in role.Members}
4589
+ for m in member:
4590
+ name = current_members.get(m)
4591
+ if name is not None:
4592
+ role.Members.Remove(role.Members[name])
4593
+ print(
4594
+ f"{icons.green_dot} The '{m}' member has been removed from the '{role_name}' role."
4595
+ )
4596
+ else:
4597
+ print(
4598
+ f"{icons.yellow_dot} '{m}' is not a member of the '{role_name}' role."
4599
+ )
4600
+
4518
4601
  def close(self):
4519
4602
 
4520
4603
  if not self._readonly and self.model is not None:
@@ -4572,9 +4655,9 @@ class TOMWrapper:
4572
4655
 
4573
4656
  if len(self._tables_added) > 0:
4574
4657
  refresh_semantic_model(
4575
- dataset=self._dataset,
4658
+ dataset=self._dataset_id,
4576
4659
  tables=self._tables_added,
4577
- workspace=self._workspace,
4660
+ workspace=self._workspace_id,
4578
4661
  )
4579
4662
  self.model = None
4580
4663
 
@@ -4584,19 +4667,19 @@ class TOMWrapper:
4584
4667
  @log
4585
4668
  @contextmanager
4586
4669
  def connect_semantic_model(
4587
- dataset: str, readonly: bool = True, workspace: Optional[str] = None
4670
+ dataset: str | UUID, readonly: bool = True, workspace: Optional[str] = None
4588
4671
  ) -> Iterator[TOMWrapper]:
4589
4672
  """
4590
4673
  Connects to the Tabular Object Model (TOM) within a semantic model.
4591
4674
 
4592
4675
  Parameters
4593
4676
  ----------
4594
- dataset : str
4595
- Name of the semantic model.
4677
+ dataset : str | uuid.UUID
4678
+ Name or ID of the semantic model.
4596
4679
  readonly: bool, default=True
4597
4680
  Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server.
4598
- workspace : str, default=None
4599
- The Fabric workspace name.
4681
+ workspace : str | uuid.UUID, default=None
4682
+ The Fabric workspace name or ID.
4600
4683
  Defaults to None which resolves to the workspace of the attached lakehouse
4601
4684
  or if no lakehouse attached, resolves to the workspace of the notebook.
4602
4685
 
@@ -4609,10 +4692,6 @@ def connect_semantic_model(
4609
4692
  # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
4610
4693
  sempy.fabric._client._utils._init_analysis_services()
4611
4694
 
4612
- if workspace is None:
4613
- workspace_id = fabric.get_workspace_id()
4614
- workspace = fabric.resolve_workspace_name(workspace_id)
4615
-
4616
4695
  tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
4617
4696
  try:
4618
4697
  yield tw