semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs has been flagged as potentially problematic; consult the package registry's advisory page for more details.

@@ -5,6 +5,7 @@ from sempy_labs._helper_functions import (
5
5
  resolve_workspace_name_and_id,
6
6
  _get_partition_map,
7
7
  _process_and_display_chart,
8
+ resolve_dataset_name_and_id,
8
9
  )
9
10
  from typing import Any, List, Optional, Union
10
11
  from sempy._utils._log import log
@@ -14,11 +15,12 @@ import pandas as pd
14
15
  import warnings
15
16
  import ipywidgets as widgets
16
17
  import json
18
+ from uuid import UUID
17
19
 
18
20
 
19
21
  @log
20
22
  def refresh_semantic_model(
21
- dataset: str,
23
+ dataset: str | UUID,
22
24
  tables: Optional[Union[str, List[str]]] = None,
23
25
  partitions: Optional[Union[str, List[str]]] = None,
24
26
  refresh_type: str = "full",
@@ -34,8 +36,8 @@ def refresh_semantic_model(
34
36
 
35
37
  Parameters
36
38
  ----------
37
- dataset : str
38
- Name of the semantic model.
39
+ dataset : str | UUID
40
+ Name or ID of the semantic model.
39
41
  tables : str, List[str], default=None
40
42
  A string or a list of tables to refresh.
41
43
  partitions: str, List[str], default=None
@@ -65,7 +67,8 @@ def refresh_semantic_model(
65
67
  If 'visualize' is set to True, returns a pandas dataframe showing the SSAS trace output used to generate the visualization.
66
68
  """
67
69
 
68
- workspace = fabric.resolve_workspace_name(workspace)
70
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
71
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
69
72
 
70
73
  if isinstance(tables, str):
71
74
  tables = [tables]
@@ -118,11 +121,11 @@ def refresh_semantic_model(
118
121
  def extract_failure_error():
119
122
  error_messages = []
120
123
  combined_messages = ""
121
- final_message = f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
124
+ final_message = f"{icons.red_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has failed."
122
125
  for _, r in fabric.get_refresh_execution_details(
123
126
  refresh_request_id=request_id,
124
- dataset=dataset,
125
- workspace=workspace,
127
+ dataset=dataset_id,
128
+ workspace=workspace_id,
126
129
  ).messages.iterrows():
127
130
  error_messages.append(f"{r['Type']}: {r['Message']}")
128
131
 
@@ -135,8 +138,8 @@ def refresh_semantic_model(
135
138
  # Function to perform dataset refresh
136
139
  def refresh_dataset():
137
140
  return fabric.refresh_dataset(
138
- dataset=dataset,
139
- workspace=workspace,
141
+ dataset=dataset_id,
142
+ workspace=workspace_id,
140
143
  refresh_type=refresh_type,
141
144
  retry_count=retry_count,
142
145
  apply_refresh_policy=apply_refresh_policy,
@@ -147,7 +150,9 @@ def refresh_semantic_model(
147
150
 
148
151
  def check_refresh_status(request_id):
149
152
  request_details = fabric.get_refresh_execution_details(
150
- dataset=dataset, refresh_request_id=request_id, workspace=workspace
153
+ dataset=dataset_id,
154
+ refresh_request_id=request_id,
155
+ workspace=workspace_id,
151
156
  )
152
157
  return request_details.status
153
158
 
@@ -169,7 +174,8 @@ def refresh_semantic_model(
169
174
  right_on="PartitionID",
170
175
  how="left",
171
176
  )
172
- _process_and_display_chart(df, title=title, widget=widget)
177
+ if not df.empty:
178
+ _process_and_display_chart(df, title=title, widget=widget)
173
179
  if stop:
174
180
  df.drop(["Object Name", "PartitionID"], axis=1, inplace=True)
175
181
  df.rename(columns={"TableName": "Table Name"}, inplace=True)
@@ -180,7 +186,7 @@ def refresh_semantic_model(
180
186
  if not visualize:
181
187
  request_id = refresh_dataset()
182
188
  print(
183
- f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
189
+ f"{icons.in_progress} Refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is in progress..."
184
190
  )
185
191
 
186
192
  # Monitor refresh progress and handle tracing if visualize is enabled
@@ -189,7 +195,7 @@ def refresh_semantic_model(
189
195
  widget = widgets.Output()
190
196
 
191
197
  with fabric.create_trace_connection(
192
- dataset=dataset, workspace=workspace
198
+ dataset=dataset_id, workspace=workspace_id
193
199
  ) as trace_connection:
194
200
  with trace_connection.create_trace(icons.refresh_event_schema) as trace:
195
201
  trace.start()
@@ -204,7 +210,7 @@ def refresh_semantic_model(
204
210
  raise ValueError(extract_failure_error())
205
211
  elif status == "Cancelled":
206
212
  print(
207
- f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
213
+ f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
208
214
  )
209
215
  return
210
216
 
@@ -231,7 +237,7 @@ def refresh_semantic_model(
231
237
  )
232
238
 
233
239
  print(
234
- f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
240
+ f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
235
241
  )
236
242
  return final_df
237
243
 
@@ -245,14 +251,14 @@ def refresh_semantic_model(
245
251
  raise ValueError(extract_failure_error())
246
252
  elif status == "Cancelled":
247
253
  print(
248
- f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
254
+ f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
249
255
  )
250
256
  return
251
257
 
252
258
  time.sleep(3)
253
259
 
254
260
  print(
255
- f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
261
+ f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
256
262
  )
257
263
 
258
264
  final_output = refresh_and_trace_dataset(
@@ -1,3 +1,6 @@
1
+ from sempy_labs.admin._scanner import (
2
+ scan_workspaces,
3
+ )
1
4
  from sempy_labs.admin._basic_functions import (
2
5
  assign_workspaces_to_capacity,
3
6
  unassign_workspaces_from_capacity,
@@ -66,4 +69,5 @@ __all__ = [
66
69
  "list_git_connections",
67
70
  "list_reports",
68
71
  "get_capacity_assignment_status",
72
+ "scan_workspaces",
69
73
  ]
@@ -777,6 +777,23 @@ def list_activity_events(
777
777
  "Object Display Name",
778
778
  "Experience",
779
779
  "Refresh Enforcement Policy",
780
+ "Is Success",
781
+ "Activity Id",
782
+ "Item Name",
783
+ "Dataset Name",
784
+ "Report Name",
785
+ "Capacity Id",
786
+ "Capacity Name",
787
+ "App Name",
788
+ "Dataset Id",
789
+ "Report Id",
790
+ "Artifact Id",
791
+ "Artifact Name",
792
+ "Report Type",
793
+ "App Report Id",
794
+ "Distribution Method",
795
+ "Consumption Method",
796
+ "Artifact Kind",
780
797
  ]
781
798
  )
782
799
 
@@ -825,6 +842,23 @@ def list_activity_events(
825
842
  "Object Display Name": i.get("ObjectDisplayName"),
826
843
  "Experience": i.get("Experience"),
827
844
  "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
845
+ "Is Success": i.get("IsSuccess"),
846
+ "Activity Id": i.get("ActivityId"),
847
+ "Item Name": i.get("ItemName"),
848
+ "Dataset Name": i.get("DatasetName"),
849
+ "Report Name": i.get("ReportName"),
850
+ "Capacity Id": i.get("CapacityId"),
851
+ "Capacity Name": i.get("CapacityName"),
852
+ "App Name": i.get("AppName"),
853
+ "Dataset Id": i.get("DatasetId"),
854
+ "Report Id": i.get("ReportId"),
855
+ "Artifact Id": i.get("ArtifactId"),
856
+ "Artifact Name": i.get("ArtifactName"),
857
+ "Report Type": i.get("ReportType"),
858
+ "App Report Id": i.get("AppReportId"),
859
+ "Distribution Method": i.get("DistributionMethod"),
860
+ "Consumption Method": i.get("ConsumptionMethod"),
861
+ "Artifact Kind": i.get("ArtifactKind"),
828
862
  }
829
863
  df = pd.concat(
830
864
  [df, pd.DataFrame(new_data, index=[0])],
@@ -16,12 +16,12 @@ def scan_workspaces(
16
16
  workspace: Optional[str | List[str] | UUID | List[UUID]] = None,
17
17
  ) -> dict:
18
18
  """
19
- Get the inventory and details of the tenant.
19
+ Gets the scan result for the specified scan.
20
20
 
21
21
  This is a wrapper function for the following APIs:
22
- `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-post-workspace-info>`_.
23
- `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
24
- `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.
22
+ `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-post-workspace-info>`_.
23
+ `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
24
+ `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.
25
25
 
26
26
  Parameters
27
27
  ----------
@@ -10,19 +10,21 @@ from sempy_labs._helper_functions import (
10
10
  resolve_dataset_id,
11
11
  resolve_lakehouse_name,
12
12
  _convert_data_type,
13
+ resolve_dataset_name_and_id,
14
+ resolve_workspace_name_and_id,
13
15
  )
14
16
 
15
17
 
16
18
  def check_fallback_reason(
17
- dataset: str, workspace: Optional[str] = None
19
+ dataset: str | UUID, workspace: Optional[str] = None
18
20
  ) -> pd.DataFrame:
19
21
  """
20
22
  Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery.
21
23
 
22
24
  Parameters
23
25
  ----------
24
- dataset : str
25
- Name of the semantic model.
26
+ dataset : str | UUID
27
+ Name or ID of the semantic model.
26
28
  workspace : str, default=None
27
29
  The Fabric workspace name.
28
30
  Defaults to None which resolves to the workspace of the attached lakehouse
@@ -35,19 +37,22 @@ def check_fallback_reason(
35
37
  """
36
38
  from sempy_labs.tom import connect_semantic_model
37
39
 
38
- workspace = fabric.resolve_workspace_name(workspace)
40
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
41
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(
42
+ dataset, workspace=workspace_id
43
+ )
39
44
 
40
45
  with connect_semantic_model(
41
- dataset=dataset, workspace=workspace, readonly=True
46
+ dataset=dataset_id, workspace=workspace_id, readonly=True
42
47
  ) as tom:
43
48
  if not tom.is_direct_lake():
44
49
  raise ValueError(
45
- f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
50
+ f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
46
51
  )
47
52
 
48
53
  df = fabric.evaluate_dax(
49
- dataset=dataset,
50
- workspace=workspace,
54
+ dataset=dataset_id,
55
+ workspace=workspace_id,
51
56
  dax_string="""
52
57
  SELECT [TableName] AS [Table Name],[FallbackReason] AS [FallbackReasonID]
53
58
  FROM $SYSTEM.TMSCHEMA_DELTA_TABLE_METADATA_STORAGES
sempy_labs/tom/_model.py CHANGED
@@ -7,6 +7,8 @@ from sempy_labs._helper_functions import (
7
7
  format_dax_object_name,
8
8
  generate_guid,
9
9
  _make_list_unique,
10
+ resolve_dataset_name_and_id,
11
+ resolve_workspace_name_and_id,
10
12
  )
11
13
  from sempy_labs._list_functions import list_relationships
12
14
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
@@ -17,6 +19,7 @@ from sempy._utils._log import log
17
19
  import sempy_labs._icons as icons
18
20
  from sempy.fabric.exceptions import FabricHTTPException
19
21
  import ast
22
+ from uuid import UUID
20
23
 
21
24
  if TYPE_CHECKING:
22
25
  import Microsoft.AnalysisServices.Tabular
@@ -27,27 +30,33 @@ class TOMWrapper:
27
30
  """
28
31
  Convenience wrapper around the TOM object model for a semantic model. Always use the connect_semantic_model function to make sure the TOM object is initialized correctly.
29
32
 
30
- `XMLA read/write endpoints <https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#to-enable-read-write-for-a-premium-capacity>`_ must
31
- be enabled if setting the readonly parameter to False.
33
+ `XMLA read/write endpoints <https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#to-enable-read-write-for-a-premium-capacity>`_ must be enabled if setting the readonly parameter to False.
32
34
  """
33
35
 
34
- _dataset: str
35
- _workspace: str
36
+ _dataset_id: UUID
37
+ _dataset_name: str
38
+ _workspace_id: UUID
39
+ _workspace_name: str
36
40
  _readonly: bool
37
41
  _tables_added: List[str]
38
42
  _table_map = dict
39
43
  _column_map = dict
40
44
 
41
45
  def __init__(self, dataset, workspace, readonly):
42
- self._dataset = dataset
43
- self._workspace = workspace
46
+
47
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
48
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
49
+ self._dataset_id = dataset_id
50
+ self._dataset_name = dataset_name
51
+ self._workspace_name = workspace_name
52
+ self._workspace_id = workspace_id
44
53
  self._readonly = readonly
45
54
  self._tables_added = []
46
55
 
47
56
  self._tom_server = fabric.create_tom_server(
48
- readonly=readonly, workspace=workspace
57
+ readonly=readonly, workspace=workspace_id
49
58
  )
50
- self.model = self._tom_server.Databases.GetByName(dataset).Model
59
+ self.model = self._tom_server.Databases[dataset_id].Model
51
60
 
52
61
  self._table_map = {}
53
62
  self._column_map = {}
@@ -2160,7 +2169,9 @@ class TOMWrapper:
2160
2169
  )
2161
2170
  """
2162
2171
  df = fabric.evaluate_dax(
2163
- dataset=self._dataset, workspace=self._workspace, dax_string=dax_query
2172
+ dataset=self._dataset_id,
2173
+ workspace=self._workspace_id,
2174
+ dax_string=dax_query,
2164
2175
  )
2165
2176
  value = df["[1]"].iloc[0]
2166
2177
  if value != "1":
@@ -2424,7 +2435,7 @@ class TOMWrapper:
2424
2435
  )
2425
2436
  except Exception:
2426
2437
  raise ValueError(
2427
- f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
2438
+ f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}'."
2428
2439
  )
2429
2440
 
2430
2441
  graphics = [
@@ -2467,7 +2478,7 @@ class TOMWrapper:
2467
2478
  )
2468
2479
  except Exception:
2469
2480
  raise ValueError(
2470
- f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
2481
+ f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}'."
2471
2482
  )
2472
2483
 
2473
2484
  if measure_target:
@@ -2793,7 +2804,7 @@ class TOMWrapper:
2793
2804
  success = True
2794
2805
  if not success:
2795
2806
  raise ValueError(
2796
- f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset}' semantic model."
2807
+ f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset_name}' semantic model."
2797
2808
  )
2798
2809
  else:
2799
2810
  i += 1
@@ -2881,19 +2892,19 @@ class TOMWrapper:
2881
2892
  from sempy_labs._list_functions import list_tables
2882
2893
 
2883
2894
  dfT = list_tables(
2884
- dataset=self._dataset, workspace=self._workspace, extended=True
2895
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2885
2896
  )
2886
2897
  dfC = fabric.list_columns(
2887
- dataset=self._dataset, workspace=self._workspace, extended=True
2898
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2888
2899
  )
2889
2900
  dfP = fabric.list_partitions(
2890
- dataset=self._dataset, workspace=self._workspace, extended=True
2901
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2891
2902
  )
2892
2903
  dfH = fabric.list_hierarchies(
2893
- dataset=self._dataset, workspace=self._workspace, extended=True
2904
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2894
2905
  )
2895
2906
  dfR = list_relationships(
2896
- dataset=self._dataset, workspace=self._workspace, extended=True
2907
+ dataset=self._dataset_id, workspace=self._workspace_id, extended=True
2897
2908
  )
2898
2909
 
2899
2910
  for t in self.model.Tables:
@@ -3338,7 +3349,9 @@ class TOMWrapper:
3338
3349
  usingView = False
3339
3350
 
3340
3351
  if self.is_direct_lake():
3341
- df = check_fallback_reason(dataset=self._dataset, workspace=self._workspace)
3352
+ df = check_fallback_reason(
3353
+ dataset=self._dataset_id, workspace=self._workspace_id
3354
+ )
3342
3355
  df_filt = df[df["FallbackReasonID"] == 2]
3343
3356
 
3344
3357
  if len(df_filt) > 0:
@@ -3385,7 +3398,7 @@ class TOMWrapper:
3385
3398
 
3386
3399
  if rp is None:
3387
3400
  print(
3388
- f"{icons.yellow_dot} The '{table_name}' table in the '{self._dataset}' semantic model within the '{self._workspace}' workspace does not have an incremental refresh policy."
3401
+ f"{icons.yellow_dot} The '{table_name}' table in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace does not have an incremental refresh policy."
3389
3402
  )
3390
3403
  else:
3391
3404
  print(f"Table Name: {table_name}")
@@ -3884,14 +3897,14 @@ class TOMWrapper:
3884
3897
 
3885
3898
  if table_name is None:
3886
3899
  raise ValueError(
3887
- f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
3900
+ f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
3888
3901
  )
3889
3902
 
3890
3903
  table_name = matching_measures[0]
3891
3904
  # Validate date table
3892
3905
  if not self.is_date_table(date_table):
3893
3906
  raise ValueError(
3894
- f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset}' wemantic model within the '{self._workspace}' workspace."
3907
+ f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset_name}' wemantic model within the '{self._workspace_name}' workspace."
3895
3908
  )
3896
3909
 
3897
3910
  # Extract date key from date table
@@ -3903,7 +3916,7 @@ class TOMWrapper:
3903
3916
 
3904
3917
  if not matching_columns:
3905
3918
  raise ValueError(
3906
- f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
3919
+ f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
3907
3920
  )
3908
3921
 
3909
3922
  date_key = matching_columns[0]
@@ -4383,7 +4396,6 @@ class TOMWrapper:
4383
4396
  if isinstance(measure_name, str):
4384
4397
  measure_name = [measure_name]
4385
4398
 
4386
- workspace_id = fabric.resolve_workspace_id(self._workspace)
4387
4399
  client = fabric.FabricRestClient()
4388
4400
 
4389
4401
  if len(measure_name) > max_batch_size:
@@ -4402,7 +4414,7 @@ class TOMWrapper:
4402
4414
  "modelItems": [],
4403
4415
  },
4404
4416
  },
4405
- "workspaceId": workspace_id,
4417
+ "workspaceId": self._workspace_id,
4406
4418
  "artifactInfo": {"artifactType": "SemanticModel"},
4407
4419
  }
4408
4420
  for m_name in measure_list:
@@ -4413,7 +4425,7 @@ class TOMWrapper:
4413
4425
  )
4414
4426
  if t_name is None:
4415
4427
  raise ValueError(
4416
- f"{icons.red_dot} The '{m_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
4428
+ f"{icons.red_dot} The '{m_name}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
4417
4429
  )
4418
4430
 
4419
4431
  new_item = {
@@ -4515,6 +4527,40 @@ class TOMWrapper:
4515
4527
  TOM.ValueFilterBehaviorType, value_filter_behavior
4516
4528
  )
4517
4529
 
4530
+ def add_role_member(self, role_name: str, member: str | List[str]):
4531
+ """
4532
+ Adds a external model role member (AzureAD) to a role.
4533
+
4534
+ Parameters
4535
+ ----------
4536
+ role_name : str
4537
+ The role name.
4538
+ member : str | List[str]
4539
+ The email address(es) of the member(s) to add.
4540
+ """
4541
+
4542
+ import Microsoft.AnalysisServices.Tabular as TOM
4543
+
4544
+ if isinstance(member, str):
4545
+ member = [member]
4546
+
4547
+ role = self.model.Roles[role_name]
4548
+ current_members = [m.MemberName for m in role.Members]
4549
+
4550
+ for m in member:
4551
+ if m not in current_members:
4552
+ rm = TOM.ExternalModelRoleMember()
4553
+ rm.IdentityProvider = "AzureAD"
4554
+ rm.MemberName = m
4555
+ role.Members.Add(rm)
4556
+ print(
4557
+ f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role."
4558
+ )
4559
+ else:
4560
+ print(
4561
+ f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role."
4562
+ )
4563
+
4518
4564
  def close(self):
4519
4565
 
4520
4566
  if not self._readonly and self.model is not None:
@@ -4572,9 +4618,9 @@ class TOMWrapper:
4572
4618
 
4573
4619
  if len(self._tables_added) > 0:
4574
4620
  refresh_semantic_model(
4575
- dataset=self._dataset,
4621
+ dataset=self._dataset_id,
4576
4622
  tables=self._tables_added,
4577
- workspace=self._workspace,
4623
+ workspace=self._workspace_id,
4578
4624
  )
4579
4625
  self.model = None
4580
4626
 
@@ -4584,15 +4630,15 @@ class TOMWrapper:
4584
4630
  @log
4585
4631
  @contextmanager
4586
4632
  def connect_semantic_model(
4587
- dataset: str, readonly: bool = True, workspace: Optional[str] = None
4633
+ dataset: str | UUID, readonly: bool = True, workspace: Optional[str] = None
4588
4634
  ) -> Iterator[TOMWrapper]:
4589
4635
  """
4590
4636
  Connects to the Tabular Object Model (TOM) within a semantic model.
4591
4637
 
4592
4638
  Parameters
4593
4639
  ----------
4594
- dataset : str
4595
- Name of the semantic model.
4640
+ dataset : str | UUID
4641
+ Name or ID of the semantic model.
4596
4642
  readonly: bool, default=True
4597
4643
  Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server.
4598
4644
  workspace : str, default=None
@@ -4609,10 +4655,6 @@ def connect_semantic_model(
4609
4655
  # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
4610
4656
  sempy.fabric._client._utils._init_analysis_services()
4611
4657
 
4612
- if workspace is None:
4613
- workspace_id = fabric.get_workspace_id()
4614
- workspace = fabric.resolve_workspace_name(workspace_id)
4615
-
4616
4658
  tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
4617
4659
  try:
4618
4660
  yield tw