semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.

Files changed (49)
  1. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +18 -2
  2. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +49 -43
  3. sempy_labs/__init__.py +18 -3
  4. sempy_labs/_capacities.py +22 -127
  5. sempy_labs/_capacity_migration.py +8 -7
  6. sempy_labs/_dashboards.py +60 -0
  7. sempy_labs/_data_pipelines.py +5 -31
  8. sempy_labs/_environments.py +20 -48
  9. sempy_labs/_eventhouses.py +22 -52
  10. sempy_labs/_eventstreams.py +16 -34
  11. sempy_labs/_gateways.py +4 -4
  12. sempy_labs/_generate_semantic_model.py +0 -1
  13. sempy_labs/_git.py +90 -1
  14. sempy_labs/_graphQL.py +3 -20
  15. sempy_labs/_helper_functions.py +171 -43
  16. sempy_labs/_kql_databases.py +19 -34
  17. sempy_labs/_kql_querysets.py +15 -32
  18. sempy_labs/_list_functions.py +12 -155
  19. sempy_labs/_mirrored_databases.py +14 -48
  20. sempy_labs/_ml_experiments.py +5 -30
  21. sempy_labs/_ml_models.py +4 -28
  22. sempy_labs/_model_bpa.py +2 -0
  23. sempy_labs/_mounted_data_factories.py +119 -0
  24. sempy_labs/_notebooks.py +16 -26
  25. sempy_labs/_sql.py +7 -6
  26. sempy_labs/_utils.py +42 -0
  27. sempy_labs/_vertipaq.py +17 -2
  28. sempy_labs/_warehouses.py +5 -17
  29. sempy_labs/_workloads.py +23 -9
  30. sempy_labs/_workspaces.py +13 -5
  31. sempy_labs/admin/__init__.py +21 -1
  32. sempy_labs/admin/_apps.py +1 -1
  33. sempy_labs/admin/_artifacts.py +62 -0
  34. sempy_labs/admin/_basic_functions.py +0 -52
  35. sempy_labs/admin/_capacities.py +61 -0
  36. sempy_labs/admin/_reports.py +74 -0
  37. sempy_labs/admin/_shared.py +4 -2
  38. sempy_labs/admin/_users.py +133 -0
  39. sempy_labs/admin/_workspaces.py +148 -0
  40. sempy_labs/directlake/_update_directlake_partition_entity.py +9 -1
  41. sempy_labs/lakehouse/__init__.py +2 -0
  42. sempy_labs/lakehouse/_lakehouse.py +6 -7
  43. sempy_labs/lakehouse/_shortcuts.py +192 -53
  44. sempy_labs/report/_generate_report.py +9 -17
  45. sempy_labs/report/_report_bpa.py +12 -19
  46. sempy_labs/tom/_model.py +34 -16
  47. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  48. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +0 -0
  49. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/lakehouse/_shortcuts.py CHANGED
@@ -1,16 +1,19 @@
 import sempy.fabric as fabric
+import pandas as pd
 from sempy_labs._helper_functions import (
-    resolve_lakehouse_name,
-    resolve_lakehouse_id,
+    resolve_lakehouse_name_and_id,
     resolve_workspace_name_and_id,
     _base_api,
+    _create_dataframe,
 )
+from sempy._utils._log import log
 from typing import Optional
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
+from sempy.fabric.exceptions import FabricHTTPException


+@log
 def create_shortcut_onelake(
     table_name: str,
     source_lakehouse: str,
@@ -62,21 +65,18 @@ def create_shortcut_onelake(
     (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
         source_workspace
     )
-    source_lakehouse_id = resolve_lakehouse_id(source_lakehouse, source_workspace_id)
-    source_lakehouse_name = fabric.resolve_item_name(
-        item_id=source_lakehouse_id, type="Lakehouse", workspace=source_workspace_id
-    )

-    (destination_workspace_name, destination_workspace_id) = resolve_workspace_name_and_id(
-        destination_workspace
+    (source_lakehouse_name, source_lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=source_lakehouse, workspace=source_workspace_id
     )
-    destination_lakehouse_id = resolve_lakehouse_id(
-        destination_lakehouse, destination_workspace
+
+    (destination_workspace_name, destination_workspace_id) = (
+        resolve_workspace_name_and_id(destination_workspace)
     )
-    destination_lakehouse_name = fabric.resolve_item_name(
-        item_id=destination_lakehouse_id,
-        type="Lakehouse",
-        workspace=destination_workspace_id,
+    (destination_lakehouse_name, destination_lakehouse_id) = (
+        resolve_lakehouse_name_and_id(
+            lakehouse=destination_lakehouse, workspace=destination_workspace_id
+        )
     )

     if shortcut_name is None:
@@ -84,18 +84,39 @@ def create_shortcut_onelake(

     source_full_path = f"{source_path}/{table_name}"

+    actual_shortcut_name = shortcut_name.replace(" ", "")
+
     payload = {
         "path": destination_path,
-        "name": shortcut_name.replace(" ", ""),
+        "name": actual_shortcut_name,
         "target": {
             "oneLake": {
-                "workspaceId": source_workspace_id,
                 "itemId": source_lakehouse_id,
                 "path": source_full_path,
+                "workspaceId": source_workspace_id,
             }
         },
     }

+    # Check if the shortcut already exists
+    try:
+        response = _base_api(
+            request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts/{destination_path}/{actual_shortcut_name}"
+        )
+        response_json = response.json()
+        del response_json["target"]["type"]
+        if response_json.get("target") == payload.get("target"):
+            print(
+                f"{icons.info} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace."
+            )
+            return
+        else:
+            raise ValueError(
+                f"{icons.red_dot} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace but has a different source."
+            )
+    except FabricHTTPException:
+        pass
+
     _base_api(
         request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
         payload=payload,
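
The hunks above make create_shortcut_onelake idempotent: before POSTing, it GETs the would-be shortcut, returns quietly when an identical shortcut already exists, and raises only when the name is taken by a shortcut with a different source. A minimal usage sketch, assuming a Fabric notebook with 0.9.5 installed (workspace and lakehouse names are placeholders; the remaining optional parameters keep their defaults):

```python
from sempy_labs.lakehouse import create_shortcut_onelake

# First call creates the shortcut; repeating the same call now prints an
# informational message and returns instead of failing on the POST.
create_shortcut_onelake(
    table_name="DimDate",
    source_lakehouse="SalesLakehouse",
    source_workspace="Sales Workspace",
    destination_lakehouse="ReportingLakehouse",
    destination_workspace="Reporting Workspace",
)
```
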
@@ -146,17 +167,14 @@ def create_shortcut(

     sourceTitle = source_titles[source]

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )

-    client = fabric.FabricRestClient()
     shortcutActualName = shortcut_name.replace(" ", "")

-    request_body = {
+    payload = {
         "path": "Tables",
         "name": shortcutActualName,
         "target": {
@@ -168,22 +186,16 @@ def create_shortcut(
         },
     }

-    try:
-        response = client.post(
-            f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
-            json=request_body,
-        )
-        if response.status_code == 201:
-            print(
-                f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse}' lakehouse within"
-                f" the '{workspace} workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
-            )
-        else:
-            print(response.status_code)
-    except Exception as e:
-        raise ValueError(
-            f"{icons.red_dot} Failed to create a shortcut for the '{shortcut_name}' table."
-        ) from e
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
+        method="post",
+        payload=payload,
+        status_codes=201,
+    )
+    print(
+        f"{icons.green_dot} The shortcut '{shortcutActualName}' was created in the '{lakehouse_name}' lakehouse within"
+        f" the '{workspace_name}' workspace. It is based on the '{subpath}' table in '{sourceTitle}'."
+    )


 def delete_shortcut(
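
create_shortcut drops its hand-rolled FabricRestClient call in favor of the internal _base_api helper, which issues the request and validates the status code in one place. An illustrative sketch of the pattern being centralized (not the library's actual implementation; base_api_sketch is a hypothetical stand-in):

```python
from typing import Iterable, Optional, Union

import requests


def base_api_sketch(
    url: str,
    method: str = "get",
    payload: Optional[dict] = None,
    status_codes: Union[int, Iterable[int]] = 200,
) -> requests.Response:
    # Normalize accepted status codes to a set, so one helper replaces the
    # scattered `if response.status_code != ...` checks seen in 0.9.4.
    codes = {status_codes} if isinstance(status_codes, int) else set(status_codes)
    response = requests.request(method, url, json=payload)
    if response.status_code not in codes:
        raise RuntimeError(f"{method.upper()} {url} returned {response.status_code}")
    return response
```
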
@@ -203,7 +215,7 @@ def delete_shortcut(
         The name of the shortcut.
     shortcut_path : str = "Tables"
         The path of the shortcut to be deleted. Must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse name in which the shortcut resides.
         Defaults to None which resolves to the lakehouse attached to the notebook.
     workspace : str | UUID, default=None
@@ -213,20 +225,15 @@ def delete_shortcut(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )

-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace_id)
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
-
-    client = fabric.FabricRestClient()
-    response = client.delete(
-        f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}"
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}",
+        method="delete",
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace_name}' workspace has been deleted."
     )
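
delete_shortcut gets the same _base_api treatment, and its lakehouse parameter now accepts a UUID as well as a name. A usage sketch with placeholder names (the UUID is hypothetical):

```python
from uuid import UUID

from sempy_labs.lakehouse import delete_shortcut

# By name, defaulting to the attached lakehouse's workspace:
delete_shortcut(shortcut_name="DimDate")

# Or by lakehouse ID, now accepted directly:
delete_shortcut(
    shortcut_name="DimDate",
    lakehouse=UUID("00000000-0000-0000-0000-000000000000"),
    workspace="Reporting Workspace",
)
```
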
@@ -258,3 +265,135 @@ def reset_shortcut_cache(workspace: Optional[str | UUID] = None):
     print(
         f"{icons.green_dot} The shortcut cache has been reset for the '{workspace_name}' workspace."
     )
+
+
+@log
+def list_shortcuts(
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+    path: Optional[str] = None,
+) -> pd.DataFrame:
+    """
+    Shows all shortcuts which exist in a Fabric lakehouse and their properties.
+
+    Parameters
+    ----------
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the lakehouse resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or, if no lakehouse is attached, resolves to the workspace of the notebook.
+    path : str, default=None
+        The path within the lakehouse where to look for shortcuts. If provided, must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+        Defaults to None which will return all shortcuts on the given lakehouse.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=workspace_id
+    )
+
+    columns = {
+        "Shortcut Name": "string",
+        "Shortcut Path": "string",
+        "Source Type": "string",
+        "Source Workspace Id": "string",
+        "Source Workspace Name": "string",
+        "Source Item Id": "string",
+        "Source Item Name": "string",
+        "Source Item Type": "string",
+        "OneLake Path": "string",
+        "Connection Id": "string",
+        "Location": "string",
+        "Bucket": "string",
+        "SubPath": "string",
+        "Source Properties Raw": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    # To improve performance, create a dataframe to cache all items for a given workspace
+    itm_clms = {
+        "Id": "string",
+        "Display Name": "string",
+        "Description": "string",
+        "Type": "string",
+        "Workspace Id": "string",
+    }
+    source_items_df = _create_dataframe(columns=itm_clms)
+
+    url = f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
+
+    if path is not None:
+        url += f"?parentPath={path}"
+
+    responses = _base_api(
+        request=url,
+        uses_pagination=True,
+    )
+
+    sources = {
+        "AdlsGen2": "adlsGen2",
+        "AmazonS3": "amazonS3",
+        "Dataverse": "dataverse",
+        "ExternalDataShare": "externalDataShare",
+        "GoogleCloudStorage": "googleCloudStorage",
+        "OneLake": "oneLake",
+        "S3Compatible": "s3Compatible",
+    }
+
+    for r in responses:
+        for i in r.get("value", []):
+            tgt = i.get("target", {})
+            tgt_type = tgt.get("type")
+            connection_id = tgt.get(sources.get(tgt_type), {}).get("connectionId")
+            location = tgt.get(sources.get(tgt_type), {}).get("location")
+            sub_path = tgt.get(sources.get(tgt_type), {}).get("subpath")
+            source_workspace_id = tgt.get(sources.get(tgt_type), {}).get("workspaceId")
+            source_item_id = tgt.get(sources.get(tgt_type), {}).get("itemId")
+            bucket = tgt.get(sources.get(tgt_type), {}).get("bucket")
+            source_workspace_name = (
+                fabric.resolve_workspace_name(source_workspace_id)
+                if source_workspace_id is not None
+                else None
+            )
+            # Cache and use it to get item type and name
+            source_item_type = None
+            source_item_name = None
+            dfI = source_items_df[
+                source_items_df["Workspace Id"] == source_workspace_id
+            ]
+            if dfI.empty:
+                dfI = fabric.list_items(workspace=source_workspace_id)
+                source_items_df = pd.concat([source_items_df, dfI], ignore_index=True)
+
+            dfI_filt = dfI[dfI["Id"] == source_item_id]
+            if not dfI_filt.empty:
+                source_item_type = dfI_filt["Type"].iloc[0]
+                source_item_name = dfI_filt["Display Name"].iloc[0]
+
+            new_data = {
+                "Shortcut Name": i.get("name"),
+                "Shortcut Path": i.get("path"),
+                "Source Type": tgt_type,
+                "Source Workspace Id": source_workspace_id,
+                "Source Workspace Name": source_workspace_name,
+                "Source Item Id": source_item_id,
+                "Source Item Name": source_item_name,
+                "Source Item Type": source_item_type,
+                "OneLake Path": tgt.get(sources.get("oneLake"), {}).get("path"),
+                "Connection Id": connection_id,
+                "Location": location,
+                "Bucket": bucket,
+                "SubPath": sub_path,
+                "Source Properties Raw": str(tgt),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
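
The headline addition is list_shortcuts, which pages through the shortcuts API and joins each shortcut to a per-workspace cache of items to recover the source item's name and type (the +2 in sempy_labs/lakehouse/__init__.py is consistent with it being exported there). A usage sketch with placeholder names:

```python
from sempy_labs.lakehouse import list_shortcuts

# All shortcuts on the lakehouse attached to the notebook:
df = list_shortcuts()

# Only shortcuts under Tables/ in a named lakehouse:
df = list_shortcuts(
    lakehouse="ReportingLakehouse",
    workspace="Reporting Workspace",
    path="Tables",
)
print(df[["Shortcut Name", "Source Type", "Source Workspace Name", "Source Item Name"]])
```

Note that the "OneLake Path" column is computed via sources.get("oneLake"), while the dictionary key is "OneLake", so as written that lookup always misses and the column stays empty.
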
sempy_labs/report/_generate_report.py CHANGED
@@ -11,6 +11,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     resolve_item_id,
+    get_item_definition,
 )
 import sempy_labs._icons as icons
 from sempy._utils._log import log
@@ -178,7 +179,9 @@ def update_report_from_reportjson(


 def get_report_definition(
-    report: str, workspace: Optional[str | UUID] = None, return_dataframe: bool = True
+    report: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    return_dataframe: bool = True,
 ) -> pd.DataFrame | dict:
     """
     Gets the collection of definition files of a report.
@@ -187,8 +190,8 @@ def get_report_definition(

     Parameters
     ----------
-    report : str
-        Name of the report.
+    report : str | uuid.UUID
+        Name or ID of the report.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -198,25 +201,14 @@ def get_report_definition(

     Returns
     -------
-    pandas.DataFrame | dict
+    pandas.DataFrame
         The collection of report definition files within a pandas dataframe.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    report_id = resolve_item_id(item=report, type="Report", workspace=workspace)
-
-    result = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
-        method="post",
-        lro_return_json=True,
-        status_codes=None,
+    return get_item_definition(
+        item=report, type="Report", workspace=workspace, return_dataframe=True
     )

-    if return_dataframe:
-        return pd.json_normalize(result["definition"]["parts"])
-    else:
-        return result
-

 @log
 def create_model_bpa_report(
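
get_report_definition becomes a thin wrapper over the new shared get_item_definition helper and now accepts a report ID as well as a name. Note that return_dataframe survives in the signature but is no longer forwarded: the wrapper hard-codes return_dataframe=True, so the dict return path is effectively dead in this release. Usage is unchanged (placeholder names):

```python
from sempy_labs.report import get_report_definition

# By name or by UUID; returns a dataframe of the report's definition parts.
parts = get_report_definition(
    report="Executive Dashboard", workspace="Reporting Workspace"
)
```
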
sempy_labs/report/_report_bpa.py CHANGED
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 from typing import Optional
 import pandas as pd
 import datetime
@@ -7,8 +6,7 @@ from sempy_labs.report import ReportWrapper, report_bpa_rules
 from sempy_labs._helper_functions import (
     format_dax_object_name,
     save_as_delta_table,
-    resolve_report_id,
-    resolve_lakehouse_name,
+    resolve_item_name_and_id,
     resolve_workspace_capacity,
     _get_column_aggregate,
     resolve_workspace_name_and_id,
@@ -54,9 +52,7 @@ def run_report_bpa(
         A pandas dataframe in HTML format showing report objects which violated the best practice analyzer rules.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    rpt = ReportWrapper(report=report, workspace=workspace_id)
+    rpt = ReportWrapper(report=report, workspace=workspace)

     dfCV = rpt.list_custom_visuals()
     dfP = rpt.list_pages()
@@ -149,7 +145,7 @@ def run_report_bpa(
         df_output["Description"] = row["Description"]
         df_output["URL"] = row["URL"]
         df_output["Report URL"] = helper.get_web_url(
-            report=report, workspace=workspace_id
+            report=report, workspace=workspace
         )

         page_mapping_dict = dfP.set_index("Page Display Name")["Page URL"].to_dict()
@@ -205,31 +201,28 @@ def run_report_bpa(

         now = datetime.datetime.now()
         delta_table_name = "reportbparesults"
-        lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.resolve_workspace_name()
-        lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=lake_workspace
-        )
-
-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
+        lakeT = get_lakehouse_tables()
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

         if len(lakeT_filt) == 0:
             runId = 1
         else:
-            max_run_id = _get_column_aggregate(
-                lakehouse=lakehouse, table_name=delta_table_name
-            )
+            max_run_id = _get_column_aggregate(table_name=delta_table_name)
             runId = max_run_id + 1

+        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+        (report_name, report_id) = resolve_item_name_and_id(
+            item=report, type="Report", workspace=workspace_id
+        )
+
         export_df = finalDF.copy()
         capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id)
         export_df["Capacity Name"] = capacity_name
         export_df["Capacity Id"] = capacity_id
         export_df["Workspace Name"] = workspace_name
         export_df["Workspace Id"] = workspace_id
-        export_df["Report Name"] = report
-        export_df["Report Id"] = resolve_report_id(report, workspace_id)
+        export_df["Report Name"] = report_name
+        export_df["Report Id"] = report_id
         export_df["RunId"] = runId
         export_df["Timestamp"] = now
         export_df["RunId"] = export_df["RunId"].astype(int)
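
In run_report_bpa, the lakehouse plumbing collapses into helper defaults (get_lakehouse_tables() and _get_column_aggregate() now resolve the attached lakehouse themselves) and the report name/ID pair is resolved once via resolve_item_name_and_id. A usage sketch with placeholder names; the export flag is an assumption about this function's signature, inferred from the delta-table block above:

```python
from sempy_labs.report import run_report_bpa

run_report_bpa(
    report="Executive Dashboard",
    workspace="Reporting Workspace",
    export=True,  # assumed flag guarding the 'reportbparesults' delta-table write
)
```
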
sempy_labs/tom/_model.py CHANGED
@@ -1172,8 +1172,8 @@ class TOMWrapper:
            Name of the table.
        entity_name : str
            Name of the lakehouse/warehouse table.
-        expression : TOM Object, default=None
-            The expression used by the table.
+        expression : str, default=None
+            The name of the expression used by the partition.
            Defaults to None which resolves to the 'DatabaseQuery' expression.
        description : str, default=None
            A description for the partition.
@@ -1543,6 +1543,7 @@ class TOMWrapper:
        self,
        object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
        perspective_name: str,
+        include_all: bool = True,
    ):
        """
        Adds an object to a `perspective <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet>`_.
@@ -1553,6 +1554,8 @@ class TOMWrapper:
            An object (i.e. table/column/measure) within a semantic model.
        perspective_name : str
            Name of the perspective.
+        include_all : bool, default=True
+            Relevant to tables only, if set to True, includes all columns, measures, and hierarchies within that table in the perspective.
        """
        import Microsoft.AnalysisServices.Tabular as TOM

@@ -1578,6 +1581,8 @@ class TOMWrapper:

        if objectType == TOM.ObjectType.Table:
            pt = TOM.PerspectiveTable()
+            if include_all:
+                pt.IncludeAll = True
            pt.Table = object
            object.Model.Perspectives[perspective_name].PerspectiveTables.Add(pt)
        elif objectType == TOM.ObjectType.Column:
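
add_to_perspective gains an include_all flag: for tables it sets PerspectiveTable.IncludeAll, which pulls every column, measure, and hierarchy of the table into the perspective, while _reduce_model passes include_all=False (see the hunks near line 4852 below) so it can add dependent objects one at a time. A sketch using the connect_semantic_model entry point (placeholder names):

```python
from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", readonly=False) as tom:
    # New default include_all=True: the whole table lands in the
    # perspective via PerspectiveTable.IncludeAll.
    tom.add_to_perspective(
        object=tom.model.Tables["DimDate"], perspective_name="Finance"
    )
```
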
@@ -3517,14 +3522,14 @@ class TOMWrapper:

        return usingView

-    def has_incremental_refresh_policy(self, table_name: str):
+    def has_incremental_refresh_policy(self, object):
        """
        Identifies whether a table has an `incremental refresh <https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview>`_ policy.

        Parameters
        ----------
-        table_name : str
-            Name of the table.
+        object : TOM Object
+            The TOM object within the semantic model. Accepts either a table or the model object.

        Returns
        -------
@@ -3532,13 +3537,21 @@ class TOMWrapper:
            An indicator whether a table has an incremental refresh policy.
        """

-        hasRP = False
-        rp = self.model.Tables[table_name].RefreshPolicy
-
-        if rp is not None:
-            hasRP = True
+        import Microsoft.AnalysisServices.Tabular as TOM

-        return hasRP
+        if object.ObjectType == TOM.ObjectType.Table:
+            if object.RefreshPolicy is not None:
+                return True
+            else:
+                return False
+        elif object.ObjectType == TOM.ObjectType.Model:
+            rp = False
+            for t in self.model.Tables:
+                if t.RefreshPolicy is not None:
+                    rp = True
+            return rp
+        else:
+            raise NotImplementedError

    def show_incremental_refresh_policy(self, table_name: str):
        """
@@ -4760,17 +4773,20 @@ class TOMWrapper:

        import Microsoft.AnalysisServices.Tabular as TOM

-        return (
+        bim = (
            json.loads(TOM.JsonScripter.ScriptCreate(self.model.Database))
            .get("create")
            .get("database")
        )

+        return bim
+
    def _reduce_model(self, perspective_name: str):
        """
        Reduces a model's objects based on a perspective. Adds the dependent objects within a perspective to that perspective.
        """

+        import Microsoft.AnalysisServices.Tabular as TOM
        from sempy_labs._model_dependencies import get_model_calc_dependencies

        fabric.refresh_tom_cache(workspace=self._workspace_id)
@@ -4852,7 +4868,7 @@ class TOMWrapper:
                        object=obj, perspective_name=perspective_name
                    ):
                        self.add_to_perspective(
-                            object=obj, perspective_name=perspective_name
+                            object=obj, perspective_name=perspective_name, include_all=False
                        )
                        added = True
            elif obj_type == "Measure":
@@ -4861,7 +4877,7 @@ class TOMWrapper:
                        object=obj, perspective_name=perspective_name
                    ):
                        self.add_to_perspective(
-                            object=obj, perspective_name=perspective_name
+                            object=obj, perspective_name=perspective_name, include_all=False
                        )
                        added = True
            elif obj_type == "Table":
@@ -4870,7 +4886,7 @@ class TOMWrapper:
                        object=obj, perspective_name=perspective_name
                    ):
                        self.add_to_perspective(
-                            object=obj, perspective_name=perspective_name, include_all=False
+                            object=obj, perspective_name=perspective_name, include_all=False
                        )
                        added = True
            if added:
@@ -4917,7 +4933,9 @@ class TOMWrapper:
            else:
                for attr in ["Columns", "Measures", "Hierarchies"]:
                    for obj in getattr(t, attr):
-                        if not self.in_perspective(
+                        if attr == "Columns" and obj.Type == TOM.ColumnType.RowNumber:
+                            pass
+                        elif not self.in_perspective(
                            object=obj, perspective_name=perspective_name
                        ):
                            self.remove_object(object=obj)