semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. See the advisory details on the package registry page for more information.

Files changed (68)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py CHANGED
@@ -2,6 +2,7 @@ import sempy
2
2
  import sempy.fabric as fabric
3
3
  import pandas as pd
4
4
  import re
5
+ import json
5
6
  from datetime import datetime
6
7
  from sempy_labs._helper_functions import (
7
8
  format_dax_object_name,
@@ -1171,8 +1172,8 @@ class TOMWrapper:
1171
1172
  Name of the table.
1172
1173
  entity_name : str
1173
1174
  Name of the lakehouse/warehouse table.
1174
- expression : TOM Object, default=None
1175
- The expression used by the table.
1175
+ expression : str, default=None
1176
+ The name of the expression used by the partition.
1176
1177
  Defaults to None which resolves to the 'DatabaseQuery' expression.
1177
1178
  description : str, default=None
1178
1179
  A description for the partition.
@@ -1542,6 +1543,7 @@ class TOMWrapper:
1542
1543
  self,
1543
1544
  object: Union["TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy"],
1544
1545
  perspective_name: str,
1546
+ include_all: bool = True,
1545
1547
  ):
1546
1548
  """
1547
1549
  Adds an object to a `perspective <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet>`_.
@@ -1552,6 +1554,8 @@ class TOMWrapper:
1552
1554
  An object (i.e. table/column/measure) within a semantic model.
1553
1555
  perspective_name : str
1554
1556
  Name of the perspective.
1557
+ include_all : bool, default=True
1558
+ Relevant to tables only, if set to True, includes all columns, measures, and hierarchies within that table in the perspective.
1555
1559
  """
1556
1560
  import Microsoft.AnalysisServices.Tabular as TOM
1557
1561
 
@@ -1577,6 +1581,8 @@ class TOMWrapper:
1577
1581
 
1578
1582
  if objectType == TOM.ObjectType.Table:
1579
1583
  pt = TOM.PerspectiveTable()
1584
+ if include_all:
1585
+ pt.IncludeAll = True
1580
1586
  pt.Table = object
1581
1587
  object.Model.Perspectives[perspective_name].PerspectiveTables.Add(pt)
1582
1588
  elif objectType == TOM.ObjectType.Column:
@@ -1748,6 +1754,7 @@ class TOMWrapper:
1748
1754
  "TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy", "TOM.Level"
1749
1755
  ],
1750
1756
  language: str,
1757
+ property: str = "Name",
1751
1758
  ):
1752
1759
  """
1753
1760
  Removes an object's `translation <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.culture?view=analysisservices-dotnet>`_ value.
@@ -1758,13 +1765,28 @@ class TOMWrapper:
1758
1765
  An object (i.e. table/column/measure) within a semantic model.
1759
1766
  language : str
1760
1767
  The language code.
1768
+ property : str, default="Name"
1769
+ The property to set. Options: 'Name', 'Description', 'Display Folder'.
1761
1770
  """
1762
1771
  import Microsoft.AnalysisServices.Tabular as TOM
1763
1772
 
1764
- o = object.Model.Cultures[language].ObjectTranslations[
1765
- object, TOM.TranslatedProperty.Caption
1766
- ]
1767
- object.Model.Cultures[language].ObjectTranslations.Remove(o)
1773
+ if property in ["Caption", "Name"]:
1774
+ prop = TOM.TranslatedProperty.Caption
1775
+ elif property == "Description":
1776
+ prop = TOM.TranslatedProperty.Description
1777
+ else:
1778
+ prop = TOM.TranslatedProperty.DisplayFolder
1779
+
1780
+ if property == "DisplayFolder" and object.ObjectType not in [
1781
+ TOM.ObjectType.Table,
1782
+ TOM.ObjectType.Column,
1783
+ TOM.ObjectType.Measure,
1784
+ TOM.ObjectType.Hierarchy,
1785
+ ]:
1786
+ pass
1787
+ else:
1788
+ o = object.Model.Cultures[language].ObjectTranslations[object, prop]
1789
+ object.Model.Cultures[language].ObjectTranslations.Remove(o)
1768
1790
 
1769
1791
  def remove_object(self, object):
1770
1792
  """
@@ -1779,6 +1801,8 @@ class TOMWrapper:
1779
1801
 
1780
1802
  objType = object.ObjectType
1781
1803
 
1804
+ properties = ["Name", "Description", "DisplayFolder"]
1805
+
1782
1806
  # Have to remove translations and perspectives on the object before removing it.
1783
1807
  if objType in [
1784
1808
  TOM.ObjectType.Table,
@@ -1789,7 +1813,10 @@ class TOMWrapper:
1789
1813
  ]:
1790
1814
  for lang in object.Model.Cultures:
1791
1815
  try:
1792
- self.remove_translation(object=object, language=lang.Name)
1816
+ for property in properties:
1817
+ self.remove_translation(
1818
+ object=object, language=lang.Name, property=property
1819
+ )
1793
1820
  except Exception:
1794
1821
  pass
1795
1822
  if objType in [
@@ -2985,19 +3012,19 @@ class TOMWrapper:
2985
3012
 
2986
3013
  for t in self.model.Tables:
2987
3014
  dfT_filt = dfT[dfT["Name"] == t.Name]
2988
- if len(dfT_filt) > 0:
3015
+ if not dfT_filt.empty:
2989
3016
  row = dfT_filt.iloc[0]
2990
3017
  rowCount = str(row["Row Count"])
2991
3018
  totalSize = str(row["Total Size"])
2992
3019
  self.set_annotation(object=t, name="Vertipaq_RowCount", value=rowCount)
2993
3020
  self.set_annotation(
2994
- object=t, name="Vertipaq_TableSize", value=totalSize
3021
+ object=t, name="Vertipaq_TotalSize", value=totalSize
2995
3022
  )
2996
3023
  for c in t.Columns:
2997
3024
  dfC_filt = dfC[
2998
3025
  (dfC["Table Name"] == t.Name) & (dfC["Column Name"] == c.Name)
2999
3026
  ]
3000
- if len(dfC_filt) > 0:
3027
+ if not dfC_filt.empty:
3001
3028
  row = dfC_filt.iloc[0]
3002
3029
  totalSize = str(row["Total Size"])
3003
3030
  dataSize = str(row["Data Size"])
@@ -3023,7 +3050,7 @@ class TOMWrapper:
3023
3050
  dfP_filt = dfP[
3024
3051
  (dfP["Table Name"] == t.Name) & (dfP["Partition Name"] == p.Name)
3025
3052
  ]
3026
- if len(dfP_filt) > 0:
3053
+ if not dfP_filt.empty:
3027
3054
  row = dfP_filt.iloc[0]
3028
3055
  recordCount = str(row["Record Count"])
3029
3056
  segmentCount = str(row["Segment Count"])
@@ -3041,14 +3068,14 @@ class TOMWrapper:
3041
3068
  dfH_filt = dfH[
3042
3069
  (dfH["Table Name"] == t.Name) & (dfH["Hierarchy Name"] == h.Name)
3043
3070
  ]
3044
- if len(dfH_filt) > 0:
3071
+ if not dfH_filt.empty:
3045
3072
  usedSize = str(dfH_filt["Used Size"].iloc[0])
3046
3073
  self.set_annotation(
3047
3074
  object=h, name="Vertipaq_UsedSize", value=usedSize
3048
3075
  )
3049
3076
  for r in self.model.Relationships:
3050
3077
  dfR_filt = dfR[dfR["Relationship Name"] == r.Name]
3051
- if len(dfR_filt) > 0:
3078
+ if not dfR_filt.empty:
3052
3079
  relSize = str(dfR_filt["Used Size"].iloc[0])
3053
3080
  self.set_annotation(object=r, name="Vertipaq_UsedSize", value=relSize)
3054
3081
  try:
@@ -3201,12 +3228,12 @@ class TOMWrapper:
3201
3228
  """
3202
3229
  import Microsoft.AnalysisServices.Tabular as TOM
3203
3230
 
3204
- objType = object.ObjectType
3231
+ if object.ObjectType not in [TOM.ObjectType.Table, TOM.ObjectType.Column]:
3232
+ raise ValueError(
3233
+ f"{icons.red_dot} The 'object' parameter must be a Table or Column object."
3234
+ )
3205
3235
 
3206
- if objType == TOM.ObjectType.Column:
3207
- result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
3208
- elif objType == TOM.ObjectType.Table:
3209
- result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
3236
+ result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
3210
3237
 
3211
3238
  return int(result) if result is not None else 0
3212
3239
 
@@ -3253,17 +3280,28 @@ class TOMWrapper:
3253
3280
  """
3254
3281
  import Microsoft.AnalysisServices.Tabular as TOM
3255
3282
 
3256
- objType = object.ObjectType
3257
- objName = object.Name
3258
- objParentName = object.Parent.Name
3283
+ obj_type = object.ObjectType
3284
+ obj_name = object.Name
3259
3285
 
3260
- if objType == TOM.ObjectType.Table:
3261
- objParentName = objName
3286
+ if object.ObjectType == TOM.ObjectType.CalculationItem:
3287
+ obj_parent_name = object.Parent.Table.Name
3288
+ else:
3289
+ obj_parent_name = object.Parent.Name
3290
+
3291
+ if obj_type == TOM.ObjectType.Table:
3292
+ obj_parent_name = obj_name
3293
+ object_types = ["Table", "Calc Table"]
3294
+ elif obj_type == TOM.ObjectType.Column:
3295
+ object_types = ["Column", "Calc Column"]
3296
+ elif obj_type == TOM.ObjectType.CalculationItem:
3297
+ object_types = ["Calculation Item"]
3298
+ else:
3299
+ object_types = [str(obj_type)]
3262
3300
 
3263
3301
  fil = dependencies[
3264
- (dependencies["Object Type"] == str(objType))
3265
- & (dependencies["Table Name"] == objParentName)
3266
- & (dependencies["Object Name"] == objName)
3302
+ (dependencies["Object Type"].isin(object_types))
3303
+ & (dependencies["Table Name"] == obj_parent_name)
3304
+ & (dependencies["Object Name"] == obj_name)
3267
3305
  ]
3268
3306
  meas = (
3269
3307
  fil[fil["Referenced Object Type"] == "Measure"]["Referenced Object"]
@@ -3343,6 +3381,41 @@ class TOMWrapper:
3343
3381
  if t.Name in tbls:
3344
3382
  yield t
3345
3383
 
3384
+ def _get_expression(self, object):
3385
+ """
3386
+ Helper function to get the expression for any given TOM object.
3387
+ """
3388
+
3389
+ import Microsoft.AnalysisServices.Tabular as TOM
3390
+
3391
+ valid_objects = [
3392
+ TOM.ObjectType.Measure,
3393
+ TOM.ObjectType.Table,
3394
+ TOM.ObjectType.Column,
3395
+ TOM.ObjectType.CalculationItem,
3396
+ ]
3397
+
3398
+ if object.ObjectType not in valid_objects:
3399
+ raise ValueError(
3400
+ f"{icons.red_dot} The 'object' parameter must be one of these types: {valid_objects}."
3401
+ )
3402
+
3403
+ if object.ObjectType == TOM.ObjectType.Measure:
3404
+ expr = object.Expression
3405
+ elif object.ObjectType == TOM.ObjectType.Table:
3406
+ part = next(p for p in object.Partitions)
3407
+ if part.SourceType == TOM.PartitionSourceType.Calculated:
3408
+ expr = part.Source.Expression
3409
+ elif object.ObjectType == TOM.ObjectType.Column:
3410
+ if object.Type == TOM.ColumnType.Calculated:
3411
+ expr = object.Expression
3412
+ elif object.ObjectType == TOM.ObjectType.CalculationItem:
3413
+ expr = object.Expression
3414
+ else:
3415
+ return
3416
+
3417
+ return expr
3418
+
3346
3419
  def fully_qualified_measures(
3347
3420
  self, object: "TOM.Measure", dependencies: pd.DataFrame
3348
3421
  ):
@@ -3367,15 +3440,16 @@ class TOMWrapper:
3367
3440
  dependencies["Object Name"] == dependencies["Parent Node"]
3368
3441
  ]
3369
3442
 
3443
+ expr = self._get_expression(object=object)
3444
+
3370
3445
  for obj in self.depends_on(object=object, dependencies=dependencies):
3371
3446
  if obj.ObjectType == TOM.ObjectType.Measure:
3372
- if (f"{obj.Parent.Name}[{obj.Name}]" in object.Expression) or (
3373
- format_dax_object_name(obj.Parent.Name, obj.Name)
3374
- in object.Expression
3447
+ if (f"{obj.Parent.Name}[{obj.Name}]" in expr) or (
3448
+ format_dax_object_name(obj.Parent.Name, obj.Name) in expr
3375
3449
  ):
3376
3450
  yield obj
3377
3451
 
3378
- def unqualified_columns(self, object: "TOM.Column", dependencies: pd.DataFrame):
3452
+ def unqualified_columns(self, object, dependencies: pd.DataFrame):
3379
3453
  """
3380
3454
  Obtains all unqualified column references for a given object.
3381
3455
 
@@ -3397,6 +3471,8 @@ class TOMWrapper:
3397
3471
  dependencies["Object Name"] == dependencies["Parent Node"]
3398
3472
  ]
3399
3473
 
3474
+ expr = self._get_expression(object=object)
3475
+
3400
3476
  def create_pattern(tableList, b):
3401
3477
  patterns = [
3402
3478
  r"(?<!" + re.escape(table) + r")(?<!'" + re.escape(table) + r"')"
@@ -3414,7 +3490,7 @@ class TOMWrapper:
3414
3490
  if (
3415
3491
  re.search(
3416
3492
  create_pattern(tableList, re.escape(obj.Name)),
3417
- object.Expression,
3493
+ expr,
3418
3494
  )
3419
3495
  is not None
3420
3496
  ):
@@ -3446,14 +3522,14 @@ class TOMWrapper:
3446
3522
 
3447
3523
  return usingView
3448
3524
 
3449
- def has_incremental_refresh_policy(self, table_name: str):
3525
+ def has_incremental_refresh_policy(self, object):
3450
3526
  """
3451
3527
  Identifies whether a table has an `incremental refresh <https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview>`_ policy.
3452
3528
 
3453
3529
  Parameters
3454
3530
  ----------
3455
- table_name : str
3456
- Name of the table.
3531
+ object : TOM Object
3532
+ The TOM object within the semantic model. Accepts either a table or the model object.
3457
3533
 
3458
3534
  Returns
3459
3535
  -------
@@ -3461,13 +3537,21 @@ class TOMWrapper:
3461
3537
  An indicator whether a table has an incremental refresh policy.
3462
3538
  """
3463
3539
 
3464
- hasRP = False
3465
- rp = self.model.Tables[table_name].RefreshPolicy
3466
-
3467
- if rp is not None:
3468
- hasRP = True
3540
+ import Microsoft.AnalysisServices.Tabular as TOM
3469
3541
 
3470
- return hasRP
3542
+ if object.ObjectType == TOM.ObjectType.Table:
3543
+ if object.RefreshPolicy is not None:
3544
+ return True
3545
+ else:
3546
+ return False
3547
+ elif object.ObjectType == TOM.ObjectType.Model:
3548
+ rp = False
3549
+ for t in self.model.Tables:
3550
+ if t.RefreshPolicy is not None:
3551
+ rp = True
3552
+ return rp
3553
+ else:
3554
+ raise NotImplementedError
3471
3555
 
3472
3556
  def show_incremental_refresh_policy(self, table_name: str):
3473
3557
  """
@@ -4677,6 +4761,188 @@ class TOMWrapper:
4677
4761
  f"{icons.yellow_dot} '{m}' is not a member of the '{role_name}' role."
4678
4762
  )
4679
4763
 
4764
+ def get_bim(self) -> dict:
4765
+ """
4766
+ Retrieves the .bim file for the semantic model.
4767
+
4768
+ Returns
4769
+ -------
4770
+ dict
4771
+ The .bim file.
4772
+ """
4773
+
4774
+ import Microsoft.AnalysisServices.Tabular as TOM
4775
+
4776
+ bim = (
4777
+ json.loads(TOM.JsonScripter.ScriptCreate(self.model.Database))
4778
+ .get("create")
4779
+ .get("database")
4780
+ )
4781
+
4782
+ return bim
4783
+
4784
+ def _reduce_model(self, perspective_name: str):
4785
+ """
4786
+ Reduces a model's objects based on a perspective. Adds the dependent objects within a perspective to that perspective.
4787
+ """
4788
+
4789
+ import Microsoft.AnalysisServices.Tabular as TOM
4790
+ from sempy_labs._model_dependencies import get_model_calc_dependencies
4791
+
4792
+ fabric.refresh_tom_cache(workspace=self._workspace_id)
4793
+ dfP = fabric.list_perspectives(
4794
+ dataset=self._dataset_id, workspace=self._workspace_id
4795
+ )
4796
+ dfP = dfP[dfP["Perspective Name"] == perspective_name]
4797
+ if dfP.empty:
4798
+ raise ValueError(
4799
+ f"{icons.red_dot} The '{perspective_name}' is not a valid perspective in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
4800
+ )
4801
+
4802
+ dep = get_model_calc_dependencies(
4803
+ dataset=self._dataset_id, workspace=self._workspace_id
4804
+ )
4805
+ dep_filt = dep[
4806
+ dep["Object Type"].isin(
4807
+ [
4808
+ "Rows Allowed",
4809
+ "Measure",
4810
+ "Calc Item",
4811
+ "Calc Column",
4812
+ "Calc Table",
4813
+ "Hierarchy",
4814
+ ]
4815
+ )
4816
+ ]
4817
+
4818
+ tables = dfP[dfP["Object Type"] == "Table"]["Table Name"].tolist()
4819
+ measures = dfP[dfP["Object Type"] == "Measure"]["Object Name"].tolist()
4820
+ columns = dfP[dfP["Object Type"] == "Column"][["Table Name", "Object Name"]]
4821
+ cols = [
4822
+ f"'{row[0]}'[{row[1]}]"
4823
+ for row in columns.itertuples(index=False, name=None)
4824
+ ]
4825
+ hierarchies = dfP[dfP["Object Type"] == "Hierarchy"][
4826
+ ["Table Name", "Object Name"]
4827
+ ]
4828
+ hier = [
4829
+ f"'{row[0]}'[{row[1]}]"
4830
+ for row in hierarchies.itertuples(index=False, name=None)
4831
+ ]
4832
+ filt = dep_filt[
4833
+ (dep_filt["Object Type"].isin(["Rows Allowed", "Calc Item"]))
4834
+ | (dep_filt["Object Type"] == "Measure")
4835
+ & (dep_filt["Object Name"].isin(measures))
4836
+ | (dep_filt["Object Type"] == "Calc Table")
4837
+ & (dep_filt["Object Name"].isin(tables))
4838
+ | (
4839
+ (dep_filt["Object Type"].isin(["Calc Column"]))
4840
+ & (
4841
+ dep_filt.apply(
4842
+ lambda row: f"'{row['Table Name']}'[{row['Object Name']}]",
4843
+ axis=1,
4844
+ ).isin(cols)
4845
+ )
4846
+ )
4847
+ | (
4848
+ (dep_filt["Object Type"].isin(["Hierarchy"]))
4849
+ & (
4850
+ dep_filt.apply(
4851
+ lambda row: f"'{row['Table Name']}'[{row['Object Name']}]",
4852
+ axis=1,
4853
+ ).isin(hier)
4854
+ )
4855
+ )
4856
+ ]
4857
+
4858
+ result_df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
4859
+
4860
+ for _, r in filt.iterrows():
4861
+ added = False
4862
+ obj_type = r["Referenced Object Type"]
4863
+ table_name = r["Referenced Table"]
4864
+ object_name = r["Referenced Object"]
4865
+ if obj_type in ["Column", "Attribute Hierarchy"]:
4866
+ obj = self.model.Tables[table_name].Columns[object_name]
4867
+ if not self.in_perspective(
4868
+ object=obj, perspective_name=perspective_name
4869
+ ):
4870
+ self.add_to_perspective(
4871
+ object=obj, perspective_name=perspective_name, include_all=False
4872
+ )
4873
+ added = True
4874
+ elif obj_type == "Measure":
4875
+ obj = self.model.Tables[table_name].Measures[object_name]
4876
+ if not self.in_perspective(
4877
+ object=obj, perspective_name=perspective_name
4878
+ ):
4879
+ self.add_to_perspective(
4880
+ object=obj, perspective_name=perspective_name, include_all=False
4881
+ )
4882
+ added = True
4883
+ elif obj_type == "Table":
4884
+ obj = self.model.Tables[table_name]
4885
+ if not self.in_perspective(
4886
+ object=obj, perspective_name=perspective_name
4887
+ ):
4888
+ self.add_to_perspective(
4889
+ object=obj, perspective_name=perspective_name, include_all=False
4890
+ )
4891
+ added = True
4892
+ if added:
4893
+ new_data = {
4894
+ "Table Name": table_name,
4895
+ "Object Name": object_name,
4896
+ "Object Type": obj_type,
4897
+ }
4898
+
4899
+ result_df = pd.concat(
4900
+ [result_df, pd.DataFrame(new_data, index=[0])], ignore_index=True
4901
+ )
4902
+
4903
+ # Reduce model...
4904
+
4905
+ # Remove unnecessary relationships
4906
+ for r in self.model.Relationships:
4907
+ if (
4908
+ not self.in_perspective(
4909
+ object=r.FromTable, perspective_name=perspective_name
4910
+ )
4911
+ ) or (
4912
+ not self.in_perspective(
4913
+ object=r.ToTable, perspective_name=perspective_name
4914
+ )
4915
+ ):
4916
+ self.remove_object(object=r)
4917
+
4918
+ # Ensure relationships in reduced model have base columns
4919
+ for r in self.model.Relationships:
4920
+ if not self.in_perspective(r.FromColumn, perspective_name=perspective_name):
4921
+ self.add_to_perspective(
4922
+ object=r.FromColumn, perspective_name=perspective_name
4923
+ )
4924
+ if not self.in_perspective(r.ToColumn, perspective_name=perspective_name):
4925
+ self.add_to_perspective(
4926
+ object=r.ToColumn, perspective_name=perspective_name
4927
+ )
4928
+
4929
+ # Remove objects not in the perspective
4930
+ for t in self.model.Tables:
4931
+ if not self.in_perspective(object=t, perspective_name=perspective_name):
4932
+ self.remove_object(object=t)
4933
+ else:
4934
+ for attr in ["Columns", "Measures", "Hierarchies"]:
4935
+ for obj in getattr(t, attr):
4936
+ if attr == "Columns" and obj.Type == TOM.ColumnType.RowNumber:
4937
+ pass
4938
+ elif not self.in_perspective(
4939
+ object=obj, perspective_name=perspective_name
4940
+ ):
4941
+ self.remove_object(object=obj)
4942
+
4943
+ # Return the objects added to the perspective based on dependencies
4944
+ return result_df.drop_duplicates()
4945
+
4680
4946
  def close(self):
4681
4947
 
4682
4948
  if not self._readonly and self.model is not None:
@@ -4766,6 +5032,7 @@ def connect_semantic_model(
4766
5032
  If connecting to Azure Analysis Services, enter the workspace parameter in the following format: 'asazure://<region>.asazure.windows.net/<server_name>'.
4767
5033
  Defaults to None which resolves to the workspace of the attached lakehouse
4768
5034
  or if no lakehouse attached, resolves to the workspace of the notebook.
5035
+
4769
5036
  Returns
4770
5037
  -------
4771
5038
  typing.Iterator[TOMWrapper]