semantic-link-labs 0.9.2__py3-none-any.whl → 0.9.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +10 -6
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +54 -44
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +27 -1
- sempy_labs/_ai.py +8 -5
- sempy_labs/_capacity_migration.py +3 -2
- sempy_labs/_connections.py +45 -9
- sempy_labs/_dax.py +17 -3
- sempy_labs/_delta_analyzer.py +308 -138
- sempy_labs/_eventhouses.py +70 -1
- sempy_labs/_gateways.py +56 -8
- sempy_labs/_generate_semantic_model.py +30 -9
- sempy_labs/_helper_functions.py +84 -9
- sempy_labs/_job_scheduler.py +226 -2
- sempy_labs/_list_functions.py +42 -19
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +17 -2
- sempy_labs/_model_bpa_rules.py +20 -8
- sempy_labs/_semantic_models.py +117 -0
- sempy_labs/_sql.py +73 -6
- sempy_labs/_sqldatabase.py +227 -0
- sempy_labs/_translations.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_warehouses.py +1 -1
- sempy_labs/admin/__init__.py +49 -8
- sempy_labs/admin/_activities.py +166 -0
- sempy_labs/admin/_apps.py +143 -0
- sempy_labs/admin/_basic_functions.py +32 -652
- sempy_labs/admin/_capacities.py +250 -0
- sempy_labs/admin/_datasets.py +184 -0
- sempy_labs/admin/_domains.py +1 -3
- sempy_labs/admin/_items.py +3 -1
- sempy_labs/admin/_reports.py +165 -0
- sempy_labs/admin/_scanner.py +53 -49
- sempy_labs/admin/_shared.py +74 -0
- sempy_labs/admin/_tenant.py +489 -0
- sempy_labs/directlake/_dl_helper.py +0 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
- sempy_labs/graph/_teams.py +1 -1
- sempy_labs/graph/_users.py +9 -1
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +2 -2
- sempy_labs/lakehouse/_lakehouse.py +3 -3
- sempy_labs/lakehouse/_shortcuts.py +29 -16
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/report/__init__.py +3 -1
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +272 -0
- sempy_labs/report/_report_functions.py +11 -263
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/tom/_model.py +281 -29
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py
CHANGED
@@ -2,6 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 import re
+import json
 from datetime import datetime
 from sempy_labs._helper_functions import (
     format_dax_object_name,
@@ -1748,6 +1749,7 @@ class TOMWrapper:
             "TOM.Table", "TOM.Column", "TOM.Measure", "TOM.Hierarchy", "TOM.Level"
         ],
         language: str,
+        property: str = "Name",
     ):
         """
         Removes an object's `translation <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.culture?view=analysisservices-dotnet>`_ value.
@@ -1758,13 +1760,28 @@ class TOMWrapper:
             An object (i.e. table/column/measure) within a semantic model.
         language : str
             The language code.
+        property : str, default="Name"
+            The property to set. Options: 'Name', 'Description', 'Display Folder'.
         """
         import Microsoft.AnalysisServices.Tabular as TOM
 
-
-
-
-
+        if property in ["Caption", "Name"]:
+            prop = TOM.TranslatedProperty.Caption
+        elif property == "Description":
+            prop = TOM.TranslatedProperty.Description
+        else:
+            prop = TOM.TranslatedProperty.DisplayFolder
+
+        if property == "DisplayFolder" and object.ObjectType not in [
+            TOM.ObjectType.Table,
+            TOM.ObjectType.Column,
+            TOM.ObjectType.Measure,
+            TOM.ObjectType.Hierarchy,
+        ]:
+            pass
+        else:
+            o = object.Model.Cultures[language].ObjectTranslations[object, prop]
+            object.Model.Cultures[language].ObjectTranslations.Remove(o)
 
     def remove_object(self, object):
         """
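
The hunk above adds a `property` parameter to `remove_translation`, so a caller can target the translated Name, Description, or Display Folder individually. A minimal usage sketch follows; it assumes `connect_semantic_model` still takes `dataset`, `workspace`, and `readonly` arguments as in earlier releases, and the dataset, workspace, table, and measure names are hypothetical.

```python
from sempy_labs.tom import connect_semantic_model

# Hypothetical dataset, workspace, and object names for illustration only.
with connect_semantic_model(
    dataset="Sales Model", workspace="My Workspace", readonly=False
) as tom:
    measure = tom.model.Tables["Sales"].Measures["Total Sales"]

    # Remove only the translated caption (Name) for the Spanish culture.
    tom.remove_translation(object=measure, language="es-ES", property="Name")

    # Remove the translated description instead of the caption.
    tom.remove_translation(object=measure, language="es-ES", property="Description")
```
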
@@ -1779,6 +1796,8 @@ class TOMWrapper:
 
         objType = object.ObjectType
 
+        properties = ["Name", "Description", "DisplayFolder"]
+
         # Have to remove translations and perspectives on the object before removing it.
         if objType in [
             TOM.ObjectType.Table,
@@ -1789,7 +1808,10 @@ class TOMWrapper:
         ]:
             for lang in object.Model.Cultures:
                 try:
-
+                    for property in properties:
+                        self.remove_translation(
+                            object=object, language=lang.Name, property=property
+                        )
                 except Exception:
                     pass
         if objType in [
@@ -2985,19 +3007,19 @@ class TOMWrapper:
 
         for t in self.model.Tables:
             dfT_filt = dfT[dfT["Name"] == t.Name]
-            if
+            if not dfT_filt.empty:
                 row = dfT_filt.iloc[0]
                 rowCount = str(row["Row Count"])
                 totalSize = str(row["Total Size"])
                 self.set_annotation(object=t, name="Vertipaq_RowCount", value=rowCount)
                 self.set_annotation(
-                    object=t, name="
+                    object=t, name="Vertipaq_TotalSize", value=totalSize
                 )
             for c in t.Columns:
                 dfC_filt = dfC[
                     (dfC["Table Name"] == t.Name) & (dfC["Column Name"] == c.Name)
                 ]
-                if
+                if not dfC_filt.empty:
                     row = dfC_filt.iloc[0]
                     totalSize = str(row["Total Size"])
                     dataSize = str(row["Data Size"])
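
The truncated `if` conditions on the old side are replaced with explicit `DataFrame.empty` guards. The pandas idiom matters because a DataFrame has no unambiguous truth value; a standalone sketch of the same guard, with made-up table data:

```python
import pandas as pd

dfT = pd.DataFrame({"Name": ["Sales"], "Row Count": [1000], "Total Size": [2048]})

# Filtering can legitimately produce an empty frame; `if dfT_filt:` would raise
# "The truth value of a DataFrame is ambiguous", so the code checks .empty instead.
dfT_filt = dfT[dfT["Name"] == "Customers"]
if not dfT_filt.empty:
    row = dfT_filt.iloc[0]
    print(row["Row Count"])
else:
    print("No matching table row; skip the annotation.")
```
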
@@ -3023,7 +3045,7 @@ class TOMWrapper:
                 dfP_filt = dfP[
                     (dfP["Table Name"] == t.Name) & (dfP["Partition Name"] == p.Name)
                 ]
-                if
+                if not dfP_filt.empty:
                     row = dfP_filt.iloc[0]
                     recordCount = str(row["Record Count"])
                     segmentCount = str(row["Segment Count"])
@@ -3041,14 +3063,14 @@ class TOMWrapper:
                 dfH_filt = dfH[
                     (dfH["Table Name"] == t.Name) & (dfH["Hierarchy Name"] == h.Name)
                 ]
-                if
+                if not dfH_filt.empty:
                     usedSize = str(dfH_filt["Used Size"].iloc[0])
                     self.set_annotation(
                         object=h, name="Vertipaq_UsedSize", value=usedSize
                     )
         for r in self.model.Relationships:
             dfR_filt = dfR[dfR["Relationship Name"] == r.Name]
-            if
+            if not dfR_filt.empty:
                 relSize = str(dfR_filt["Used Size"].iloc[0])
                 self.set_annotation(object=r, name="Vertipaq_UsedSize", value=relSize)
         try:
@@ -3201,12 +3223,12 @@ class TOMWrapper:
         """
         import Microsoft.AnalysisServices.Tabular as TOM
 
-
+        if object.ObjectType not in [TOM.ObjectType.Table, TOM.ObjectType.Column]:
+            raise ValueError(
+                f"{icons.red_dot} The 'object' parameter must be a Table or Column object."
+            )
 
-
-            result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
-        elif objType == TOM.ObjectType.Table:
-            result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
+        result = self.get_annotation_value(object=object, name="Vertipaq_TotalSize")
 
         return int(result) if result is not None else 0
 
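
This hunk collapses the per-object-type branches into a single `get_annotation_value` lookup after validating that the object is a Table or Column. A hedged sketch of the annotation round-trip the Vertipaq helpers rely on; `set_annotation` and `get_annotation_value` and their keyword arguments are taken from the diff itself, while the dataset, workspace, and table names are hypothetical.

```python
from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(
    dataset="Sales Model", workspace="My Workspace", readonly=False
) as tom:
    table = tom.model.Tables["Sales"]

    # Store a Vertipaq statistic as a string annotation on the table...
    tom.set_annotation(object=table, name="Vertipaq_TotalSize", value="2048")

    # ...then read it back, defaulting to 0 when the annotation is absent,
    # mirroring the `int(result) if result is not None else 0` pattern above.
    result = tom.get_annotation_value(object=table, name="Vertipaq_TotalSize")
    total_size = int(result) if result is not None else 0
```
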
@@ -3253,17 +3275,28 @@ class TOMWrapper:
         """
         import Microsoft.AnalysisServices.Tabular as TOM
 
-
-
-        objParentName = object.Parent.Name
+        obj_type = object.ObjectType
+        obj_name = object.Name
 
-        if
-
+        if object.ObjectType == TOM.ObjectType.CalculationItem:
+            obj_parent_name = object.Parent.Table.Name
+        else:
+            obj_parent_name = object.Parent.Name
+
+        if obj_type == TOM.ObjectType.Table:
+            obj_parent_name = obj_name
+            object_types = ["Table", "Calc Table"]
+        elif obj_type == TOM.ObjectType.Column:
+            object_types = ["Column", "Calc Column"]
+        elif obj_type == TOM.ObjectType.CalculationItem:
+            object_types = ["Calculation Item"]
+        else:
+            object_types = [str(obj_type)]
 
         fil = dependencies[
-            (dependencies["Object Type"]
-            & (dependencies["Table Name"] ==
-            & (dependencies["Object Name"] ==
+            (dependencies["Object Type"].isin(object_types))
+            & (dependencies["Table Name"] == obj_parent_name)
+            & (dependencies["Object Name"] == obj_name)
         ]
         meas = (
             fil[fil["Referenced Object Type"] == "Measure"]["Referenced Object"]
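
The rewritten filter matches on a list of object types (for example both 'Column' and 'Calc Column') instead of a single value. A self-contained sketch of the same `isin`-based filtering on a toy dependencies frame; the column names come from the diff, the rows and values are made up.

```python
import pandas as pd

# Toy stand-in for the calc-dependencies DataFrame used above.
dependencies = pd.DataFrame(
    {
        "Object Type": ["Calc Column", "Measure", "Column"],
        "Table Name": ["Sales", "Sales", "Date"],
        "Object Name": ["Margin", "Total Sales", "Year"],
        "Referenced Object Type": ["Column", "Column", "Column"],
        "Referenced Object": ["Amount", "Amount", "Date"],
    }
)

object_types = ["Column", "Calc Column"]  # both flavors, as in the diff
fil = dependencies[
    (dependencies["Object Type"].isin(object_types))
    & (dependencies["Table Name"] == "Sales")
    & (dependencies["Object Name"] == "Margin")
]
print(fil["Referenced Object"].tolist())  # ['Amount']
```
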
@@ -3343,6 +3376,41 @@ class TOMWrapper:
             if t.Name in tbls:
                 yield t
 
+    def _get_expression(self, object):
+        """
+        Helper function to get the expression for any given TOM object.
+        """
+
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        valid_objects = [
+            TOM.ObjectType.Measure,
+            TOM.ObjectType.Table,
+            TOM.ObjectType.Column,
+            TOM.ObjectType.CalculationItem,
+        ]
+
+        if object.ObjectType not in valid_objects:
+            raise ValueError(
+                f"{icons.red_dot} The 'object' parameter must be one of these types: {valid_objects}."
+            )
+
+        if object.ObjectType == TOM.ObjectType.Measure:
+            expr = object.Expression
+        elif object.ObjectType == TOM.ObjectType.Table:
+            part = next(p for p in object.Partitions)
+            if part.SourceType == TOM.PartitionSourceType.Calculated:
+                expr = part.Source.Expression
+        elif object.ObjectType == TOM.ObjectType.Column:
+            if object.Type == TOM.ColumnType.Calculated:
+                expr = object.Expression
+        elif object.ObjectType == TOM.ObjectType.CalculationItem:
+            expr = object.Expression
+        else:
+            return
+
+        return expr
+
     def fully_qualified_measures(
         self, object: "TOM.Measure", dependencies: pd.DataFrame
     ):
@@ -3367,15 +3435,16 @@ class TOMWrapper:
             dependencies["Object Name"] == dependencies["Parent Node"]
         ]
 
+        expr = self._get_expression(object=object)
+
         for obj in self.depends_on(object=object, dependencies=dependencies):
             if obj.ObjectType == TOM.ObjectType.Measure:
-                if (f"{obj.Parent.Name}[{obj.Name}]" in
-                    format_dax_object_name(obj.Parent.Name, obj.Name)
-                    in object.Expression
+                if (f"{obj.Parent.Name}[{obj.Name}]" in expr) or (
+                    format_dax_object_name(obj.Parent.Name, obj.Name) in expr
                 ):
                     yield obj
 
-    def unqualified_columns(self, object
+    def unqualified_columns(self, object, dependencies: pd.DataFrame):
         """
         Obtains all unqualified column references for a given object.
 
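
`unqualified_columns` now takes an explicit `dependencies` DataFrame, matching `fully_qualified_measures`, and both now resolve the expression through `_get_expression`. A hedged usage sketch, assuming the calc-dependencies frame comes from `get_model_calc_dependencies` (imported from `sempy_labs._model_dependencies` later in this diff) and using hypothetical dataset, workspace, table, and measure names.

```python
from sempy_labs._model_dependencies import get_model_calc_dependencies
from sempy_labs.tom import connect_semantic_model

# Hypothetical names for illustration.
dep = get_model_calc_dependencies(dataset="Sales Model", workspace="My Workspace")

with connect_semantic_model(dataset="Sales Model", workspace="My Workspace") as tom:
    measure = tom.model.Tables["Sales"].Measures["Total Sales"]

    # Measures referenced by a fully qualified name, e.g. 'Sales'[Base Sales].
    for m in tom.fully_qualified_measures(object=measure, dependencies=dep):
        print(m.Name)

    # Columns referenced without a table qualifier, e.g. [Amount].
    for c in tom.unqualified_columns(object=measure, dependencies=dep):
        print(c.Name)
```
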
@@ -3397,6 +3466,8 @@ class TOMWrapper:
             dependencies["Object Name"] == dependencies["Parent Node"]
         ]
 
+        expr = self._get_expression(object=object)
+
         def create_pattern(tableList, b):
             patterns = [
                 r"(?<!" + re.escape(table) + r")(?<!'" + re.escape(table) + r"')"
@@ -3412,7 +3483,10 @@ class TOMWrapper:
                 if c.Name == obj.Name:
                     tableList.append(c.Parent.Name)
             if (
-                re.search(
+                re.search(
+                    create_pattern(tableList, re.escape(obj.Name)),
+                    expr,
+                )
                 is not None
             ):
                 yield obj
@@ -4674,6 +4748,183 @@ class TOMWrapper:
                 f"{icons.yellow_dot} '{m}' is not a member of the '{role_name}' role."
             )
 
+    def get_bim(self) -> dict:
+        """
+        Retrieves the .bim file for the semantic model.
+
+        Returns
+        -------
+        dict
+            The .bim file.
+        """
+
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        return (
+            json.loads(TOM.JsonScripter.ScriptCreate(self.model.Database))
+            .get("create")
+            .get("database")
+        )
+
+    def _reduce_model(self, perspective_name: str):
+        """
+        Reduces a model's objects based on a perspective. Adds the dependent objects within a perspective to that perspective.
+        """
+
+        from sempy_labs._model_dependencies import get_model_calc_dependencies
+
+        fabric.refresh_tom_cache(workspace=self._workspace_id)
+        dfP = fabric.list_perspectives(
+            dataset=self._dataset_id, workspace=self._workspace_id
+        )
+        dfP = dfP[dfP["Perspective Name"] == perspective_name]
+        if dfP.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{perspective_name}' is not a valid perspective in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
+            )
+
+        dep = get_model_calc_dependencies(
+            dataset=self._dataset_id, workspace=self._workspace_id
+        )
+        dep_filt = dep[
+            dep["Object Type"].isin(
+                [
+                    "Rows Allowed",
+                    "Measure",
+                    "Calc Item",
+                    "Calc Column",
+                    "Calc Table",
+                    "Hierarchy",
+                ]
+            )
+        ]
+
+        tables = dfP[dfP["Object Type"] == "Table"]["Table Name"].tolist()
+        measures = dfP[dfP["Object Type"] == "Measure"]["Object Name"].tolist()
+        columns = dfP[dfP["Object Type"] == "Column"][["Table Name", "Object Name"]]
+        cols = [
+            f"'{row[0]}'[{row[1]}]"
+            for row in columns.itertuples(index=False, name=None)
+        ]
+        hierarchies = dfP[dfP["Object Type"] == "Hierarchy"][
+            ["Table Name", "Object Name"]
+        ]
+        hier = [
+            f"'{row[0]}'[{row[1]}]"
+            for row in hierarchies.itertuples(index=False, name=None)
+        ]
+        filt = dep_filt[
+            (dep_filt["Object Type"].isin(["Rows Allowed", "Calc Item"]))
+            | (dep_filt["Object Type"] == "Measure")
+            & (dep_filt["Object Name"].isin(measures))
+            | (dep_filt["Object Type"] == "Calc Table")
+            & (dep_filt["Object Name"].isin(tables))
+            | (
+                (dep_filt["Object Type"].isin(["Calc Column"]))
+                & (
+                    dep_filt.apply(
+                        lambda row: f"'{row['Table Name']}'[{row['Object Name']}]",
+                        axis=1,
+                    ).isin(cols)
+                )
+            )
+            | (
+                (dep_filt["Object Type"].isin(["Hierarchy"]))
+                & (
+                    dep_filt.apply(
+                        lambda row: f"'{row['Table Name']}'[{row['Object Name']}]",
+                        axis=1,
+                    ).isin(hier)
+                )
+            )
+        ]
+
+        result_df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
+
+        for _, r in filt.iterrows():
+            added = False
+            obj_type = r["Referenced Object Type"]
+            table_name = r["Referenced Table"]
+            object_name = r["Referenced Object"]
+            if obj_type in ["Column", "Attribute Hierarchy"]:
+                obj = self.model.Tables[table_name].Columns[object_name]
+                if not self.in_perspective(
+                    object=obj, perspective_name=perspective_name
+                ):
+                    self.add_to_perspective(
+                        object=obj, perspective_name=perspective_name
+                    )
+                    added = True
+            elif obj_type == "Measure":
+                obj = self.model.Tables[table_name].Measures[object_name]
+                if not self.in_perspective(
+                    object=obj, perspective_name=perspective_name
+                ):
+                    self.add_to_perspective(
+                        object=obj, perspective_name=perspective_name
+                    )
+                    added = True
+            elif obj_type == "Table":
+                obj = self.model.Tables[table_name]
+                if not self.in_perspective(
+                    object=obj, perspective_name=perspective_name
+                ):
+                    self.add_to_perspective(
+                        object=obj, perspective_name=perspective_name
+                    )
+                    added = True
+            if added:
+                new_data = {
+                    "Table Name": table_name,
+                    "Object Name": object_name,
+                    "Object Type": obj_type,
+                }
+
+                result_df = pd.concat(
+                    [result_df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                )
+
+        # Reduce model...
+
+        # Remove unnecessary relationships
+        for r in self.model.Relationships:
+            if (
+                not self.in_perspective(
+                    object=r.FromTable, perspective_name=perspective_name
+                )
+            ) or (
+                not self.in_perspective(
+                    object=r.ToTable, perspective_name=perspective_name
+                )
+            ):
+                self.remove_object(object=r)
+
+        # Ensure relationships in reduced model have base columns
+        for r in self.model.Relationships:
+            if not self.in_perspective(r.FromColumn, perspective_name=perspective_name):
+                self.add_to_perspective(
+                    object=r.FromColumn, perspective_name=perspective_name
+                )
+            if not self.in_perspective(r.ToColumn, perspective_name=perspective_name):
+                self.add_to_perspective(
+                    object=r.ToColumn, perspective_name=perspective_name
+                )
+
+        # Remove objects not in the perspective
+        for t in self.model.Tables:
+            if not self.in_perspective(object=t, perspective_name=perspective_name):
+                self.remove_object(object=t)
+            else:
+                for attr in ["Columns", "Measures", "Hierarchies"]:
+                    for obj in getattr(t, attr):
+                        if not self.in_perspective(
+                            object=obj, perspective_name=perspective_name
+                        ):
+                            self.remove_object(object=obj)
+
+        # Return the objects added to the perspective based on dependencies
+        return result_df.drop_duplicates()
+
     def close(self):
 
         if not self._readonly and self.model is not None:
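
The new `get_bim` method scripts the connected database to a .bim-style dictionary via `TOM.JsonScripter`, while the private `_reduce_model` trims the model to a perspective plus its dependencies and returns the objects it added. A hedged sketch of reading the .bim payload; the dataset and workspace names are hypothetical, and `connect_semantic_model`'s arguments are assumed to match earlier releases.

```python
import json

from sempy_labs.tom import connect_semantic_model

# Hypothetical dataset and workspace names for illustration.
with connect_semantic_model(dataset="Sales Model", workspace="My Workspace") as tom:
    # Script the whole database definition to a .bim-style dictionary.
    bim = tom.get_bim()

    # The scripted output is plain JSON-compatible data, so it can be persisted
    # or inspected; print only the first part to keep the output short.
    print(json.dumps(bim, indent=2)[:500])
```
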
@@ -4763,6 +5014,7 @@ def connect_semantic_model(
         If connecting to Azure Analysis Services, enter the workspace parameter in the following format: 'asazure://<region>.asazure.windows.net/<server_name>'.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+
     Returns
     -------
     typing.Iterator[TOMWrapper]