semantic-link-labs 0.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
|
@@ -0,0 +1,343 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from sempy_labs._helper_functions import (
|
|
4
|
+
format_dax_object_name,
|
|
5
|
+
resolve_dataset_name_and_id,
|
|
6
|
+
resolve_workspace_name_and_id,
|
|
7
|
+
)
|
|
8
|
+
import sempy_labs._icons as icons
|
|
9
|
+
from typing import Any, Dict, Optional
|
|
10
|
+
from anytree import Node, RenderTree
|
|
11
|
+
from sempy._utils._log import log
|
|
12
|
+
from uuid import UUID
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@log
def get_measure_dependencies(
    dataset: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Shows all dependencies for all measures in a semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        Shows all dependencies for all measures in the semantic model.
    """

    # Raw measure dependency rows from the DISCOVER_CALC_DEPENDENCY DMV.
    dep = fabric.evaluate_dax(
        dataset=dataset,
        workspace=workspace,
        dax_string="""
        SELECT
        [TABLE] AS [Table Name]
        ,[OBJECT] AS [Object Name]
        ,[OBJECT_TYPE] AS [Object Type]
        ,[REFERENCED_TABLE] AS [Referenced Table]
        ,[REFERENCED_OBJECT] AS [Referenced Object]
        ,[REFERENCED_OBJECT_TYPE] AS [Referenced Object Type]
        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
        WHERE [OBJECT_TYPE] = 'MEASURE'
        """,
    )

    dep["Object Type"] = dep["Object Type"].str.capitalize()
    dep["Referenced Object Type"] = dep["Referenced Object Type"].str.capitalize()

    dep["Full Object Name"] = format_dax_object_name(
        dep["Table Name"], dep["Object Name"]
    )
    dep["Referenced Full Object Name"] = format_dax_object_name(
        dep["Referenced Table"], dep["Referenced Object"]
    )
    dep["Parent Node"] = dep["Object Name"]

    df = dep.copy()

    # A row is 'Done' once its referenced object is not itself a measure;
    # only measure-to-measure references need further (transitive) expansion.
    df["Done"] = df["Referenced Object Type"] != "Measure"

    # Iteratively expand measure references until every row is resolved.
    # NOTE: a valid model cannot contain circular measure references, so this
    # loop terminates for any model the engine accepts.
    while not df["Done"].all():
        for i, r in df.iterrows():
            if r["Done"]:
                continue

            # All direct dependencies of the referenced measure.
            dep_filt = dep[dep["Full Object Name"] == r["Referenced Full Object Name"]]

            for _, dependency in dep_filt.iterrows():
                # The referencing object stays the original measure; only the
                # referenced side advances one level down the dependency chain.
                df = pd.concat(
                    [
                        df,
                        pd.DataFrame(
                            [
                                {
                                    "Table Name": r["Table Name"],
                                    "Object Name": r["Object Name"],
                                    "Object Type": r["Object Type"],
                                    "Referenced Object": dependency[
                                        "Referenced Object"
                                    ],
                                    "Referenced Table": dependency["Referenced Table"],
                                    "Referenced Object Type": dependency[
                                        "Referenced Object Type"
                                    ],
                                    "Done": dependency["Referenced Object Type"]
                                    != "Measure",
                                    "Full Object Name": r["Full Object Name"],
                                    "Referenced Full Object Name": dependency[
                                        "Referenced Full Object Name"
                                    ],
                                    "Parent Node": r["Referenced Object"],
                                }
                            ]
                        ),
                    ],
                    ignore_index=True,
                )

            df.loc[i, "Done"] = True

    # Drop the bookkeeping columns before returning.
    df = df.drop(["Done", "Full Object Name", "Referenced Full Object Name"], axis=1)

    return df
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
@log
def get_model_calc_dependencies(
    dataset: str | UUID,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Shows all dependencies for all objects in a semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        Shows all dependencies for all objects in the semantic model.
    """

    # Raw dependency rows (all object types) from the DMV, including the
    # defining DAX expression of each object.
    dep = fabric.evaluate_dax(
        dataset=dataset,
        workspace=workspace,
        dax_string="""
        SELECT
        [TABLE] AS [Table Name],
        [OBJECT] AS [Object Name],
        [OBJECT_TYPE] AS [Object Type],
        [EXPRESSION] AS [Expression],
        [REFERENCED_TABLE] AS [Referenced Table],
        [REFERENCED_OBJECT] AS [Referenced Object],
        [REFERENCED_OBJECT_TYPE] AS [Referenced Object Type]
        FROM $SYSTEM.DISCOVER_CALC_DEPENDENCY
        """,
    )
    # Format data columns: 'CALC_COLUMN' -> 'Calc Column', etc.
    dep["Object Type"] = dep["Object Type"].str.replace("_", " ").str.title()
    dep["Referenced Object Type"] = (
        dep["Referenced Object Type"].str.replace("_", " ").str.title()
    )
    dep["Full Object Name"] = format_dax_object_name(
        dep["Table Name"], dep["Object Name"]
    )
    dep["Referenced Full Object Name"] = format_dax_object_name(
        dep["Referenced Table"], dep["Referenced Object"]
    )
    dep["Parent Node"] = dep["Object Name"]

    # Initialize dependency DataFrame with 'Done' status; only references to
    # calculated objects need transitive expansion.
    df = dep.copy()
    objs = {"Measure", "Calc Column", "Calculation Item", "Calc Table"}
    df["Done"] = (
        df["Referenced Object Type"].apply(lambda x: x not in objs).astype(bool)
    )

    # Set to track visited dependencies to prevent circular references
    visited = set(
        zip(
            df["Full Object Name"],
            df["Referenced Full Object Name"],
            df["Object Type"],
            df["Referenced Object Type"],
        )
    )

    # Expand dependencies iteratively
    while not df["Done"].all():
        incomplete_rows = df[df["Done"] == False]
        for _, row in incomplete_rows.iterrows():
            referenced_full_name = row["Referenced Full Object Name"]
            referenced_object_type = row["Referenced Object Type"]
            # Direct dependencies of the referenced (calculated) object.
            dep_filt = dep[
                (dep["Full Object Name"] == referenced_full_name)
                & (dep["Object Type"] == referenced_object_type)
            ]
            # Expand dependencies and update 'Done' status as needed
            new_rows = []
            for _, dependency in dep_filt.iterrows():
                # Check if the dependency has already been visited
                dependency_pair = (
                    row["Full Object Name"],
                    dependency["Referenced Full Object Name"],
                    row["Object Type"],
                    dependency["Referenced Object Type"],
                )
                if dependency_pair in visited:
                    continue  # Skip already visited dependencies

                visited.add(dependency_pair)  # Mark as visited

                is_done = dependency["Referenced Object Type"] not in objs
                new_row = {
                    "Table Name": row["Table Name"],
                    "Object Name": row["Object Name"],
                    "Object Type": row["Object Type"],
                    "Expression": row["Expression"],
                    "Referenced Table": dependency["Referenced Table"],
                    "Referenced Object": dependency["Referenced Object"],
                    "Referenced Object Type": dependency["Referenced Object Type"],
                    "Done": is_done,
                    "Full Object Name": row["Full Object Name"],
                    "Referenced Full Object Name": dependency[
                        "Referenced Full Object Name"
                    ],
                    "Parent Node": row["Referenced Object"],
                }
                new_rows.append(new_row)

            if new_rows:
                new_rows_df = pd.DataFrame(new_rows)
                new_rows_df = new_rows_df.dropna(
                    axis=1, how="all"
                )  # Drop empty columns
                df = pd.concat([df, new_rows_df], ignore_index=True)

            df.loc[df.index == row.name, "Done"] = True
    # Finalize DataFrame and yield result
    df = df.drop(columns=["Done"])

    return df
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
@log
def measure_dependency_tree(
    dataset: str | UUID, measure_name: str, workspace: Optional[str | UUID] = None
):
    """
    Prints a measure dependency tree of all dependent objects for a measure in a semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    measure_name : str
        Name of the measure.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

    fabric.refresh_tom_cache(workspace=workspace)

    # Bail out early if the requested measure is not present in the model.
    measures = fabric.list_measures(dataset=dataset_id, workspace=workspace_id)
    if measures[measures["Measure Name"] == measure_name].empty:
        print(
            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset_name}' semantic model in the '{workspace_name}' workspace."
        )
        return

    dependencies = get_measure_dependencies(dataset_id, workspace_id)
    relevant = dependencies[dependencies["Object Name"] == measure_name]

    # Registry of tree nodes keyed by object name.
    tree_nodes: Dict[str, Any] = {}

    # Build the tree: one parent node per 'Parent Node' value, one child node
    # per dependency row beneath it.
    for _, rec in relevant.iterrows():
        parent_name = rec["Parent Node"]

        parent = tree_nodes.get(parent_name)
        if parent is None:
            parent = Node(parent_name)
            tree_nodes[parent_name] = parent
            parent.custom_property = f"{icons.measure_icon} "

        child_name = rec["Referenced Object"]
        child = Node(child_name, parent=parent)
        ref_type = rec["Referenced Object Type"]
        if ref_type == "Column":
            ref_table = rec["Referenced Table"]
            child.custom_property = f"{icons.column_icon} '{ref_table}'"
        elif ref_type == "Table":
            child.custom_property = f"{icons.table_icon} "
        elif ref_type == "Measure":
            child.custom_property = f"{icons.measure_icon} "

        # Later rows referring to the same name reuse/overwrite this entry.
        tree_nodes[child_name] = child

    # Render the tree rooted at the requested measure.
    for prefix, _, node in RenderTree(tree_nodes[measure_name]):
        # NOTE(review): only nodes tagged with the table icon print their
        # custom_property; column/measure icons are dropped here — confirm intent.
        is_table_node = (
            hasattr(node, "custom_property")
            and icons.table_icon in node.custom_property
        )
        if is_table_node:
            print(f"{prefix}{node.custom_property}'{node.name}'")
        else:
            print(f"{prefix}'{node.name}'")
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from typing import Optional
|
|
3
|
+
from sempy_labs._helper_functions import (
|
|
4
|
+
resolve_workspace_id,
|
|
5
|
+
_base_api,
|
|
6
|
+
_create_dataframe,
|
|
7
|
+
_update_dataframe_datatypes,
|
|
8
|
+
_get_item_definition,
|
|
9
|
+
delete_item,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
from uuid import UUID
|
|
13
|
+
from sempy._utils._log import log
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@log
def list_mounted_data_factories(
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Shows a list of mounted data factories from the specified workspace.

    This is a wrapper function for the following API: `Items - List Mounted Data Factories <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/list-mounted-data-factories>`_.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of mounted data factories from the specified workspace.
    """

    workspace_id = resolve_workspace_id(workspace)

    columns = {
        "Mounted Data Factory Name": "str",
        "Mounted Data Factory Id": "str",
        "Description": "str",
    }

    # Start from an empty, correctly-typed frame in case the API returns nothing.
    df = _create_dataframe(columns=columns)
    pages = _base_api(
        request=f"/v1/workspaces/{workspace_id}/mountedDataFactories",
        uses_pagination=True,
    )

    # Flatten every page of the paginated response into one record list.
    records = [
        {
            "Mounted Data Factory Name": item.get("displayName"),
            "Mounted Data Factory Id": item.get("id"),
            "Description": item.get("description"),
        }
        for page in pages
        for item in page.get("value", [])
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@log
def get_mounted_data_factory_definition(
    mounted_data_factory: str | UUID, workspace: Optional[str | UUID] = None
) -> dict:
    """
    Returns the specified MountedDataFactory public definition.

    This is a wrapper function for the following API: `Items - Get Mounted Data Factory Definition <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/get-mounted-data-factory-definition>`_.

    Parameters
    ----------
    mounted_data_factory : str | uuid.UUID
        The name or ID of the mounted data factory.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    dict
        The 'mountedDataFactory-content.json' file from the mounted data factory definition.
    """

    # Delegate to the shared item-definition helper; return_dataframe=False
    # yields the raw definition dict rather than a DataFrame.
    request_args = dict(
        item=mounted_data_factory,
        type="MountedDataFactory",
        workspace=workspace,
        return_dataframe=False,
    )
    return _get_item_definition(**request_args)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
@log
def delete_mounted_data_factory(
    mounted_data_factory: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Deletes the specified mounted data factory.

    This is a wrapper function for the following API: `Items - Delete Mounted Data Factory <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/delete-mounted-data-factory>`_.

    Parameters
    ----------
    mounted_data_factory : str | uuid.UUID
        The name or ID of the mounted data factory.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Fix: the docstring promises workspace defaults to None, and every sibling
    # function accepts an omitted workspace — add the missing default value.
    delete_item(
        item=mounted_data_factory, type="MountedDataFactory", workspace=workspace
    )