semantic-link-labs 0.12.8 (py3-none-any.whl)
This diff shows the content of a publicly available package version released to a supported registry. It is provided for informational purposes only and reflects the package as it appears in the public registry.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/directlake/_update_directlake_partition_entity.py

```diff
@@ -0,0 +1,259 @@
+from sempy_labs.tom import connect_semantic_model
+from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from sempy_labs.directlake._dl_helper import get_direct_lake_source
+from sempy_labs._helper_functions import (
+    _convert_data_type,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+    resolve_workspace_name,
+)
+from sempy._utils._log import log
+from typing import List, Optional, Union
+import sempy_labs._icons as icons
+from uuid import UUID
+import json
+
+
+@log
+def update_direct_lake_partition_entity(
+    dataset: str | UUID,
+    table_name: Union[str, List[str]],
+    entity_name: Union[str, List[str]],
+    schema: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    table_name : str, List[str]
+        Name of the table(s) in the semantic model.
+    entity_name : str, List[str]
+        Name of the lakehouse table to be mapped to the semantic model table.
+    schema : str, default=None
+        The schema of the lakehouse table to be mapped to the semantic model table.
+        Defaults to None which resolves to the existing schema of the lakehouse table.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    # Support both str & list types
+    if isinstance(table_name, str):
+        table_name = [table_name]
+    if isinstance(entity_name, str):
+        entity_name = [entity_name]
+
+    if len(table_name) != len(entity_name):
+        raise ValueError(
+            f"{icons.red_dot} The 'table_name' and 'entity_name' arrays must be of equal length."
+        )
+
+    icons.sll_tags.append("UpdateDLPartition")
+
+    with connect_semantic_model(
+        dataset=dataset_id, readonly=False, workspace=workspace_id
+    ) as tom:
+
+        if not tom.is_direct_lake():
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode."
+            )
+
+        for tName in table_name:
+            i = table_name.index(tName)
+            eName = entity_name[i]
+            part_name = next(
+                (
+                    p.Name
+                    for t in tom.model.Tables
+                    for p in t.Partitions
+                    if t.Name == tName
+                ),
+                None,
+            )
+            current_slt = tom.model.Tables[tName].SourceLineageTag
+
+            if part_name is None:
+                raise ValueError(
+                    f"{icons.red_dot} The '{tName}' table in the '{dataset_name}' semantic model has not been updated."
+                )
+
+            tom.model.Tables[tName].Partitions[part_name].Source.EntityName = eName
+
+            # Update source lineage tag
+            if schema:
+                # Only set schema for DL over SQL (not DL over OneLake)
+                expression_source_name = (
+                    tom.model.Tables[tName]
+                    .Partitions[part_name]
+                    .Source.ExpressionSource.Name
+                )
+                expr = tom.model.Expressions[expression_source_name].Expression
+                if "Sql.Database" in expr:
+                    tom.model.Tables[tName].Partitions[
+                        part_name
+                    ].Source.SchemaName = schema
+                tom.model.Tables[tName].SourceLineageTag = f"[{schema}].[{eName}]"
+            else:
+                tom.model.Tables[tName].SourceLineageTag = f"[dbo].[{eName}]"
+
+            new_slt = tom.model.Tables[tName].SourceLineageTag
+
+            # PBI_RemovedChildren logic
+            try:
+                e_name = (
+                    tom.model.Tables[tName]
+                    .Partitions[part_name]
+                    .Source.ExpressionSource.Name
+                )
+                ann = tom.get_annotation_value(
+                    object=tom.model.Expressions[e_name], name="PBI_RemovedChildren"
+                )
+                if ann:
+                    e = json.loads(ann)
+                    for i in e:
+                        sltag = (
+                            i.get("remoteItemId", {})
+                            .get("analysisServicesObject", {})
+                            .get("sourceLineageTag", {})
+                        )
+                        if sltag == current_slt:
+                            i["remoteItemId"]["analysisServicesObject"][
+                                "sourceLineageTag"
+                            ] = new_slt
+                            tom.set_annotation(
+                                object=tom.model.Expressions[e_name],
+                                name="PBI_RemovedChildren",
+                                value=json.dumps(e),
+                            )
+            except Exception as e:
+                print(
+                    f"{icons.yellow_dot} Warning: Could not update PBI_RemovedChildren annotation for table '{tName}'. {str(e)}"
+                )
+
+            print(
+                f"{icons.green_dot} The '{tName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{eName}' table."
+            )
+
+
+@log
+def add_table_to_direct_lake_semantic_model(
+    dataset: str | UUID,
+    table_name: str,
+    lakehouse_table_name: str,
+    refresh: bool = True,
+    workspace: Optional[str | UUID] = None,
+    columns: Optional[List[str] | str] = None,
+):
+    """
+    Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    table_name : str
+        Name of the table in the semantic model.
+    lakehouse_table_name : str
+        The name of the Fabric lakehouse table.
+    refresh : bool, default=True
+        Refreshes the table after it is added to the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which the semantic model resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    columns : List[str] | str, default=None
+        A list of column names to add to the table. If None, all columns from the
+        lakehouse table will be added.
+    """
+
+    from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
+    )
+
+    if artifact_type == "Warehouse":
+        raise ValueError(
+            f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
+        )
+
+    if artifact_type is None:
+        raise ValueError(
+            f"{icons.red_dot} This function only supports Direct Lake semantic models where the source lakehouse resides in the same workspace as the semantic model."
+        )
+
+    if isinstance(columns, str):
+        columns = [columns]
+
+    lakehouse_workspace = resolve_workspace_name(workspace_id=lakehouse_workspace_id)
+
+    with connect_semantic_model(
+        dataset=dataset_id, readonly=False, workspace=workspace_id
+    ) as tom:
+
+        table_count = tom.model.Tables.Count
+
+        if not tom.is_direct_lake() and table_count > 0:
+            raise ValueError(
+                f"{icons.red_dot} This function is only valid for Direct Lake semantic models or semantic models with no tables."
+            )
+
+        if any(t.Name == table_name for t in tom.model.Tables):
+            raise ValueError(
+                f"The '{table_name}' table already exists in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+            )
+
+        dfLC = get_lakehouse_columns(
+            lakehouse=lakehouse_name, workspace=lakehouse_workspace
+        )
+        dfLC_filt = dfLC[dfLC["Table Name"] == lakehouse_table_name]
+        if dfLC_filt.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{lakehouse_table_name}' table was not found in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
+            )
+        if columns:
+            dfLC_filt = dfLC_filt[dfLC_filt["Column Name"].isin(columns)]
+
+            if dfLC_filt.empty:
+                raise ValueError(
+                    f"{icons.red_dot} No matching columns were found in the '{lakehouse_table_name}' table in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
+                )
+
+        tom.add_table(name=table_name)
+        print(
+            f"{icons.green_dot} The '{table_name}' table has been added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+        )
+        tom.add_entity_partition(
+            table_name=table_name, entity_name=lakehouse_table_name
+        )
+        print(
+            f"{icons.green_dot} The '{lakehouse_table_name}' partition has been added to the '{table_name}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+        )
+
+        for _, r in dfLC_filt.iterrows():
+            lakeCName = r["Column Name"]
+            dType = r["Data Type"]
+            dt = _convert_data_type(dType)
+            tom.add_data_column(
+                table_name=table_name,
+                column_name=lakeCName,
+                source_column=lakeCName,
+                data_type=dt,
+            )
+            print(
+                f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+            )
+
+        if refresh:
+            refresh_semantic_model(
+                dataset=dataset_id, tables=table_name, workspace=workspace_id
+            )
```
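For orientation, here is a minimal usage sketch of the two functions this file adds. It assumes they are re-exported from `sempy_labs.directlake` (the package ships a `directlake/__init__.py`, listed above), and every dataset, workspace, table, and lakehouse name below is a hypothetical placeholder rather than anything from the package.

```python
# Hypothetical usage sketch; model/workspace/table names are placeholders.
from sempy_labs.directlake import (
    update_direct_lake_partition_entity,
    add_table_to_direct_lake_semantic_model,
)

# Remap two semantic model tables to new lakehouse tables in one call.
update_direct_lake_partition_entity(
    dataset="Sales Model",             # semantic model name or UUID
    table_name=["Sales", "Customer"],  # tables in the semantic model
    entity_name=["fact_sales_v2", "dim_customer_v2"],  # lakehouse tables
    schema="sales",                    # only applied for Direct Lake over SQL
    workspace="Analytics",             # None would resolve to the notebook's workspace
)

# Add a lakehouse table (restricted to three columns) to the same model,
# then refresh the new table.
add_table_to_direct_lake_semantic_model(
    dataset="Sales Model",
    table_name="Returns",
    lakehouse_table_name="fact_returns",
    columns=["ReturnID", "OrderID", "ReturnDate"],
    refresh=True,
    workspace="Analytics",
)
```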
sempy_labs/directlake/_warm_cache.py

```diff
@@ -0,0 +1,236 @@
+import sempy.fabric as fabric
+import pandas as pd
+from tqdm.auto import tqdm
+import numpy as np
+import time
+from .._helper_functions import (
+    format_dax_object_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
+from .._refresh_semantic_model import refresh_semantic_model
+from .._model_dependencies import get_measure_dependencies
+from typing import Optional
+from sempy._utils._log import log
+import sempy_labs._icons as icons
+from uuid import UUID
+
+
+@log
+def warm_direct_lake_cache_perspective(
+    dataset: str | UUID,
+    perspective: str,
+    add_dependencies: bool = False,
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Warms the cache of a Direct Lake semantic model by running a simple DAX query against the columns in a perspective.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    perspective : str
+        Name of the perspective which contains objects to be used for warming the cache.
+    add_dependencies : bool, default=False
+        Includes object dependencies in the cache warming process.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Returns a pandas dataframe showing the columns that have been put into memory.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    fabric.refresh_tom_cache(workspace=workspace)
+
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)
+    if not any(r["Mode"] == "DirectLake" for _, r in dfP.iterrows()):
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset_name}' semantic model in the '{workspace_name}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode."
+        )
+
+    dfPersp = fabric.list_perspectives(dataset=dataset_id, workspace=workspace_id)
+    dfPersp["DAX Object Name"] = format_dax_object_name(
+        dfPersp["Table Name"], dfPersp["Object Name"]
+    )
+    dfPersp_filt = dfPersp[dfPersp["Perspective Name"] == perspective]
+
+    if len(dfPersp_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{perspective}' perspective does not exist or contains no objects within the '{dataset_name}' semantic model in the '{workspace_name}' workspace."
+        )
+
+    dfPersp_c = dfPersp_filt[dfPersp_filt["Object Type"] == "Column"]
+
+    column_values = dfPersp_c["DAX Object Name"].tolist()
+
+    if add_dependencies:
+        # Measure dependencies
+        md = get_measure_dependencies(dataset_id, workspace_id)
+        md["Referenced Full Object"] = format_dax_object_name(
+            md["Referenced Table"], md["Referenced Object"]
+        )
+        dfPersp_m = dfPersp_filt[(dfPersp_filt["Object Type"] == "Measure")]
+        md_filt = md[
+            (md["Object Name"].isin(dfPersp_m["Object Name"].values))
+            & (md["Referenced Object Type"] == "Column")
+        ]
+        measureDep = md_filt["Referenced Full Object"].unique()
+
+        # Hierarchy dependencies
+        dfPersp_h = dfPersp_filt[(dfPersp_filt["Object Type"] == "Hierarchy")]
+        dfH = fabric.list_hierarchies(dataset=dataset_id, workspace=workspace_id)
+        dfH["Hierarchy Object"] = format_dax_object_name(
+            dfH["Table Name"], dfH["Hierarchy Name"]
+        )
+        dfH["Column Object"] = format_dax_object_name(
+            dfH["Table Name"], dfH["Column Name"]
+        )
+        dfH_filt = dfH[
+            dfH["Hierarchy Object"].isin(dfPersp_h["DAX Object Name"].values)
+        ]
+        hierarchyDep = dfH_filt["Column Object"].unique()
+
+        # Relationship dependencies
+        unique_table_names = dfPersp_filt["Table Name"].unique()
+        dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
+        dfR["From Object"] = format_dax_object_name(
+            dfR["From Table"], dfR["From Column"]
+        )
+        dfR["To Object"] = format_dax_object_name(dfR["To Table"], dfR["To Column"])
+        filtered_dfR = dfR[
+            dfR["From Table"].isin(unique_table_names)
+            & dfR["To Table"].isin(unique_table_names)
+        ]
+
+        fromObjects = filtered_dfR["From Object"].unique()
+        toObjects = filtered_dfR["To Object"].unique()
+
+        merged_list = np.concatenate(
+            [column_values, measureDep, hierarchyDep, fromObjects, toObjects]
+        )
+        merged_list_unique = list(set(merged_list))
+
+    else:
+        merged_list_unique = column_values
+
+    df = pd.DataFrame(merged_list_unique, columns=["DAX Object Name"])
+    df[["Table Name", "Column Name"]] = df["DAX Object Name"].str.split(
+        "[", expand=True
+    )
+    df["Table Name"] = df["Table Name"].str[1:-1]
+    df["Column Name"] = df["Column Name"].str[0:-1]
+
+    return _put_columns_into_memory(dataset=dataset, workspace=workspace, col_df=df)
+
+
+@log
+def warm_direct_lake_cache_isresident(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Performs a refresh on the semantic model and puts the columns which were in memory prior to the refresh back into memory.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Returns a pandas dataframe showing the columns that have been put into memory.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)
+    if not any(r["Mode"] == "DirectLake" for _, r in dfP.iterrows()):
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset_name}' semantic model in the '{workspace_name}' workspace is not in Direct Lake mode. This function is specifically for semantic models in Direct Lake mode."
+        )
+
+    # Identify columns which are currently in memory (Is Resident = True)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id, extended=True)
+    dfC_filtered = dfC[dfC["Is Resident"] == True]
+
+    if len(dfC_filtered) == 0:
+        raise ValueError(
+            f"{icons.yellow_dot} At present, no columns are in memory in the '{dataset_name}' semantic model in the '{workspace_name}' workspace."
+        )
+
+    # Refresh/frame dataset
+    refresh_semantic_model(
+        dataset=dataset_id, refresh_type="full", workspace=workspace_id
+    )
+    time.sleep(2)
+
+    return _put_columns_into_memory(
+        dataset=dataset, workspace=workspace, col_df=dfC_filtered
+    )
+
+
+@log
+def _put_columns_into_memory(dataset, workspace, col_df, return_dataframe: bool = True):
+
+    row_limit = 1000000
+
+    dfT = fabric.list_tables(dataset=dataset, workspace=workspace, extended=True)
+    col_df = col_df.copy()
+
+    col_df["DAX Object"] = format_dax_object_name(
+        col_df["Table Name"], col_df["Column Name"]
+    )
+    tbls = col_df["Table Name"].unique()
+
+    for table_name in (bar := tqdm(tbls)):
+        dfT_filt = dfT[dfT["Name"] == table_name]
+        col_df_filt = col_df[col_df["Table Name"] == table_name]
+        if not dfT_filt.empty:
+            row_count = dfT_filt["Row Count"].iloc[0]
+            bar.set_description(f"Warming the '{table_name}' table...")
+            if pd.isna(row_count):
+                pass
+            elif row_count < row_limit:
+                columns = col_df_filt["DAX Object"].tolist()
+                css = ", ".join(columns)
+                dax = f"EVALUATE TOPN(1, SELECTCOLUMNS('{table_name}', {css}))"
+                fabric.evaluate_dax(
+                    dataset=dataset, dax_string=dax, workspace=workspace
+                )
+            else:
+                for _, r in col_df_filt.iterrows():
+                    dax_object = r["DAX Object"]
+                    dax = f"""EVALUATE TOPN(1, SELECTCOLUMNS('{table_name}', {dax_object}))"""
+                    fabric.evaluate_dax(
+                        dataset=dataset, dax_string=dax, workspace=workspace
+                    )
+
+    if return_dataframe:
+        print(
+            f"{icons.green_dot} The following columns have been put into memory. Temperature indicates the current column temperature."
+        )
+
+        dfC = fabric.list_columns(dataset=dataset, workspace=workspace, extended=True)
+        dfC["DAX Object"] = format_dax_object_name(
+            dfC["Table Name"], dfC["Column Name"]
+        )
+        dfC_filt = dfC[dfC["DAX Object"].isin(col_df["DAX Object"].values)]
+
+        return (
+            dfC_filt[["Table Name", "Column Name", "Is Resident", "Temperature"]]
+            .sort_values(by=["Table Name", "Column Name"], ascending=True)
+            .reset_index(drop=True)
+        )
```
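As above, a hedged usage sketch for the two public cache-warming entry points, again assuming they are re-exported from `sempy_labs.directlake`; all names are placeholders. `_put_columns_into_memory` is private and is only reached through these two functions.

```python
# Hypothetical usage sketch; model/workspace/perspective names are placeholders.
from sempy_labs.directlake import (
    warm_direct_lake_cache_perspective,
    warm_direct_lake_cache_isresident,
)

# Warm the columns named in a perspective, plus the columns its measures,
# hierarchies, and relationships depend on.
df = warm_direct_lake_cache_perspective(
    dataset="Sales Model",
    perspective="Reporting",
    add_dependencies=True,
    workspace="Analytics",
)

# Or: note which columns are resident, re-frame the model with a full refresh,
# and pull the same columns back into memory.
df = warm_direct_lake_cache_isresident(dataset="Sales Model", workspace="Analytics")

# Both return one row per column: Table Name, Column Name, Is Resident, Temperature.
print(df.head())
```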
sempy_labs/environment/__init__.py

```diff
@@ -0,0 +1,23 @@
+from ._items import (
+    list_environments,
+    create_environment,
+    delete_environment,
+    publish_environment,
+)
+from ._pubstage import (
+    get_published_spark_compute,
+    get_staging_spark_compute,
+    list_published_libraries,
+    list_staging_libraries,
+)
+
+__all__ = [
+    "list_environments",
+    "create_environment",
+    "delete_environment",
+    "publish_environment",
+    "get_published_spark_compute",
+    "get_staging_spark_compute",
+    "list_published_libraries",
+    "list_staging_libraries",
+]
```
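Only the package's `__init__.py` appears in this hunk, so a sketch can safely show just the public surface it defines; the underlying signatures live in `_items.py` and `_pubstage.py`, which are not reproduced here.

```python
# Inspect the module's public surface, exactly as declared in __all__ above.
from sempy_labs import environment

print(environment.__all__)
# ['list_environments', 'create_environment', 'delete_environment',
#  'publish_environment', 'get_published_spark_compute',
#  'get_staging_spark_compute', 'list_published_libraries',
#  'list_staging_libraries']
```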