semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)
This diff shows the content of package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/directlake/_get_directlake_lakehouse.py
@@ -0,0 +1,62 @@
+import sempy.fabric as fabric
+from .._helper_functions import (
+    resolve_lakehouse_id,
+)
+from typing import Optional, Tuple
+from uuid import UUID
+import sempy_labs._icons as icons
+from sempy._utils._log import log
+
+
+@log
+def get_direct_lake_lakehouse(
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
+) -> Tuple[str, UUID]:
+    """
+    Identifies the lakehouse used by a Direct Lake semantic model.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    lakehouse : str, default=None
+        The Fabric lakehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    lakehouse_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    Tuple[str, uuid.UUID]
+        The lakehouse name and lakehouse ID.
+    """
+
+    from sempy_labs.directlake._dl_helper import get_direct_lake_source
+
+    artifact_type, artifact_name, artifact_id, workspace_id = get_direct_lake_source(
+        dataset=dataset, workspace=workspace
+    )
+
+    if artifact_type in ["Lakehouse", "Warehouse"]:
+        return artifact_name, artifact_id
+    else:
+        fabric.refresh_tom_cache(workspace=workspace)
+        dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+        dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+        if dfP_filt.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+            )
+        lakehouse_id = resolve_lakehouse_id(
+            lakehouse=lakehouse, workspace=lakehouse_workspace
+        )
+        return lakehouse, lakehouse_id
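A minimal usage sketch for the function above, assuming a Microsoft Fabric notebook session and that sempy_labs/directlake/__init__.py re-exports the function (otherwise import from the private module shown above); the model and workspace names are placeholders:

    from sempy_labs.directlake import get_direct_lake_lakehouse

    # Resolve which lakehouse backs a Direct Lake model.
    lakehouse_name, lakehouse_id = get_direct_lake_lakehouse(
        dataset="Sales Model",   # placeholder name (a UUID also works)
        workspace="Analytics",   # placeholder; None resolves per the docstring
    )
    print(lakehouse_name, lakehouse_id)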
sempy_labs/directlake/_get_shared_expression.py
@@ -0,0 +1,34 @@
+from typing import Optional
+from uuid import UUID
+from sempy._utils._log import log
+
+
+@log
+def get_shared_expression(
+    lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
+) -> str:
+    """
+    Dynamically generates the M expression used by a Direct Lake model for a given lakehouse.
+
+    Parameters
+    ----------
+    lakehouse : str, default=None
+        The Fabric lakehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    str
+        Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
+    """
+    from sempy_labs.directlake._generate_shared_expression import (
+        generate_shared_expression,
+    )
+
+    return generate_shared_expression(
+        item_name=lakehouse, item_type="Lakehouse", workspace=workspace
+    )
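A usage sketch under the same assumptions as above (placeholder names; the function is a thin wrapper that delegates to generate_shared_expression with item_type="Lakehouse"):

    from sempy_labs.directlake import get_shared_expression

    # Returns the M expression a Direct Lake model uses to reach
    # the lakehouse's SQL Endpoint; None falls back to the attached lakehouse.
    m_expr = get_shared_expression(lakehouse="GoldLakehouse", workspace="Analytics")
    print(m_expr)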
sempy_labs/directlake/_guardrails.py
@@ -0,0 +1,96 @@
+import sempy.fabric as fabric
+import pandas as pd
+from typing import Optional
+import sempy_labs._icons as icons
+from uuid import UUID
+from .._helper_functions import (
+    resolve_workspace_name_and_id,
+)
+from sempy._utils._log import log
+
+
+@log
+def get_direct_lake_guardrails() -> pd.DataFrame:
+    """
+    Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query
+    based on Microsoft's `online documentation <https://learn.microsoft.com/power-bi/enterprise/directlake-overview>`_.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A table showing the Direct Lake guardrails by SKU.
+    """
+
+    url = "https://learn.microsoft.com/power-bi/enterprise/directlake-overview"
+
+    tables = pd.read_html(url)
+    for df in tables:
+        first_column_name = df.columns[0]
+        if first_column_name.startswith("Fabric"):
+            df[first_column_name] = df[first_column_name].str.split("/")
+            df = df.explode(first_column_name, ignore_index=True)
+            break
+
+    return df
+
+
+def get_sku_size(workspace: Optional[str | UUID] = None) -> str:
+    """
+    Shows the SKU size for a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    str
+        The SKU size for a workspace.
+    """
+    from sempy_labs._capacities import list_capacities
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
+
+    if len(dfW) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{workspace_name}' is not a valid workspace."
+        )
+
+    capacity_id = dfW["Capacity Id"].iloc[0]
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Id"] == capacity_id]
+
+    if len(dfC_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{capacity_id}' Id is not a valid capacity Id."
+        )
+
+    return dfC_filt["Sku"].iloc[0]
+
+
+def get_directlake_guardrails_for_sku(sku_size: str) -> pd.DataFrame:
+    """
+    Shows the guardrails for Direct Lake based on the SKU used by your workspace's capacity.
+    * Use the result of the 'get_sku_size' function as an input for this function's sku_size parameter.*
+
+    Parameters
+    ----------
+    sku_size : str
+        Sku size of a workspace/capacity.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A table showing the Direct Lake guardrails for the given SKU.
+    """
+
+    df = get_direct_lake_guardrails()
+    col_name = df.columns[0]
+    filtered_df = df[df[col_name] == sku_size]
+
+    return filtered_df
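As the docstring notes, these three functions chain: get_sku_size resolves a workspace's capacity SKU, and get_directlake_guardrails_for_sku filters the guardrail table down to that SKU. A sketch under the same assumptions as earlier (note that get_direct_lake_guardrails scrapes learn.microsoft.com via pd.read_html, so network access and an HTML parser such as lxml are required):

    from sempy_labs.directlake import (
        get_sku_size,
        get_directlake_guardrails_for_sku,
    )

    sku = get_sku_size(workspace="Analytics")  # placeholder; returns e.g. an "F..." SKU string
    guardrails = get_directlake_guardrails_for_sku(sku)
    print(guardrails)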
sempy_labs/directlake/_list_directlake_model_calc_tables.py
@@ -0,0 +1,70 @@
+import sempy.fabric as fabric
+import pandas as pd
+from .._list_functions import list_tables
+from ..tom import connect_semantic_model
+from typing import Optional
+from sempy._utils._log import log
+import sempy_labs._icons as icons
+from uuid import UUID
+from .._helper_functions import (
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
+
+
+@log
+def list_direct_lake_model_calc_tables(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery).
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    df = pd.DataFrame(columns=["Table Name", "Source Expression"])
+
+    with connect_semantic_model(
+        dataset=dataset_id, readonly=True, workspace=workspace_id
+    ) as tom:
+
+        is_direct_lake = tom.is_direct_lake()
+
+        if not is_direct_lake:
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
+            )
+        else:
+            fabric.refresh_tom_cache(workspace=workspace)
+            dfA = fabric.list_annotations(dataset=dataset_id, workspace=workspace_id)
+            dfT = list_tables(dataset_id, workspace_id)
+            dfA_filt = dfA[
+                (dfA["Object Type"] == "Model")
+                & (dfA["Annotation Name"].isin(dfT["Name"]))
+            ]
+
+            for i, r in dfA_filt.iterrows():
+                tName = r["Annotation Name"]
+                se = r["Annotation Value"]
+
+                new_data = {"Table Name": tName, "Source Expression": se}
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                )
+
+    return df
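A usage sketch (placeholder names; each returned row pairs a migrated calculated table with the original DAX expression that the migration stored as a model annotation):

    from sempy_labs.directlake import list_direct_lake_model_calc_tables

    calc_tables = list_direct_lake_model_calc_tables(dataset="Sales Model")
    for _, row in calc_tables.iterrows():
        print(row["Table Name"], "->", row["Source Expression"])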
sempy_labs/directlake/_show_unsupported_directlake_objects.py
@@ -0,0 +1,90 @@
+import sempy.fabric as fabric
+import pandas as pd
+from .._helper_functions import (
+    format_dax_object_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
+from typing import Optional, Tuple
+from sempy._utils._log import log
+from uuid import UUID
+
+
+@log
+def show_unsupported_direct_lake_objects(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
+    """
+    Returns a list of a semantic model's objects which are not supported by Direct Lake based on
+    `official documentation <https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations>`_.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame, pandas.DataFrame, pandas.DataFrame
+        3 pandas dataframes showing objects in a semantic model which are not supported by Direct Lake.
+    """
+
+    pd.options.mode.chained_assignment = None
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    fabric.refresh_tom_cache(workspace=workspace)
+
+    dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
+    dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
+
+    # Calc tables
+    dfT_filt = dfT[dfT["Type"] == "Calculated Table"]
+    dfT_filt.rename(columns={"Name": "Table Name"}, inplace=True)
+    t = dfT_filt[["Table Name", "Type"]]
+
+    # Calc columns
+    dfC_filt = dfC[(dfC["Type"] == "Calculated") | (dfC["Data Type"] == "Binary")]
+    c = dfC_filt[["Table Name", "Column Name", "Type", "Data Type", "Source"]]
+
+    # Relationships
+    dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
+    dfR["From Object"] = format_dax_object_name(dfR["From Table"], dfR["From Column"])
+    dfR["To Object"] = format_dax_object_name(dfR["To Table"], dfR["To Column"])
+    merged_from = pd.merge(
+        dfR, dfC, left_on="From Object", right_on="Column Object", how="left"
+    )
+    merged_to = pd.merge(
+        dfR, dfC, left_on="To Object", right_on="Column Object", how="left"
+    )
+
+    dfR["From Column Data Type"] = merged_from["Data Type"]
+    dfR["To Column Data Type"] = merged_to["Data Type"]
+
+    dfR_filt = dfR[(dfR["From Column Data Type"] != dfR["To Column Data Type"])]
+    r = dfR_filt[
+        [
+            "From Table",
+            "From Column",
+            "To Table",
+            "To Column",
+            "From Column Data Type",
+            "To Column Data Type",
+        ]
+    ]
+
+    # print('Calculated Tables are not supported...')
+    # display(t)
+    # print("Learn more about Direct Lake limitations here: https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations")
+    # print('Calculated columns are not supported. Columns of binary data type are not supported.')
+    # display(c)
+    # print('Columns used for relationship cannot be of data type datetime and they also must be of the same data type.')
+    # display(r)
+
+    return t, c, r
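A sketch of consuming the three returned dataframes (placeholder names as before):

    from sempy_labs.directlake import show_unsupported_direct_lake_objects

    # t: calculated tables; c: calculated/binary columns; r: relationships whose
    # endpoint columns have mismatched data types. All are unsupported in Direct Lake.
    t, c, r = show_unsupported_direct_lake_objects(dataset="Sales Model")
    print(len(t), "tables,", len(c), "columns,", len(r), "relationships flagged")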
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py
@@ -0,0 +1,239 @@
+from ._generate_shared_expression import generate_shared_expression
+from .._helper_functions import (
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+    resolve_item_name_and_id,
+    resolve_lakehouse_name_and_id,
+)
+from sempy._utils._log import log
+from ..tom import connect_semantic_model
+from typing import Optional, List
+import sempy_labs._icons as icons
+from uuid import UUID
+import re
+
+
+@log
+def _extract_expression_list(expression):
+    """
+    Finds the pattern for DL/SQL & DL/OL expressions in the semantic model.
+    """
+
+    pattern_sql = r'Sql\.Database\s*\(\s*"([^"]+)"\s*,\s*"([^"]+)"\s*\)'
+    pattern_no_sql = (
+        r'AzureStorage\.DataLake\(".*?/([0-9a-fA-F\-]{36})/([0-9a-fA-F\-]{36})"'
+    )
+
+    match_sql = re.search(pattern_sql, expression)
+    match_no_sql = re.search(pattern_no_sql, expression)
+
+    result = []
+    if match_sql:
+        value_1, value_2 = match_sql.groups()
+        result = [value_1, value_2, True]
+    elif match_no_sql:
+        value_1, value_2 = match_no_sql.groups()
+        result = [value_1, value_2, False]
+
+    return result
+
+
+@log
+def _get_direct_lake_expressions(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> dict:
+    """
+    Extracts a dictionary of all Direct Lake expressions from a semantic model.
+    """
+
+    from sempy_labs.tom import connect_semantic_model
+
+    result = {}
+
+    with connect_semantic_model(dataset=dataset, workspace=workspace) as tom:
+        for e in tom.model.Expressions:
+            expr_name = e.Name
+            expr = e.Expression
+
+            list_values = _extract_expression_list(expr)
+            if list_values:
+                result[expr_name] = list_values
+
+    return result
+
+
+@log
+def update_direct_lake_model_lakehouse_connection(
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
+):
+    """
+    Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse.
+
+    Parameters
+    ----------
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    lakehouse : str, default=None
+        The Fabric lakehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    lakehouse_workspace : str | UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    update_direct_lake_model_connection(
+        dataset=dataset,
+        workspace=workspace,
+        source=lakehouse,
+        source_type="Lakehouse",
+        source_workspace=lakehouse_workspace,
+    )
+
+
+@log
+def update_direct_lake_model_connection(
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    source: Optional[str] = None,
+    source_type: str = "Lakehouse",
+    source_workspace: Optional[str | UUID] = None,
+    use_sql_endpoint: bool = True,
+    tables: Optional[str | List[str]] = None,
+):
+    """
+    Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse/warehouse.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    source : str, default=None
+        The name of the Fabric lakehouse/warehouse used by the Direct Lake semantic model.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    source_type : str, default="Lakehouse"
+        The type of source for the Direct Lake semantic model. Valid options: "Lakehouse", "Warehouse".
+    source_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse/warehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    use_sql_endpoint : bool, default=True
+        If True, the SQL Endpoint will be used for the connection.
+        If False, Direct Lake over OneLake will be used.
+    tables : str | List[str], default=None
+        The name(s) of the table(s) to update in the Direct Lake semantic model.
+        If None, all tables will be updated (if there is only one expression).
+        If multiple tables are specified, they must be provided as a list.
+    """
+    if use_sql_endpoint:
+        icons.sll_tags.append("UpdateDLConnection_SQL")
+    else:
+        icons.sll_tags.append("UpdateDLConnection_DLOL")
+
+    if isinstance(tables, str):
+        tables = [tables]
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    source_type = source_type.capitalize()
+
+    if source_type not in ["Lakehouse", "Warehouse"]:
+        raise ValueError(
+            f"{icons.red_dot} The 'source_type' must be either 'Lakehouse' or 'Warehouse'."
+        )
+
+    if source_workspace is None:
+        source_workspace = workspace_name
+
+    if source_type == "Lakehouse":
+        (source_name, source_id) = resolve_lakehouse_name_and_id(
+            lakehouse=source, workspace=source_workspace
+        )
+    else:
+        (source_name, source_id) = resolve_item_name_and_id(
+            item=source, type=source_type, workspace=source_workspace
+        )
+
+    shared_expression = generate_shared_expression(
+        item_name=source_name,
+        item_type=source_type,
+        workspace=source_workspace,
+        use_sql_endpoint=use_sql_endpoint,
+    )
+
+    expression_dict = _get_direct_lake_expressions(dataset=dataset, workspace=workspace)
+    expressions = list(expression_dict.keys())
+
+    with connect_semantic_model(
+        dataset=dataset_id, readonly=False, workspace=workspace_id
+    ) as tom:
+
+        if not tom.is_direct_lake():
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            )
+
+        # Update the single connection expression
+        if len(expressions) > 1 and not tables:
+            print(
+                f"{icons.info} Multiple expressions found in the model. Please specify the tables to update using the 'tables' parameter."
+            )
+            return
+        elif len(expressions) == 1 and not tables:
+            expr = expressions[0]
+            tom.model.Expressions[expr].Expression = shared_expression
+
+            print(
+                f"{icons.green_dot} The expression in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
+            )
+        else:
+            import sempy
+
+            sempy.fabric._client._utils._init_analysis_services()
+            import Microsoft.AnalysisServices.Tabular as TOM
+
+            expr_list = _extract_expression_list(shared_expression)
+
+            expr_name = next(
+                (name for name, exp in expression_dict.items() if exp == expr_list),
+                None,
+            )
+
+            # If the expression does not already exist, create it
+            def generate_unique_name(existing_names):
+                i = 1
+                while True:
+                    candidate = f"DatabaseQuery{i}"
+                    if candidate not in existing_names:
+                        return candidate
+                    i += 1
+
+            if not expr_name:
+                expr_name = generate_unique_name(expressions)
+                tom.add_expression(name=expr_name, expression=shared_expression)
+
+            all_tables = [t.Name for t in tom.model.Tables]
+            for t_name in tables:
+                if t_name not in all_tables:
+                    raise ValueError(
+                        f"{icons.red_dot} The table '{t_name}' does not exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+                    )
+                p = next(p for p in tom.model.Tables[t_name].Partitions)
+                if p.Mode != TOM.ModeType.DirectLake:
+                    raise ValueError(
+                        f"{icons.red_dot} The table '{t_name}' in the '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode. This function is only applicable to Direct Lake tables."
+                    )
+
+                p.Source.ExpressionSource = tom.model.Expressions[expr_name]
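Finally, a hedged sketch of remapping a model's connection with the public entry point above. Per the branching in the code, a model with a single expression can be repointed wholesale; when the model has more than one expression, the tables parameter must name the tables to move (all names below are placeholders):

    from sempy_labs.directlake import update_direct_lake_model_connection

    update_direct_lake_model_connection(
        dataset="Sales Model",
        source="GoldLakehouse",            # target lakehouse (placeholder)
        source_type="Lakehouse",
        use_sql_endpoint=False,            # False switches to Direct Lake over OneLake
        tables=["DimDate", "FactSales"],   # required when the model has >1 expression
    )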