semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
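Based on the manifest above, the wheel ships a single top-level package, `sempy_labs`, with admin, directlake, lakehouse, migration, report, and tom subpackages. A minimal sketch of installing and importing it in a notebook might look like the following; the pinned version matches this diff, while the `%pip` magic is an assumption about the runtime environment:

```python
# Sketch only: assumes a Jupyter/Fabric notebook where %pip is available.
%pip install semantic-link-labs==0.12.8

import sempy_labs as labs          # top-level package (see top_level.txt above)
from sempy_labs import migration   # subpackage containing the module diffed below
```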
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py

@@ -0,0 +1,533 @@
import sempy
import sempy.fabric as fabric
import re
from sempy_labs._helper_functions import (
    create_relationship_name,
    retry,
    format_dax_object_name,
)
from sempy_labs.tom import connect_semantic_model
from typing import Optional
from sempy._utils._log import log
import sempy_labs._icons as icons
from uuid import UUID


@log
def migrate_model_objects_to_semantic_model(
    dataset: str,
    new_dataset: str,
    workspace: Optional[str | UUID] = None,
    new_dataset_workspace: Optional[str | UUID] = None,
):
    """
    Adds the rest of the model objects (besides tables/columns) and their properties to a Direct Lake semantic model based on an import/DirectQuery semantic model.

    Parameters
    ----------
    dataset : str
        Name of the import/DirectQuery semantic model.
    new_dataset : str
        Name of the Direct Lake semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name in which the import/DirectQuery semantic model exists.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    new_dataset_workspace : str | uuid.UUID, default=None
        The Fabric workspace name in which the Direct Lake semantic model will be created.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    sempy.fabric._client._utils._init_analysis_services()
    import Microsoft.AnalysisServices.Tabular as TOM
    import System

    if dataset == new_dataset:
        raise ValueError(
            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
        )

    fabric.refresh_tom_cache(workspace=workspace)
    icons.sll_tags.append("DirectLakeMigration")

    dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
    dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
    dfRole = fabric.get_roles(dataset=dataset, workspace=workspace)
    dfRLS = fabric.get_row_level_security_permissions(
        dataset=dataset, workspace=workspace
    )
    dfCI = fabric.list_calculation_items(dataset=dataset, workspace=workspace)
    dfP = fabric.list_perspectives(dataset=dataset, workspace=workspace)
    dfTranslation = fabric.list_translations(dataset=dataset, workspace=workspace)
    dfH = fabric.list_hierarchies(dataset=dataset, workspace=workspace)
    dfPar = fabric.list_partitions(dataset=dataset, workspace=workspace)

    dfP_cc = dfPar[(dfPar["Source Type"] == "Calculated")]
    dfP_fp = dfP_cc[dfP_cc["Query"].str.contains("NAMEOF")]

    print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")

    @retry(
        sleep_time=1,
        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
    )
    def dyn_connect():
        with connect_semantic_model(
            dataset=new_dataset, readonly=True, workspace=new_dataset_workspace
        ) as tom:

            tom.model

    dyn_connect()

    @retry(
        sleep_time=1,
        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
    )
    def dyn_connect2():
        with connect_semantic_model(
            dataset=dataset, readonly=True, workspace=workspace
        ) as tom:

            tom.model

    dyn_connect2()

    with connect_semantic_model(
        dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
    ) as tom:

        isDirectLake = tom.is_direct_lake()

        print(f"\n{icons.in_progress} Updating table properties...")
        for t in tom.model.Tables:
            t.IsHidden = bool(dfT.loc[dfT["Name"] == t.Name, "Hidden"].iloc[0])
            t.Description = dfT.loc[dfT["Name"] == t.Name, "Description"].iloc[0]
            t.DataCategory = dfT.loc[dfT["Name"] == t.Name, "Data Category"].iloc[0]

            print(
                f"{icons.green_dot} The '{t.Name}' table's properties have been updated."
            )

        print(f"\n{icons.in_progress} Updating column properties...")
        for t in tom.model.Tables:
            if (
                t.Name not in dfP_fp["Table Name"].values
            ):  # do not include field parameters
                dfT_filtered = dfT[dfT["Name"] == t.Name]
                tType = dfT_filtered["Type"].iloc[0]
                for c in t.Columns:
                    if not c.Name.startswith("RowNumber-"):
                        dfC_filt = dfC[
                            (dfC["Table Name"] == t.Name)
                            & (dfC["Column Name"] == c.Name)
                        ]
                        cName = dfC_filt["Column Name"].iloc[0]
                        c.Name = cName
                        # if tType == "Table":
                        #    c.SourceColumn = cName.replace(" ", "_")
                        c.IsHidden = bool(dfC_filt["Hidden"].iloc[0])
                        c.DataType = System.Enum.Parse(
                            TOM.DataType, dfC_filt["Data Type"].iloc[0]
                        )
                        c.DisplayFolder = dfC_filt["Display Folder"].iloc[0]
                        c.FormatString = dfC_filt["Format String"].iloc[0]
                        c.SummarizeBy = System.Enum.Parse(
                            TOM.AggregateFunction,
                            dfC_filt["Summarize By"].iloc[0],
                        )
                        c.DataCategory = dfC_filt["Data Category"].iloc[0]
                        c.IsKey = bool(dfC_filt["Key"].iloc[0])
                        sbc = dfC_filt["Sort By Column"].iloc[0]

                        if sbc is not None:
                            if any(
                                o.Name == sbc and o.Parent.Name == c.Parent.Name
                                for o in tom.all_columns()
                            ):
                                c.SortByColumn = tom.model.Tables[t.Name].Columns[sbc]
                            else:
                                print(
                                    f"{icons.red_dot} Failed to create '{sbc}' as a Sort By Column for the '{c.Name}' in the '{t.Name}' table."
                                )
                        print(
                            f"{icons.green_dot} The '{t.Name}'[{c.Name}] column's properties have been updated."
                        )

        print(f"\n{icons.in_progress} Creating hierarchies...")
        dfH_grouped = (
            dfH.groupby(
                [
                    "Table Name",
                    "Hierarchy Name",
                    "Hierarchy Hidden",
                    "Hierarchy Description",
                ]
            )
            .agg({"Level Name": list, "Column Name": list})
            .reset_index()
        )

        for i, r in dfH_grouped.iterrows():
            tName = r["Table Name"]
            hName = r["Hierarchy Name"]
            hDesc = r["Hierarchy Description"]
            hHid = bool(r["Hierarchy Hidden"])
            cols = r["Column Name"]
            lvls = r["Level Name"]

            missing_columns = []
            for col in cols:
                if not any(
                    c.Name == col
                    for t in tom.model.Tables
                    if t.Name == tName
                    for c in t.Columns
                ):
                    missing_columns.append(col)

            if any(
                t.Name == tName and h.Name == hName
                for t in tom.model.Tables
                for h in t.Hierarchies
            ):
                print(
                    f"{icons.warning} The '{hName}' hierarchy within the '{tName}' table already exists."
                )
            elif len(missing_columns) > 0:
                print(
                    f"{icons.red_dot} The '{hName}' hierarchy within the '{tName}' table cannot be created as the {missing_columns} column(s) do not exist."
                )
            else:
                tom.add_hierarchy(
                    table_name=tName,
                    hierarchy_name=hName,
                    hierarchy_description=hDesc,
                    hierarchy_hidden=hHid,
                    columns=cols,
                    levels=lvls,
                )
                print(f"{icons.green_dot} The '{hName}' hierarchy has been added.")

        print(f"\n{icons.in_progress} Creating measures...")
        for i, r in dfM.iterrows():
            tName = r["Table Name"]
            mName = r["Measure Name"]
            mExpr = r["Measure Expression"]
            mHidden = bool(r["Measure Hidden"])
            mDF = r["Measure Display Folder"]
            mDesc = r["Measure Description"]
            mFS = r["Format String"]

            if not any(m.Name == mName for m in tom.all_measures()):
                tom.add_measure(
                    table_name=tName,
                    measure_name=mName,
                    expression=mExpr,
                    hidden=mHidden,
                    display_folder=mDF,
                    description=mDesc,
                    format_string=mFS,
                )
                print(f"{icons.green_dot} The '{mName}' measure has been added.")
        print(f"\n{icons.in_progress} Creating calculation groups...")
        for cgName in dfCI["Calculation Group Name"].unique():

            isHidden = bool(
                dfCI.loc[(dfCI["Calculation Group Name"] == cgName), "Hidden"].iloc[0]
            )
            prec = int(
                dfCI.loc[(dfCI["Calculation Group Name"] == cgName), "Precedence"].iloc[
                    0
                ]
            )
            desc = dfCI.loc[
                (dfCI["Calculation Group Name"] == cgName), "Description"
            ].iloc[0]

            if not any(t.Name == cgName for t in tom.model.Tables):
                tom.add_calculation_group(
                    name=cgName,
                    description=desc,
                    precedence=prec,
                    hidden=isHidden,
                )
                print(
                    f"{icons.green_dot} The '{cgName}' calculation group has been added."
                )
                tom.model.DiscourageImplicitMeasures = True

            # print(
            #    f"\n{icons.in_progress} Updating calculation group column names..."
            # )
            dfC_filt = dfC[(dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)]
            colName = dfC_filt["Column Name"].iloc[0]
            tom.model.Tables[cgName].Columns["Name"].Name = colName

            calcItems = dfCI.loc[
                dfCI["Calculation Group Name"] == cgName,
                "Calculation Item Name",
            ].unique()

            print(f"\n{icons.in_progress} Creating calculation items...")
            for calcItem in calcItems:
                ordinal = int(
                    dfCI.loc[
                        (dfCI["Calculation Group Name"] == cgName)
                        & (dfCI["Calculation Item Name"] == calcItem),
                        "Ordinal",
                    ].iloc[0]
                )
                expr = dfCI.loc[
                    (dfCI["Calculation Group Name"] == cgName)
                    & (dfCI["Calculation Item Name"] == calcItem),
                    "Expression",
                ].iloc[0]
                fse = dfCI.loc[
                    (dfCI["Calculation Group Name"] == cgName)
                    & (dfCI["Calculation Item Name"] == calcItem),
                    "Format String Expression",
                ].iloc[0]

                if not any(
                    ci.CalculationGroup.Parent.Name == cgName and ci.Name == calcItem
                    for ci in tom.all_calculation_items()
                ):
                    tom.add_calculation_item(
                        table_name=cgName,
                        calculation_item_name=calcItem,
                        expression=expr,
                        format_string_expression=fse,
                        ordinal=ordinal,
                    )
                    print(
                        f"{icons.green_dot} The '{calcItem}' has been added to the '{cgName}' calculation group."
                    )

        print(f"\n{icons.in_progress} Creating relationships...")
        with connect_semantic_model(
            dataset=dataset, readonly=True, workspace=workspace
        ) as tom_old:

            for r in tom_old.model.Relationships:
                relName = create_relationship_name(
                    r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name
                )

                # Relationship already exists
                if any(
                    rel.FromTable.Name == r.FromTable.Name
                    and rel.FromColumn.Name == r.FromColumn.Name
                    and rel.ToTable.Name == r.ToTable.Name
                    and rel.ToColumn.Name == r.ToColumn.Name
                    for rel in tom.model.Relationships
                ):
                    print(
                        f"{icons.warning} The {relName} relationship was not created as it already exists in the '{new_dataset}' semantic model within the '{new_dataset_workspace}' workspace."
                    )

                # Direct Lake with incompatible column data types
                elif isDirectLake and r.FromColumn.DataType != r.ToColumn.DataType:
                    print(
                        f"{icons.warning} The {relName} relationship was not created as Direct Lake does not support relationships based on columns with different data types."
                    )
                # Columns do not exist in the new semantic model
                elif not any(
                    c.Name == r.FromColumn.Name and c.Parent.Name == r.FromTable.Name
                    for c in tom.all_columns()
                ) or not any(
                    c.Name == r.ToColumn.Name and c.Parent.Name == r.ToTable.Name
                    for c in tom.all_columns()
                ):
                    # Direct lake and based on calculated column
                    if isDirectLake and (
                        any(
                            c.Name == r.FromColumn.Name
                            and c.Parent.Name == r.FromTable.Name
                            for c in tom_old.all_calculated_columns()
                        )
                        or any(
                            c.Name == r.ToColumn.Name
                            and c.Parent.Name == r.ToTable.Name
                            for c in tom_old.all_calculated_columns()
                        )
                    ):
                        print(
                            f"{icons.red_dot} The {relName} relationship was not created as the necessary column(s) do not exist. This is due to Direct Lake not supporting calculated columns."
                        )
                    elif not any(
                        c.Name == r.FromColumn.Name
                        and c.Parent.Name == r.FromTable.Name
                        for c in tom.all_columns()
                    ):
                        print(
                            f"{icons.red_dot} The {relName} relationship cannot be created because the {format_dax_object_name(r.FromTable.Name, r.FromColumn.Name)} column is not available in the '{new_dataset}' semantic model within the '{new_dataset_workspace}' workspace."
                        )
                    elif not any(
                        c.Name == r.ToColumn.Name and c.Parent.Name == r.ToTable.Name
                        for c in tom.all_columns()
                    ):
                        print(
                            f"{icons.red_dot} The {relName} relationship cannot be created because the {format_dax_object_name(r.ToTable.Name, r.ToColumn.Name)} column is not available in the '{new_dataset}' semantic model within the '{new_dataset_workspace}' workspace."
                        )
                else:
                    tom.add_relationship(
                        from_table=r.FromTable.Name,
                        from_column=r.FromColumn.Name,
                        to_table=r.ToTable.Name,
                        to_column=r.ToColumn.Name,
                        from_cardinality=str(r.FromCardinality),
                        to_cardinality=str(r.ToCardinality),
                        cross_filtering_behavior=str(r.CrossFilteringBehavior),
                        security_filtering_behavior=str(r.SecurityFilteringBehavior),
                        rely_on_referential_integrity=r.RelyOnReferentialIntegrity,
                        is_active=r.IsActive,
                    )
                    print(
                        f"{icons.green_dot} The {relName} relationship has been added."
                    )

        print(f"\n{icons.in_progress} Creating roles...")
        for index, row in dfRole.iterrows():
            roleName = row["Role"]
            roleDesc = row["Description"]
            modPerm = row["Model Permission"]

            if not any(r.Name == roleName for r in tom.model.Roles):
                tom.add_role(
                    role_name=roleName,
                    model_permission=modPerm,
                    description=roleDesc,
                )
                print(f"{icons.green_dot} The '{roleName}' role has been added.")

        print(f"\n{icons.in_progress} Creating row level security...")
        for index, row in dfRLS.iterrows():
            roleName = row["Role"]
            tName = row["Table"]
            expr = row["Filter Expression"]

            if any(t.Name == tName for t in tom.model.Tables):
                tom.set_rls(
                    role_name=roleName, table_name=tName, filter_expression=expr
                )
                print(
                    f"{icons.green_dot} Row level security for the '{tName}' table within the '{roleName}' role has been set."
                )
            else:
                print(
                    f"{icons.red_dot} Row level security for the '{tName}' table within the '{roleName}' role was not set."
                )

        print(f"\n{icons.in_progress} Creating perspectives...")
        for pName in dfP["Perspective Name"].unique():

            if not any(p.Name == pName for p in tom.model.Perspectives):
                tom.add_perspective(perspective_name=pName)
                print(f"{icons.green_dot} The '{pName}' perspective has been added.")

        print(f"\n{icons.in_progress} Adding objects to perspectives...")
        for index, row in dfP.iterrows():
            pName = row["Perspective Name"]
            tName = row["Table Name"]
            oName = row["Object Name"]
            oType = row["Object Type"]
            tType = dfT.loc[(dfT["Name"] == tName), "Type"].iloc[0]

            try:
                if oType == "Table":
                    tom.add_to_perspective(
                        object=tom.model.Tables[tName], perspective_name=pName
                    )
                elif oType == "Column":
                    tom.add_to_perspective(
                        object=tom.model.Tables[tName].Columns[oName],
                        perspective_name=pName,
                    )
                elif oType == "Measure":
                    tom.add_to_perspective(
                        object=tom.model.Tables[tName].Measures[oName],
                        perspective_name=pName,
                    )
                elif oType == "Hierarchy":
                    tom.add_to_perspective(
                        object=tom.model.Tables[tName].Hierarchies[oName],
                        perspective_name=pName,
                    )
            except Exception:
                pass

        print(f"\n{icons.in_progress} Creating translation languages...")
        for trName in dfTranslation["Culture Name"].unique():
            if not any(c.Name == trName for c in tom.model.Cultures):
                tom.add_translation(trName)
                print(
                    f"{icons.green_dot} The '{trName}' translation language has been added."
                )

        print(f"\n{icons.in_progress} Creating translation values...")
        for index, row in dfTranslation.iterrows():
            trName = row["Culture Name"]
            tName = row["Table Name"]
            oName = row["Object Name"]
            oType = row["Object Type"]
            translation = row["Translation"]
            prop = row["Property"]

            if prop == "Caption":
                prop = "Name"
            elif prop == "DisplayFolder":
                prop = "Display Folder"

            try:
                if oType == "Table":
                    tom.set_translation(
                        object=tom.model.Tables[tName],
                        language=trName,
                        property=prop,
                        value=translation,
                    )
                elif oType == "Column":
                    tom.set_translation(
                        object=tom.model.Tables[tName].Columns[oName],
                        language=trName,
                        property=prop,
                        value=translation,
                    )
                elif oType == "Measure":
                    tom.set_translation(
                        object=tom.model.Tables[tName].Measures[oName],
                        language=trName,
                        property=prop,
                        value=translation,
                    )
                elif oType == "Hierarchy":
                    tom.set_translation(
                        object=tom.model.Tables[tName].Hierarchies[oName],
                        language=trName,
                        property=prop,
                        value=translation,
                    )
                elif oType == "Level":

                    pattern = r"\[([^]]+)\]"
                    matches = re.findall(pattern, oName)
                    lName = matches[0]

                    pattern = r"'([^']+)'"
                    matches = re.findall(pattern, oName)
                    hName = matches[0]
                    tom.set_translation(
                        object=tom.model.Tables[tName].Hierarchies[hName].Levels[lName],
                        language=trName,
                        property=prop,
                        value=translation,
                    )
            except Exception:
                pass

    print(
        f"\n{icons.green_dot} Migration of objects from '{dataset}' -> '{new_dataset}' is complete."
    )