semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0

sempy_labs/migration/_create_pqt_file.py
@@ -0,0 +1,238 @@
import sempy
import json
import os
import shutil
from sempy_labs.lakehouse._lakehouse import lakehouse_attached
from sempy._utils._log import log
from typing import Optional
import sempy_labs._icons as icons
from uuid import UUID
from sempy_labs._helper_functions import (
    resolve_dataset_name_and_id,
    resolve_workspace_name_and_id,
)


@log
def create_pqt_file(
    dataset: str | UUID,
    workspace: Optional[str | UUID] = None,
    file_name: str = "PowerQueryTemplate",
):
    """
    Dynamically generates a `Power Query Template <https://learn.microsoft.com/power-query/power-query-template>`_ file based on the semantic model. The .pqt file is
    saved within the Files section of your lakehouse.

    Dataflows Gen2 has a `limit of 50 tables <https://learn.microsoft.com/power-query/power-query-online-limits>`_. If there are more than 50 tables, this will save multiple Power Query Template
    files (with each file having a max of 50 tables).

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    file_name : str, default='PowerQueryTemplate'
        The name of the Power Query Template file to be generated.
    """

    sempy.fabric._client._utils._init_analysis_services()
    import Microsoft.AnalysisServices.Tabular as TOM
    from sempy_labs.tom import connect_semantic_model

    if not lakehouse_attached():
        raise ValueError(
            f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
        )

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

    folderPath = "/lakehouse/default/Files"
    subFolderPath = os.path.join(folderPath, "pqtnewfolder")

    with connect_semantic_model(
        dataset=dataset_id, workspace=workspace_id, readonly=True
    ) as tom:
        if not any(
            p.SourceType == TOM.PartitionSourceType.M for p in tom.all_partitions()
        ) and not any(t.RefreshPolicy for t in tom.model.Tables):
            print(
                f"{icons.info} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no Power Query logic."
            )
            return

        table_map = {}
        expr_map = {}

        for t in tom.model.Tables:
            table_name = t.Name
            for char in icons.special_characters:
                table_name = table_name.replace(char, "")
            if t.RefreshPolicy:
                table_map[table_name] = t.RefreshPolicy.SourceExpression
            elif any(p.SourceType == TOM.PartitionSourceType.M for p in t.Partitions):
                part_name = next(
                    p.Name
                    for p in t.Partitions
                    if p.SourceType == TOM.PartitionSourceType.M
                )
                expr = t.Partitions[part_name].Source.Expression
                table_map[table_name] = expr

        for e in tom.model.Expressions:
            expr_map[e.Name] = [str(e.Kind), e.Expression]

        expressions = tom.model.Expressions.Count

        # Dataflows Gen2 max table limit is 50.
        max_length = 50 - expressions
        table_chunks = [
            dict(list(table_map.items())[i : i + max_length])
            for i in range(0, len(table_map), max_length)
        ]

        def create_pqt(table_map: dict, expr_map: dict, file_name: str):

            os.makedirs(subFolderPath, exist_ok=True)

            class QueryMetadata:
                def __init__(
                    self,
                    QueryName,
                    QueryGroupId=None,
                    LastKnownIsParameter=None,
                    LastKnownResultTypeName=None,
                    LoadEnabled=True,
                    IsHidden=False,
                ):
                    self.QueryName = QueryName
                    self.QueryGroupId = QueryGroupId
                    self.LastKnownIsParameter = LastKnownIsParameter
                    self.LastKnownResultTypeName = LastKnownResultTypeName
                    self.LoadEnabled = LoadEnabled
                    self.IsHidden = IsHidden

            class RootObject:
                def __init__(
                    self,
                    DocumentLocale,
                    EngineVersion,
                    QueriesMetadata,
                    QueryGroups=None,
                ):
                    if QueryGroups is None:
                        QueryGroups = []
                    self.DocumentLocale = DocumentLocale
                    self.EngineVersion = EngineVersion
                    self.QueriesMetadata = QueriesMetadata
                    self.QueryGroups = QueryGroups

            # STEP 1: Create MashupDocument.pq
            mdfileName = "MashupDocument.pq"
            mdFilePath = os.path.join(subFolderPath, mdfileName)
            sb = "section Section1;"
            for t_name, query in table_map.items():
                sb = f'{sb}\nshared #"{t_name}" = '
                if query is not None:
                    pQueryNoSpaces = (
                        query.replace(" ", "")
                        .replace("\n", "")
                        .replace("\t", "")
                        .replace("\r", "")
                    )
                    if pQueryNoSpaces.startswith('letSource=""'):
                        query = 'let\n\tSource = ""\nin\n\tSource'
                sb = f"{sb}{query};"

            for e_name, kind_expr in expr_map.items():
                expr = kind_expr[1]
                sb = f'{sb}\nshared #"{e_name}" = {expr};'

            with open(mdFilePath, "w") as file:
                file.write(sb)

            # STEP 2: Create the MashupMetadata.json file
            mmfileName = "MashupMetadata.json"
            mmFilePath = os.path.join(subFolderPath, mmfileName)
            queryMetadata = []

            for t_name, query in table_map.items():
                queryMetadata.append(
                    QueryMetadata(t_name, None, None, None, True, False)
                )
            for e_name, kind_expr in expr_map.items():
                e_kind = kind_expr[0]
                if e_kind == "M":
                    queryMetadata.append(
                        QueryMetadata(e_name, None, None, None, True, False)
                    )
                else:
                    queryMetadata.append(
                        QueryMetadata(e_name, None, None, None, False, False)
                    )

            rootObject = RootObject(
                "en-US", "2.132.328.0", queryMetadata
            )  # "2.126.453.0"

            def obj_to_dict(obj):
                if isinstance(obj, list):
                    return [obj_to_dict(e) for e in obj]
                elif hasattr(obj, "__dict__"):
                    return {k: obj_to_dict(v) for k, v in obj.__dict__.items()}
                else:
                    return obj

            jsonContent = json.dumps(obj_to_dict(rootObject), indent=4)

            with open(mmFilePath, "w") as json_file:
                json_file.write(jsonContent)

            # STEP 3: Create Metadata.json file
            mFileName = "Metadata.json"
            mFilePath = os.path.join(subFolderPath, mFileName)
            metaData = {"Name": f"{file_name}", "Description": "", "Version": "1.0.0.0"}
            jsonContent = json.dumps(metaData, indent=4)

            with open(mFilePath, "w") as json_file:
                json_file.write(jsonContent)

            # STEP 4: Create [Content_Types].xml file:
            xml_content = """<?xml version="1.0" encoding="utf-8"?><Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types"><Default Extension="json" ContentType="application/json" /><Default Extension="pq" ContentType="application/x-ms-m" /></Types>"""
            xmlFileName = "[Content_Types].xml"
            xmlFilePath = os.path.join(subFolderPath, xmlFileName)
            with open(xmlFilePath, "w", encoding="utf-8") as file:
                file.write(xml_content)

            # STEP 5: Zip up the 4 files
            zipFileName = f"{file_name}.zip"
            zipFilePath = os.path.join(folderPath, zipFileName)
            shutil.make_archive(zipFilePath[:-4], "zip", subFolderPath)

            # STEP 6: Convert the zip file back into a .pqt file
            newExt = ".pqt"
            directory = os.path.dirname(zipFilePath)
            fileNameWithoutExtension = os.path.splitext(os.path.basename(zipFilePath))[
                0
            ]
            newFilePath = os.path.join(directory, fileNameWithoutExtension + newExt)
            shutil.move(zipFilePath, newFilePath)

            # STEP 7: Delete subFolder directory which is no longer needed
            shutil.rmtree(subFolderPath, ignore_errors=True)

            print(
                f"{icons.green_dot} '{file_name}.pqt' has been created based on the '{dataset_name}' semantic model in the '{workspace_name}' workspace within the Files section of your lakehouse."
            )

        a = 0
        for t_map in table_chunks:
            if a > 0:
                save_file_name = f"{file_name}_{a}"
            else:
                save_file_name = file_name
            a += 1
            create_pqt(t_map, expr_map, file_name=save_file_name)
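For reference, here is a minimal usage sketch of `create_pqt_file`. It assumes the function is re-exported from `sempy_labs.migration` and that the code runs in a Fabric notebook with a default lakehouse attached (per the `lakehouse_attached()` check above); the dataset, workspace, and file names are placeholders. Because a .pqt file is just the zip archive from STEPs 5-6 under a new extension, the sketch also lists its parts to confirm the four files written in STEPs 1-4. Note the chunking math: with, say, 120 M-based tables and 3 shared expressions, `max_length` is 47, so three templates would be written (SalesTemplate.pqt, SalesTemplate_1.pqt, SalesTemplate_2.pqt).

import zipfile

from sempy_labs.migration import create_pqt_file

# Placeholder names; substitute your own semantic model and workspace.
create_pqt_file(
    dataset="Sales Model",
    workspace="My Workspace",
    file_name="SalesTemplate",
)

# A .pqt file is a renamed zip archive, so its parts can be listed directly.
with zipfile.ZipFile("/lakehouse/default/Files/SalesTemplate.pqt") as pqt:
    print(pqt.namelist())
    # Expected entries: MashupDocument.pq, MashupMetadata.json,
    # Metadata.json, [Content_Types].xml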
sempy_labs/migration/_direct_lake_to_import.py
@@ -0,0 +1,105 @@
import sempy
from uuid import UUID
import sempy_labs._icons as icons
from typing import Optional
from sempy._utils._log import log


@log
def migrate_direct_lake_to_import(
    dataset: str | UUID,
    workspace: Optional[str | UUID] = None,
    mode: str = "import",
):
    """
    Migrates a semantic model from Direct Lake mode to import or DirectQuery mode. After running this function, you must go to the semantic model settings and update the cloud connection. Not doing so will result in an inability to refresh/use the semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    mode : str, default="import"
        The mode to migrate to. Can be either "import" or "directquery".
    """

    sempy.fabric._client._utils._init_analysis_services()
    import Microsoft.AnalysisServices.Tabular as TOM
    from sempy_labs.tom import connect_semantic_model

    modes = {
        "import": "Import",
        "directquery": "DirectQuery",
        "dq": "DirectQuery",
    }

    # Resolve mode
    mode = mode.lower()
    actual_mode = modes.get(mode)
    if actual_mode is None:
        raise ValueError(f"Invalid mode '{mode}'. Must be one of {list(modes.keys())}.")

    # if isinstance(tables, str):
    #     tables = [tables]

    with connect_semantic_model(
        dataset=dataset, workspace=workspace, readonly=False
    ) as tom:

        if not tom.is_direct_lake():
            print(
                f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
            )
            return

        # if tables is None:
        table_list = [t for t in tom.model.Tables]
        # else:
        #     table_list = [t for t in tom.model.Tables if t.Name in tables]
        #     if not table_list:
        #         raise ValueError(f"{icons.red_dot} No tables found to migrate.")

        for t in table_list:
            table_name = t.Name
            if t.Partitions.Count == 1 and all(
                p.Mode == TOM.ModeType.DirectLake for p in t.Partitions
            ):
                p = next(p for p in t.Partitions)
                partition_name = p.Name
                entity_name = p.Source.EntityName
                schema_name = p.Source.SchemaName or "dbo"
                # Rename Direct Lake partition
                t.Partitions[partition_name].Name = f"{partition_name}_remove"

                # Generate M expression for import partition
                expression = f"""let\n\tSource = DatabaseQuery,\n\tData = Source{{[Schema="{schema_name}",Item="{entity_name}"]}}[Data]\nin\n\tData"""

                # Generate M partition
                tom.add_m_partition(
                    table_name=table_name,
                    partition_name=partition_name,
                    expression=expression,
                    mode=actual_mode,
                )
                # Remove Direct Lake partition
                tom.remove_object(object=p)
                # if tables is not None:
                #     print(
                #         f"{icons.green_dot} The '{table_name}' table has been migrated to '{actual_mode}' mode."
                #     )

        tom.model.Model.DefaultMode = TOM.ModeType.Import
        # if tables is None:
        print(
            f"{icons.green_dot} All tables which were in Direct Lake mode have been migrated to '{actual_mode}' mode."
        )

        # Check
        # for t in tom.model.Tables:
        #     if t.Partitions.Count == 1 and all(p.Mode == TOM.ModeType.Import for p in t.Partitions) and t.CalculationGroup is None:
        #         p = next(p for p in t.Partitions)
        #         print(p.Name)
        #         print(p.Source.Expression)
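And a minimal usage sketch for `migrate_direct_lake_to_import`, again assuming the function is re-exported from `sempy_labs.migration` and using placeholder names. As the docstring warns, the model's cloud connection must be updated in the semantic model settings afterwards, or the model cannot be refreshed or used.

from sempy_labs.migration import migrate_direct_lake_to_import

# Migrate every Direct Lake table to import mode (the default):
migrate_direct_lake_to_import(dataset="Sales Model", workspace="My Workspace")

# Or migrate to DirectQuery instead; "dq" is accepted as an alias
# per the modes dict above:
migrate_direct_lake_to_import(
    dataset="Sales Model",
    workspace="My Workspace",
    mode="dq",
)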