semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
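Based on its added-line count (+351) and contents, the hunk below appears to be the new file sempy_labs/_clear_cache.py from the listing above: cache clearing, backup, restore, and backup-file utilities for semantic models.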
@@ -0,0 +1,351 @@
+import sempy.fabric as fabric
+from sempy_labs._helper_functions import (
+    is_default_semantic_model,
+    _get_adls_client,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
+)
+from typing import Optional
+import sempy_labs._icons as icons
+from sempy._utils._log import log
+import pandas as pd
+from uuid import UUID
+
+
+@log
+def clear_cache(dataset: str | UUID, workspace: Optional[str | UUID] = None):
+    """
+    Clears the cache of a semantic model.
+    See `here <https://learn.microsoft.com/analysis-services/instances/clear-the-analysis-services-caches?view=asallproducts-allversions>`_ for documentation.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None, which resolves to the workspace of the attached lakehouse
+        or, if no lakehouse is attached, to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    if is_default_semantic_model(dataset=dataset, workspace=workspace):
+        raise ValueError(
+            f"{icons.red_dot} Cannot run XMLA operations against a default semantic model. Please choose a different semantic model. "
+            "See here for more information: https://learn.microsoft.com/fabric/data-warehouse/semantic-models"
+        )
+
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    xmla = f"""
+        <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
+            <Object>
+                <DatabaseID>{dataset_id}</DatabaseID>
+            </Object>
+        </ClearCache>
+        """
+    fabric.execute_xmla(dataset=dataset_id, xmla_command=xmla, workspace=workspace_id)
+    print(
+        f"{icons.green_dot} Cache cleared for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+    )
+
+
+@log
+def backup_semantic_model(
+    dataset: str | UUID,
+    file_path: str,
+    allow_overwrite: bool = True,
+    apply_compression: bool = True,
+    workspace: Optional[str | UUID] = None,
+    password: Optional[str] = None,
+):
+    """
+    `Backs up <https://learn.microsoft.com/azure/analysis-services/analysis-services-backup>`_ a semantic model to the ADLS Gen2 storage account connected to the workspace.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    file_path : str
+        The ADLS Gen2 storage account location in which to back up the semantic model. Always saves within the 'power-bi-backup/<workspace name>' folder.
+        Must end in '.abf'.
+        Example 1: file_path = 'MyModel.abf'
+        Example 2: file_path = 'MyFolder/MyModel.abf'
+    allow_overwrite : bool, default=True
+        If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location.
+    apply_compression : bool, default=True
+        If True, compresses the backup file. Compressed backup files save disk space but require slightly higher CPU utilization.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None, which resolves to the workspace of the attached lakehouse
+        or, if no lakehouse is attached, to the workspace of the notebook.
+    password : Optional[str], default=None
+        Password to encrypt the backup file. If None, no password is used.
+    """
+
+    if not file_path.endswith(".abf"):
+        raise ValueError(
+            f"{icons.red_dot} The backup file must be in the .abf format."
+        )
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+    tmsl = {
+        "backup": {
+            "database": dataset_name,
+            "file": file_path,
+            "allowOverwrite": allow_overwrite,
+            "applyCompression": apply_compression,
+        }
+    }
+
+    if password:
+        tmsl["backup"]["password"] = password  # Add password only if provided
+
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
+    print(
+        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been backed up to the '{file_path}' location."
+    )
+
+
+@log
+def restore_semantic_model(
+    dataset: str,
+    file_path: str,
+    allow_overwrite: bool = True,
+    ignore_incompatibilities: bool = True,
+    force_restore: bool = False,
+    workspace: Optional[str | UUID] = None,
+    password: Optional[str] = None,
+):
+    """
+    `Restores <https://learn.microsoft.com/power-bi/enterprise/service-premium-backup-restore-dataset>`_ a semantic model based on a backup (.abf) file
+    within the ADLS Gen2 storage account connected to the workspace.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    file_path : str
+        The location of the backup file from which to restore the semantic model. Must end in '.abf'.
+        Example 1: file_path = 'MyModel.abf'
+        Example 2: file_path = 'MyFolder/MyModel.abf'
+    allow_overwrite : bool, default=True
+        If True, overwrites an existing semantic model of the same name. If False, the restore fails if a semantic model of the same name already exists.
+    ignore_incompatibilities : bool, default=True
+        If True, ignores incompatibilities between Azure Analysis Services and Power BI Premium.
+    force_restore : bool, default=False
+        If True, restores the semantic model with the existing semantic model unloaded and offline.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None, which resolves to the workspace of the attached lakehouse
+        or, if no lakehouse is attached, to the workspace of the notebook.
+    password : Optional[str], default=None
+        Password to decrypt the backup file. If None, no password is used.
+    """
+
+    if not file_path.endswith(".abf"):
+        raise ValueError(
+            f"{icons.red_dot} The backup file for restoring must be in the .abf format."
+        )
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    tmsl = {
+        "restore": {
+            "database": dataset,
+            "file": file_path,
+            "allowOverwrite": allow_overwrite,
+            "security": "copyAll",
+            "ignoreIncompatibilities": ignore_incompatibilities,
+        }
+    }
+
+    if password:
+        tmsl["restore"]["password"] = password
+
+    if force_restore:
+        tmsl["restore"]["forceRestore"] = force_restore
+
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
+
+    print(
+        f"{icons.green_dot} The '{dataset}' semantic model has been restored to the '{workspace_name}' workspace based on the '{file_path}' backup file."
+    )
+
+
+@log
+def copy_semantic_model_backup_file(
+    source_workspace: str,
+    target_workspace: str,
+    source_file_name: str,
+    target_file_name: str,
+    storage_account: str,
+    source_file_system: str = "power-bi-backup",
+    target_file_system: str = "power-bi-backup",
+):
+    """
+    Copies a semantic model backup file (.abf) from one location to another within an Azure storage account.
+
+    Requirements:
+        1. Must have an Azure storage account and connect it to both the source and target workspace.
+        2. Must be a 'Storage Blob Data Contributor' for the storage account.
+           Steps:
+           1. Navigate to the storage account within the Azure Portal
+           2. Navigate to 'Access Control (IAM)'
+           3. Click '+ Add' -> Add Role Assignment
+           4. Search for 'Storage Blob Data Contributor', select it and click 'Next'
+           5. Add yourself as a member, click 'Next'
+           6. Click 'Review + assign'
+
+    Parameters
+    ----------
+    source_workspace : str
+        The workspace name of the source semantic model backup file.
+    target_workspace : str
+        The workspace name of the target semantic model backup file destination.
+    source_file_name : str
+        The name of the source backup file (e.g. MyModel.abf).
+    target_file_name : str
+        The name of the target backup file (e.g. MyModel.abf).
+    storage_account : str
+        The name of the storage account.
+    source_file_system : str, default="power-bi-backup"
+        The container in which the source backup file is located.
+    target_file_system : str, default="power-bi-backup"
+        The container in which the target backup file will be saved.
+    """
+
+    suffix = ".abf"
+
+    if not source_file_name.endswith(suffix):
+        source_file_name = f"{source_file_name}{suffix}"
+    if not target_file_name.endswith(suffix):
+        target_file_name = f"{target_file_name}{suffix}"
+
+    source_path = f"/{source_workspace}/{source_file_name}"
+    target_path = f"/{target_workspace}/{target_file_name}"
+
+    client = _get_adls_client(account_name=storage_account)
+
+    source_file_system_client = client.get_file_system_client(
+        file_system=source_file_system
+    )
+    destination_file_system_client = client.get_file_system_client(
+        file_system=target_file_system
+    )
+
+    source_file_client = source_file_system_client.get_file_client(source_path)
+    destination_file_client = destination_file_system_client.get_file_client(
+        target_path
+    )
+
+    download = source_file_client.download_file()
+    file_content = download.readall()
+
+    # Upload the content to the destination file
+    destination_file_client.create_file()  # Create the destination file
+    destination_file_client.append_data(
+        data=file_content, offset=0, length=len(file_content)
+    )
+    destination_file_client.flush_data(len(file_content))
+
+    print(
+        f"{icons.green_dot} The backup file of the '{source_file_name}' semantic model from the '{source_workspace}' workspace has been copied as the '{target_file_name}' semantic model backup file within the '{target_workspace}' workspace."
+    )
+
+
+@log
+def list_backups(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows a list of backup files contained within a workspace's ADLS Gen2 storage account.
+    Requirement: An ADLS Gen2 storage account must be `connected to the workspace <https://learn.microsoft.com/power-bi/transform-model/dataflows/dataflows-azure-data-lake-storage-integration#connect-to-an-azure-data-lake-gen-2-at-a-workspace-level>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None, which resolves to the workspace of the attached lakehouse
+        or, if no lakehouse is attached, to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of backup files contained within a workspace's ADLS Gen2 storage account.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    response = _base_api(
+        request=f"/v1.0/myorg/resources?resourceType=StorageAccount&folderObjectId={workspace_id}"
+    )
+
+    v = response.json().get("value", [])
+    if not v:
+        raise ValueError(
+            f"{icons.red_dot} A storage account is not associated with the '{workspace_name}' workspace."
+        )
+    storage_account = v[0]["resourceName"]
+
+    df = list_storage_account_files(storage_account=storage_account)
+    colName = "Storage Account Name"
+    df.insert(0, colName, df.pop(colName))
+
+    return df
+
+
+@log
+def list_storage_account_files(
+    storage_account: str, container: str = "power-bi-backup"
+) -> pd.DataFrame:
+    """
+    Shows a list of files within an ADLS Gen2 storage account.
+
+    Parameters
+    ----------
+    storage_account : str
+        The name of the ADLS Gen2 storage account.
+    container : str, default='power-bi-backup'
+        The name of the container.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of files contained within an ADLS Gen2 storage account.
+    """
+
+    columns = {
+        "File Path": "str",
+        "File Size": "int",
+        "Creation Time": "datetime",
+        "Last Modified": "datetime",
+        "Expiry Time": "datetime",
+        "Encryption Scope": "str",
+    }
+
+    df = _create_dataframe(columns=columns)
+    client = _get_adls_client(storage_account)
+    fs = client.get_file_system_client(container)
+
+    rows = []
+    for x in list(fs.get_paths()):
+        if not x.is_directory:
+            rows.append(
+                {
+                    "File Path": x.name,
+                    "File Size": x.content_length,
+                    "Creation Time": x.creation_time,
+                    "Last Modified": x.last_modified,
+                    "Expiry Time": x.expiry_time,
+                    "Encryption Scope": x.encryption_scope,
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df