semantic-link-labs 0.12.8 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/report/_reportwrapper.py
@@ -0,0 +1,3100 @@
from typing import Optional, Tuple, List, Literal
from contextlib import contextmanager
from sempy._utils._log import log
from uuid import UUID
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
    resolve_item_name_and_id,
    _base_api,
    _create_dataframe,
    _update_dataframe_datatypes,
    format_dax_object_name,
    resolve_dataset_from_report,
    generate_number_guid,
    decode_payload,
    is_base64,
    generate_hex,
    get_jsonpath_value,
    set_json_value,
    remove_json_value,
    get_tenant_id,
)
from sempy_labs._dictionary_diffs import (
    diff_parts,
)
import json
import sempy_labs._icons as icons
import copy
import pandas as pd
from jsonpath_ng.ext import parse
import sempy_labs.report._report_helper as helper
from .._model_dependencies import get_measure_dependencies
import requests
import re
import base64
from pathlib import Path
from urllib.parse import urlparse
import os
import fnmatch


class ReportWrapper:
    """
    Connects to a Power BI report and retrieves its definition.

    The ReportWrapper and all functions which depend on it require the report to be in the `PBIR <https://powerbi.microsoft.com/blog/power-bi-enhanced-report-format-pbir-in-power-bi-desktop-developer-mode-preview>`_ format.

    Parameters
    ----------
    report : str | uuid.UUID
        The name or ID of the report.
    workspace : str | uuid.UUID
        The name or ID of the workspace in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    readonly: bool, default=True
        Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server.
    show_diffs: bool, default=True
        Whether to show the differences between the current report definition in the service and the new report definition.

    Returns
    -------
    None
        A connection to the report is established and the report definition is retrieved.
    """

    _report_name: str
    _report_id: str
    _workspace_name: str
    _workspace_id: str
    _readonly: bool
    _report_file_path = "definition/report.json"
    _pages_file_path = "definition/pages/pages.json"
    _report_extensions_path = "definition/reportExtensions.json"

    # Visuals
    _title_path = (
        "$.visual.visualContainerObjects.title[*].properties.text.expr.Literal.Value"
    )
    _subtitle_path = (
        "$.visual.visualContainerObjects.subTitle[*].properties.text.expr.Literal.Value"
    )
    _visual_x_path = "$.position.x"
    _visual_y_path = "$.position.y"

    @log
    def __init__(
        self,
        report: str | UUID,
        workspace: Optional[str | UUID] = None,
        readonly: bool = True,
        show_diffs: bool = True,
    ):
        (self._workspace_name, self._workspace_id) = resolve_workspace_name_and_id(
            workspace
        )
        (self._report_name, self._report_id) = resolve_item_name_and_id(
            item=report, type="Report", workspace=self._workspace_id
        )
        self._readonly = readonly
        self._show_diffs = show_diffs

        result = _base_api(
            request=f"/v1/workspaces/{self._workspace_id}/items/{self._report_id}/getDefinition",
            method="post",
            status_codes=None,
            lro_return_json=True,
        )

        # def is_zip_file(data: bytes) -> bool:
        #     return data.startswith(b"PK\x03\x04")

        # Check that the report is in the PBIR format
        parts = result.get("definition", {}).get("parts", [])
        if self._report_file_path not in [p.get("path") for p in parts]:
            self.format = "PBIR-Legacy"
        else:
            self.format = "PBIR"
        self._report_definition = {"parts": []}
        for part in parts:
            path = part.get("path")
            payload = part.get("payload")

            # decoded_bytes = base64.b64decode(payload)
            # decoded_payload = json.loads(_decode_b64(payload))
            # try:
            #     decoded_payload = json.loads(base64.b64decode(payload).decode("utf-8"))
            # except Exception:
            #     decoded_payload = base64.b64decode(payload)
            decoded_payload = decode_payload(payload)

            # if is_zip_file(decoded_bytes):
            #     merged_payload = {}
            #     with zipfile.ZipFile(BytesIO(decoded_bytes)) as zip_file:
            #         for filename in zip_file.namelist():
            #             if filename.endswith(".json"):
            #                 with zip_file.open(filename) as f:
            #                     content = f.read()
            #                     part_data = json.loads(content.decode("utf-8"))

            #                     if isinstance(part_data, dict):
            #                         merged_payload.update(part_data)
            #                     else:
            #                         # For non-dict top-level json (rare), store under filename
            #                         merged_payload[filename] = part_data

            #     self._report_definition["parts"].append(
            #         {"path": path, "payload": merged_payload}
            #     )
            # else:
            #     decoded_payload = json.loads(decoded_bytes.decode("utf-8"))
            self._report_definition["parts"].append(
                {"path": path, "payload": decoded_payload}
            )

        self._current_report_definition = copy.deepcopy(self._report_definition)

        # self.report = self.Report(self)

        helper.populate_custom_visual_display_names()

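For illustration, a minimal usage sketch of the class above (not part of the packaged file). It assumes ReportWrapper is re-exported from sempy_labs.report and that a PBIR-format report named "Sales Report" exists in an "Analytics" workspace; both names are hypothetical.

    from sempy_labs.report import ReportWrapper

    # readonly=False allows edits made through the wrapper to be saved back to the service,
    # per the readonly parameter described in the class docstring above.
    rpt = ReportWrapper(report="Sales Report", workspace="Analytics", readonly=False)
    print(rpt.format)  # "PBIR" or "PBIR-Legacy"
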
    def _ensure_pbir(self):

        if self.format != "PBIR":
            raise NotImplementedError(
                f"{icons.red_dot} This ReportWrapper function requires the report to be in the PBIR format."
                "See here for details: https://powerbi.microsoft.com/blog/power-bi-enhanced-report-format-pbir-in-power-bi-desktop-developer-mode-preview/"
            )

    # Basic functions
    def get(
        self,
        file_path: str,
        json_path: Optional[str] = None,
        verbose: bool = True,
    ) -> dict | List[Tuple[str, dict]]:
        """
        Get the json content of the specified report definition file.

        Parameters
        ----------
        file_path : str
            The path of the report definition file. For example: "definition/pages/pages.json". You may also use wildcards. For example: "definition/pages/*/page.json".
        json_path : str, default=None
            The json path to the specific part of the file to be retrieved. If None, the entire file content is returned.
        verbose : bool, default=True
            If True, prints messages about the retrieval process. If False, suppresses these messages.

        Returns
        -------
        dict | List[Tuple[str, dict]]
            The json content of the specified report definition file.
        """

        parts = self._report_definition.get("parts")

        # Find matching parts
        if "*" in file_path:
            results = []
            matching_parts = [
                (part.get("path"), part.get("payload"))
                for part in parts
                if fnmatch.fnmatch(part.get("path"), file_path)
            ]

            if not matching_parts:
                if verbose:
                    print(
                        f"{icons.red_dot} No files match the wildcard path '{file_path}'."
                    )
                return results

            results = []
            for path, payload in matching_parts:
                if not json_path:
                    results.append((path, payload))
                elif not isinstance(payload, dict):
                    raise ValueError(
                        f"{icons.red_dot} The payload of the file '{path}' is not a dictionary."
                    )
                else:
                    jsonpath_expr = parse(json_path)
                    matches = jsonpath_expr.find(payload)
                    if matches:
                        results.append((path, matches[0].value))
                    # else:
                    #     raise ValueError(
                    #         f"{icons.red_dot} No match found for '{json_path}' in '{path}'."
                    #     )
            if not results and verbose:
                print(
                    f"{icons.red_dot} No match found for '{json_path}' in any of the files matching the wildcard path '{file_path}'."
                )
            return results

        # Exact path match
        for part in parts:
            if part.get("path") == file_path:
                payload = part.get("payload")
                if not json_path:
                    return payload
                elif not isinstance(payload, dict):
                    raise ValueError(
                        f"{icons.red_dot} The payload of the file '{file_path}' is not a dictionary."
                    )
                else:
                    jsonpath_expr = parse(json_path)
                    matches = jsonpath_expr.find(payload)
                    if matches:
                        return matches[0].value
                    elif verbose:
                        print(f"{icons.red_dot} No match found for '{json_path}'.")

        if verbose:
            print(f"{icons.red_dot} File '{file_path}' not found in report definition.")

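A short sketch of how get() might be called on the wrapper instance from the earlier example (illustrative only; the paths follow the docstring examples above).

    # Whole file by exact path:
    pages_json = rpt.get(file_path="definition/pages/pages.json")

    # A single value via JSONPath:
    active_page = rpt.get(
        file_path="definition/pages/pages.json", json_path="$.activePageName"
    )

    # Wildcards return a list of (path, payload) tuples:
    all_page_files = rpt.get(file_path="definition/pages/*/page.json")
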
    def add(self, file_path: str, payload: dict | bytes):
        """
        Add a new file to the report definition.

        Parameters
        ----------
        file_path : str
            The path of the file to be added. For example: "definition/pages/pages.json".
        payload : dict | bytes
            The json content of the file to be added. This can be a dictionary or a base64 encoded string.
        """

        decoded_payload = decode_payload(payload)

        if file_path in self.list_paths().get("Path").values:
            raise ValueError(
                f"{icons.red_dot} Cannot add the '{file_path}' file as this file path already exists in the report definition."
            )

        self._report_definition["parts"].append(
            {"path": file_path, "payload": decoded_payload}
        )

    def remove(self, file_path: str, json_path: Optional[str] = None, verbose=True):
        """
        Removes a file from the report definition.

        Parameters
        ----------
        file_path : str
            The path of the file to be removed. For example: "definition/pages/fjdis323484/page.json".
        json_path : str, default=None
            The json path to the specific part of the file to be removed. If None, the entire file is removed. Wildcards are supported (i.e. "definition/pages/*/page.json").
        verbose : bool, default=True
            If True, prints messages about the removal process. If False, suppresses these messages.
        """

        parts = self._report_definition.get("parts")
        matching_parts = []

        if "*" in file_path:
            matching_parts = [
                part for part in parts if fnmatch.fnmatch(part.get("path"), file_path)
            ]
        else:
            matching_parts = [part for part in parts if part.get("path") == file_path]

        if not matching_parts:
            raise ValueError(
                f"{icons.red_dot} No file(s) found for path '{file_path}'."
            )

        for part in matching_parts:
            path = part.get("path")
            payload = part.get("payload")

            if not json_path:
                self._report_definition["parts"].remove(part)
                if verbose:
                    print(
                        f"{icons.green_dot} The file '{path}' has been removed from the report definition."
                    )
            else:
                remove_json_value(
                    path=path, payload=payload, json_path=json_path, verbose=verbose
                )

    def update(self, file_path: str, payload: dict | bytes):
        """
        Updates the payload of a file in the report definition.

        Parameters
        ----------
        file_path : str
            The path of the file to be updated. For example: "definition/pages/pages.json".
        payload : dict | bytes
            The new json content of the file to be updated. This can be a dictionary or a base64 encoded string.
        """

        decoded_payload = decode_payload(payload)

        for part in self._report_definition.get("parts"):
            if part.get("path") == file_path:
                part["payload"] = decoded_payload
                # if not self._readonly:
                #     print(
                #         f"The file '{file_path}' has been updated in the report definition."
                #     )
                return

        raise ValueError(
            f"The '{file_path}' file was not found in the report definition."
        )

    def set_json(self, file_path: str, json_path: str, json_value: str | dict | List):
        """
        Sets the JSON value of a file in the report definition. If the json_path does not exist, it will be created.

        Parameters
        ----------
        file_path : str
            The file path of the JSON file to be updated. For example: "definition/pages/ReportSection1/visuals/a1d8f99b81dcc2d59035/visual.json". Also supports wildcards.
        json_path : str
            The JSON path to the value to be updated or created. This must be a valid JSONPath expression.
            Examples:
            "$.objects.outspace"
            "$.hi.def[*].vv"
        json_value : str | dict | List
            The new value to be set at the specified JSON path. This can be a string, dictionary, or list.
        """

        files = self.get(file_path=file_path)

        if isinstance(files, dict):
            files = [(file_path, files)]

        for file in files:
            path = file[0]
            payload = file[1]
            new_payload = set_json_value(
                payload=payload, json_path=json_path, json_value=json_value
            )

            self.update(file_path=path, payload=new_payload)

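The add/remove/update/set_json methods above all operate on the in-memory report definition. A hedged sketch continuing the earlier rpt example (the page folder name is hypothetical):

    page_file = "definition/pages/ReportSection1/page.json"  # hypothetical path

    # Read, modify and write back an entire file...
    payload = rpt.get(file_path=page_file)
    payload["displayName"] = "Overview"
    rpt.update(file_path=page_file, payload=payload)

    # ...or target one value directly with a JSONPath expression.
    rpt.set_json(file_path=page_file, json_path="$.displayName", json_value="Overview")
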
    def list_paths(self) -> pd.DataFrame:
        """
        List all file paths in the report definition.

        Returns
        -------
        pandas.DataFrame
            A pandas dataframe containing a list of all paths in the report definition.
        """

        existing_paths = [
            part.get("path") for part in self._report_definition.get("parts")
        ]
        return pd.DataFrame(existing_paths, columns=["Path"])

    def __all_pages(self):

        self._ensure_pbir()

        return [
            o
            for o in self._report_definition.get("parts")
            if o.get("path").endswith("/page.json")
        ]

    def __all_visuals(self):

        self._ensure_pbir()

        return [
            o
            for o in self._report_definition.get("parts")
            if o.get("path").endswith("/visual.json")
        ]

    # Helper functions
    def __resolve_page_list(self, page: Optional[str | List[str]] = None) -> List[str]:

        if isinstance(page, str):
            page = [page]

        # Resolve page list
        return (
            [self.resolve_page_name(p) for p in page]
            if page
            else [
                p["payload"]["name"]
                for p in self.__all_pages()
                if "payload" in p and "name" in p["payload"]
            ]
        )

    def _get_url(
        self, page_name: Optional[str] = None, visual_name: Optional[str] = None
    ) -> str:
        """
        Gets the URL of the report. If specified, gets the URL of the specified page.

        Parameters
        ----------
        page_name : str, default=None
            The name of the page. If None, gets the URL of the report.
            If specified, gets the URL of the specified page.

        Returns
        -------
        str
            The URL of the report or the specified page.
        """

        url = f"https://app.powerbi.com/groups/{self._workspace_id}/reports/{self._report_id}"

        if page_name:
            if page_name in [page["payload"]["name"] for page in self.__all_pages()]:
                pass
            else:
                page_name = self.resolve_page_name(page_name)
            url += f"/{page_name}"

        if visual_name:
            tenant_id = get_tenant_id()
            url += f"?ctid={tenant_id}&pbi_source=shareVisual&visual={visual_name}"

        return url

    def __resolve_page_name_and_display_name_file_path(
        self, page: str, return_error: bool = True
    ) -> Tuple[str, str, str]:

        self._ensure_pbir()
        page_map = {
            p["path"]: [p["payload"]["name"], p["payload"]["displayName"]]
            for p in self._report_definition.get("parts", [])
            if p.get("path", "").endswith("/page.json") and "payload" in p
        }

        # Build lookup: page_id → (path, display_name)
        id_lookup = {v[0]: (k, v[1]) for k, v in page_map.items()}

        # Build lookup: display_name → (path, page_id)
        name_lookup = {v[1]: (k, v[0]) for k, v in page_map.items()}

        if page in id_lookup:
            path, display_name = id_lookup[page]
            return path, page, display_name
        elif page in name_lookup:
            path, page_id = name_lookup[page]
            return path, page_id, page
        elif return_error:
            raise ValueError(
                f"{icons.red_dot} Invalid page display name. The '{page}' page does not exist in the '{self._report_name}' report within the '{self._workspace_name}' workspace."
            )
        else:
            return (None, page, page)

    def _resolve_page_name_and_display_name(
        self, page: str, return_error: bool = True
    ) -> Tuple[str, str]:
        """
        Obtains the page name, page display name for a given page in a report.

        Parameters
        ----------
        page : str
            The page name or display name.
        return_error : bool, default=True
            Whether to raise an error if the page does not exist.

        Returns
        -------
        Tuple[str, str]
            The page name and display name.
        """

        (_, page_id, page_name) = self.__resolve_page_name_and_display_name_file_path(
            page,
            return_error=return_error,
        )

        return (page_id, page_name)

    def resolve_page_name(self, page_display_name: str) -> str:
        """
        Obtains the page name, page display name, and the file path for a given page in a report.

        Parameters
        ----------
        page_display_name : str
            The display name of the page of the report.

        Returns
        -------
        str
            The page name.
        """

        (path, page_id, page_name) = (
            self.__resolve_page_name_and_display_name_file_path(page_display_name)
        )
        return page_id

    def resolve_page_display_name(self, page_name: str) -> str:
        """
        Obtains the page display name.

        Parameters
        ----------
        page_name : str
            The name of the page of the report.

        Returns
        -------
        str
            The page display name.
        """

        (path, page_id, page_name) = (
            self.__resolve_page_name_and_display_name_file_path(page_name)
        )
        return page_name

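Resolving between internal page names and display names, continuing the same illustrative example (the "Overview" display name is hypothetical):

    page_id = rpt.resolve_page_name("Overview")         # display name -> internal name
    display = rpt.resolve_page_display_name(page_id)     # internal name -> display name
    print(rpt._get_url(page_name=page_id))               # deep link to that page
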
    def __add_to_registered_resources(self, name: str, path: str, type: str):

        type = type.capitalize()

        report_file = self.get(file_path=self._report_file_path)
        rp_names = [rp.get("name") for rp in report_file.get("resourcePackages")]

        new_item = {"name": name, "path": path, "type": type}
        if "RegisteredResources" not in rp_names:
            res = {
                "name": "RegisteredResources",
                "type": "RegisteredResources",
                "items": [new_item],
            }
            report_file.get("resourcePackages").append(res)
        else:
            for rp in report_file.get("resourcePackages"):
                if rp.get("name") == "RegisteredResources":
                    for item in rp.get("items"):
                        item_name = item.get("name")
                        item_type = item.get("type")
                        item_path = item.get("path")
                        if (
                            item_name == name
                            and item_type == type
                            and item_path == path
                        ):
                            print(
                                f"{icons.info} The '{item_name}' {type.lower()} already exists in the report definition."
                            )
                            raise ValueError()

                    # Add the new item to the existing RegisteredResources
                    rp["items"].append(new_item)

        self.update(file_path=self._report_file_path, payload=report_file)

    def _add_extended(self, dataframe):

        from sempy_labs.tom import connect_semantic_model

        dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
            resolve_dataset_from_report(
                report=self._report_id, workspace=self._workspace_id
            )
        )

        report_level_measures = list(
            self.list_report_level_measures()["Measure Name"].values
        )
        with connect_semantic_model(
            dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
        ) as tom:
            measure_names = {m.Name for m in tom.all_measures()}
            measure_names.update(report_level_measures)
            column_names = {
                format_dax_object_name(c.Parent.Name, c.Name) for c in tom.all_columns()
            }
            hierarchy_names = {
                format_dax_object_name(h.Parent.Name, h.Name)
                for h in tom.all_hierarchies()
            }

        # Vectorized checks
        def is_valid(row):
            obj_type = row["Object Type"]
            obj_name = row["Object Name"]
            if obj_type == "Measure":
                return obj_name in measure_names
            elif obj_type == "Column":
                return (
                    format_dax_object_name(row["Table Name"], obj_name) in column_names
                )
            elif obj_type == "Hierarchy":
                return (
                    format_dax_object_name(row["Table Name"], obj_name)
                    in hierarchy_names
                )
            return False

        dataframe["Valid Semantic Model Object"] = dataframe.apply(is_valid, axis=1)
        return dataframe

    def _visual_page_mapping(self) -> dict:
        self._ensure_pbir()

        page_mapping = {}
        visual_mapping = {}

        for p in self.__all_pages():
            path = p.get("path")
            payload = p.get("payload")
            pattern_page = r"/pages/(.*?)/page.json"
            page_name = re.search(pattern_page, path).group(1)
            page_id = payload.get("name")
            page_display = payload.get("displayName")
            page_mapping[page_name] = (page_id, page_display)

        for v in self.__all_visuals():
            path = v.get("path")
            payload = v.get("payload")
            pattern_page = r"/pages/(.*?)/visuals/"
            page_name = re.search(pattern_page, path).group(1)
            visual_mapping[path] = (
                page_mapping.get(page_name)[0],
                page_mapping.get(page_name)[1],
            )

        return visual_mapping

    # List functions
    def list_custom_visuals(self) -> pd.DataFrame:
        """
        Shows a list of all custom visuals used in the report.

        Returns
        -------
        pandas.DataFrame
            A pandas dataframe containing a list of all the custom visuals used in the report.
        """
        self._ensure_pbir()

        columns = {
            "Custom Visual Name": "str",
            "Custom Visual Display Name": "str",
            "Is Public": "bool",
            "Used in Report": "bool",
        }

        df = _create_dataframe(columns=columns)

        visuals = []
        rp = self.get(
            file_path=self._report_file_path,
            json_path="$.resourcePackages",
            verbose=False,
        )

        if rp:
            visuals += [
                {"Custom Visual Name": item.get("name"), "Is Public": False}
                for item in rp
                if item.get("type") == "CustomVisual"
            ]

        # Load public custom visuals
        public_custom_visuals = (
            self.get(
                file_path=self._report_file_path,
                json_path="$.publicCustomVisuals",
                verbose=False,
            )
            or []
        )

        visuals += [
            {
                "Custom Visual Name": (
                    item.get("name") if isinstance(item, dict) else item
                ),
                "Is Public": True,
            }
            for item in public_custom_visuals
        ]

        if visuals:
            df = pd.DataFrame(visuals, columns=list(columns.keys()))

            # df["Custom Visual Name"] = report_file.get("publicCustomVisuals")
            df["Custom Visual Display Name"] = df["Custom Visual Name"].apply(
                lambda x: helper.vis_type_mapping.get(x, x)
            )

            visual_types = set()
            for v in self.__all_visuals():
                payload = v.get("payload", {})
                visual = payload.get("visual", {})
                visual_type = visual.get("visualType")
                if visual_type:
                    visual_types.add(visual_type)

            df["Used in Report"] = df["Custom Visual Name"].isin(visual_types)

            _update_dataframe_datatypes(dataframe=df, column_map=columns)

        return df

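A small sketch of filtering the dataframe returned by list_custom_visuals(), for example to spot public custom visuals that are registered but never placed on a page (illustrative only):

    cv = rpt.list_custom_visuals()
    unused = cv[cv["Is Public"] & ~cv["Used in Report"]]
    print(unused[["Custom Visual Name", "Custom Visual Display Name"]])
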
    def list_report_filters(self, extended: bool = False) -> pd.DataFrame:
        """
        Shows a list of all report filters used in the report.

        Parameters
        ----------
        extended : bool, default=False
            If True, adds an extra column called 'Valid Semantic Model Object' which identifies whether the semantic model object used
            in the report exists in the semantic model which feeds data to the report.

        Returns
        -------
        pandas.DataFrame
            A pandas dataframe containing a list of all the report filters used in the report.
        """

        self._ensure_pbir()

        report_file = self.get(file_path=self._report_file_path)

        columns = {
            "Filter Name": "str",
            "Type": "str",
            "Table Name": "str",
            "Object Name": "str",
            "Object Type": "str",
            "Hidden": "bool",
            "Locked": "bool",
            "How Created": "str",
            "Used": "bool",
        }
        df = _create_dataframe(columns=columns)

        rows = []

        if "filterConfig" in report_file:
            for flt in report_file.get("filterConfig", {}).get("filters", {}):
                filter_name = flt.get("name")
                how_created = flt.get("howCreated")
                locked = flt.get("isLockedInViewMode", False)
                hidden = flt.get("isHiddenInViewMode", False)
                filter_type = flt.get("type", "Basic")
                filter_used = True if "Where" in flt.get("filter", {}) else False

                entity_property_pairs = helper.find_entity_property_pairs(flt)

                for object_name, properties in entity_property_pairs.items():
                    rows.append(
                        {
                            "Filter Name": filter_name,
                            "Type": filter_type,
                            "Table Name": properties[0],
                            "Object Name": object_name,
                            "Object Type": properties[1],
                            "Hidden": hidden,
                            "Locked": locked,
                            "How Created": how_created,
                            "Used": filter_used,
                        }
                    )

        if rows:
            df = pd.DataFrame(rows, columns=list(columns.keys()))
            _update_dataframe_datatypes(dataframe=df, column_map=columns)

        if extended:
            df = self._add_extended(dataframe=df)

        return df

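With extended=True the filter listing can be cross-checked against the semantic model, e.g. to find report filters that reference objects no longer present in the model (illustrative sketch):

    flt = rpt.list_report_filters(extended=True)
    broken = flt[~flt["Valid Semantic Model Object"]]
    print(broken[["Filter Name", "Table Name", "Object Name", "Object Type"]])
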
|
|
819
|
+
def list_page_filters(self, extended: bool = False) -> pd.DataFrame:
|
|
820
|
+
"""
|
|
821
|
+
Shows a list of all page filters used in the report.
|
|
822
|
+
|
|
823
|
+
Parameters
|
|
824
|
+
----------
|
|
825
|
+
extended : bool, default=False
|
|
826
|
+
If True, adds an extra column called 'Valid Semantic Model Object' which identifies whether the semantic model object used
|
|
827
|
+
in the report exists in the semantic model which feeds data to the report.
|
|
828
|
+
|
|
829
|
+
Returns
|
|
830
|
+
-------
|
|
831
|
+
pandas.DataFrame
|
|
832
|
+
A pandas dataframe containing a list of all the page filters used in the report.
|
|
833
|
+
"""
|
|
834
|
+
self._ensure_pbir()
|
|
835
|
+
|
|
836
|
+
columns = {
|
|
837
|
+
"Page Name": "str",
|
|
838
|
+
"Page Display Name": "str",
|
|
839
|
+
"Filter Name": "str",
|
|
840
|
+
"Type": "str",
|
|
841
|
+
"Table Name": "str",
|
|
842
|
+
"Object Name": "str",
|
|
843
|
+
"Object Type": "str",
|
|
844
|
+
"Hidden": "bool",
|
|
845
|
+
"Locked": "bool",
|
|
846
|
+
"How Created": "str",
|
|
847
|
+
"Used": "bool",
|
|
848
|
+
}
|
|
849
|
+
df = _create_dataframe(columns=columns)
|
|
850
|
+
|
|
851
|
+
rows = []
|
|
852
|
+
for p in self.__all_pages():
|
|
853
|
+
payload = p.get("payload")
|
|
854
|
+
page_id = payload.get("name")
|
|
855
|
+
page_display = payload.get("displayName")
|
|
856
|
+
|
|
857
|
+
if "filterConfig" in payload:
|
|
858
|
+
for flt in payload.get("filterConfig", {}).get("filters", {}):
|
|
859
|
+
filter_name = flt.get("name")
|
|
860
|
+
how_created = flt.get("howCreated")
|
|
861
|
+
locked = flt.get("isLockedInViewMode", False)
|
|
862
|
+
hidden = flt.get("isHiddenInViewMode", False)
|
|
863
|
+
filter_type = flt.get("type", "Basic")
|
|
864
|
+
filter_used = True if "Where" in flt.get("filter", {}) else False
|
|
865
|
+
|
|
866
|
+
entity_property_pairs = helper.find_entity_property_pairs(flt)
|
|
867
|
+
|
|
868
|
+
for object_name, properties in entity_property_pairs.items():
|
|
869
|
+
rows.append(
|
|
870
|
+
{
|
|
871
|
+
"Page Name": page_id,
|
|
872
|
+
"Page Display Name": page_display,
|
|
873
|
+
"Filter Name": filter_name,
|
|
874
|
+
"Type": filter_type,
|
|
875
|
+
"Table Name": properties[0],
|
|
876
|
+
"Object Name": object_name,
|
|
877
|
+
"Object Type": properties[1],
|
|
878
|
+
"Hidden": hidden,
|
|
879
|
+
"Locked": locked,
|
|
880
|
+
"How Created": how_created,
|
|
881
|
+
"Used": filter_used,
|
|
882
|
+
"Page URL": self._get_url(page_name=page_id),
|
|
883
|
+
}
|
|
884
|
+
)
|
|
885
|
+
|
|
886
|
+
if rows:
|
|
887
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
888
|
+
_update_dataframe_datatypes(dataframe=df, column_map=columns)
|
|
889
|
+
|
|
890
|
+
if extended:
|
|
891
|
+
df = self._add_extended(dataframe=df)
|
|
892
|
+
|
|
893
|
+
return df
|
|
894
|
+
|
|
895
|
+
def list_visual_filters(self, extended: bool = False) -> pd.DataFrame:
|
|
896
|
+
"""
|
|
897
|
+
Shows a list of all visual filters used in the report.
|
|
898
|
+
|
|
899
|
+
Parameters
|
|
900
|
+
----------
|
|
901
|
+
extended : bool, default=False
|
|
902
|
+
If True, adds an extra column called 'Valid Semantic Model Object' which identifies whether the semantic model object used
|
|
903
|
+
in the report exists in the semantic model which feeds data to the report.
|
|
904
|
+
|
|
905
|
+
Returns
|
|
906
|
+
-------
|
|
907
|
+
pandas.DataFrame
|
|
908
|
+
A pandas dataframe containing a list of all the visual filters used in the report.
|
|
909
|
+
"""
|
|
910
|
+
self._ensure_pbir()
|
|
911
|
+
|
|
912
|
+
columns = {
|
|
913
|
+
"Page Name": "str",
|
|
914
|
+
"Page Display Name": "str",
|
|
915
|
+
"Visual Name": "str",
|
|
916
|
+
"Filter Name": "str",
|
|
917
|
+
"Type": "str",
|
|
918
|
+
"Table Name": "str",
|
|
919
|
+
"Object Name": "str",
|
|
920
|
+
"Object Type": "str",
|
|
921
|
+
"Hidden": "bool",
|
|
922
|
+
"Locked": "bool",
|
|
923
|
+
"How Created": "str",
|
|
924
|
+
"Used": "bool",
|
|
925
|
+
}
|
|
926
|
+
df = _create_dataframe(columns=columns)
|
|
927
|
+
|
|
928
|
+
visual_mapping = self._visual_page_mapping()
|
|
929
|
+
|
|
930
|
+
rows = []
|
|
931
|
+
for v in self.__all_visuals():
|
|
932
|
+
path = v.get("path")
|
|
933
|
+
payload = v.get("payload")
|
|
934
|
+
page_id = visual_mapping.get(path)[0]
|
|
935
|
+
page_display = visual_mapping.get(path)[1]
|
|
936
|
+
visual_name = payload.get("name")
|
|
937
|
+
|
|
938
|
+
if "filterConfig" in payload:
|
|
939
|
+
for flt in payload.get("filterConfig", {}).get("filters", {}):
|
|
940
|
+
filter_name = flt.get("name")
|
|
941
|
+
how_created = flt.get("howCreated")
|
|
942
|
+
locked = flt.get("isLockedInViewMode", False)
|
|
943
|
+
hidden = flt.get("isHiddenInViewMode", False)
|
|
944
|
+
filter_type = flt.get("type", "Basic")
|
|
945
|
+
filter_used = True if "Where" in flt.get("filter", {}) else False
|
|
946
|
+
|
|
947
|
+
entity_property_pairs = helper.find_entity_property_pairs(flt)
|
|
948
|
+
|
|
949
|
+
for object_name, properties in entity_property_pairs.items():
|
|
950
|
+
rows.append(
|
|
951
|
+
{
|
|
952
|
+
"Page Name": page_id,
|
|
953
|
+
"Page Display Name": page_display,
|
|
954
|
+
"Visual Name": visual_name,
|
|
955
|
+
"Filter Name": filter_name,
|
|
956
|
+
"Type": filter_type,
|
|
957
|
+
"Table Name": properties[0],
|
|
958
|
+
"Object Name": object_name,
|
|
959
|
+
"Object Type": properties[1],
|
|
960
|
+
"Hidden": hidden,
|
|
961
|
+
"Locked": locked,
|
|
962
|
+
"How Created": how_created,
|
|
963
|
+
"Used": filter_used,
|
|
964
|
+
}
|
|
965
|
+
)
|
|
966
|
+
|
|
967
|
+
if rows:
|
|
968
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
969
|
+
_update_dataframe_datatypes(dataframe=df, column_map=columns)
|
|
970
|
+
|
|
971
|
+
if extended:
|
|
972
|
+
df = self._add_extended(dataframe=df)
|
|
973
|
+
|
|
974
|
+
return df
|
|
975
|
+
|
|
976
|
+
def list_visual_interactions(self) -> pd.DataFrame:
|
|
977
|
+
"""
|
|
978
|
+
Shows a list of all modified `visual interactions <https://learn.microsoft.com/power-bi/create-reports/service-reports-visual-interactions?tabs=powerbi-desktop>`_ used in the report.
|
|
979
|
+
|
|
980
|
+
Returns
|
|
981
|
+
-------
|
|
982
|
+
pandas.DataFrame
|
|
983
|
+
A pandas dataframe containing a list of all modified visual interactions used in the report.
|
|
984
|
+
"""
|
|
985
|
+
self._ensure_pbir()
|
|
986
|
+
|
|
987
|
+
columns = {
|
|
988
|
+
"Page Name": "str",
|
|
989
|
+
"Page Display Name": "str",
|
|
990
|
+
"Source Visual Name": "str",
|
|
991
|
+
"Target Visual Name": "str",
|
|
992
|
+
"Type": "str",
|
|
993
|
+
}
|
|
994
|
+
df = _create_dataframe(columns=columns)
|
|
995
|
+
|
|
996
|
+
rows = []
|
|
997
|
+
for p in self.__all_pages():
|
|
998
|
+
payload = p.get("payload")
|
|
999
|
+
page_name = payload.get("name")
|
|
1000
|
+
page_display = payload.get("displayName")
|
|
1001
|
+
|
|
1002
|
+
for vizInt in payload.get("visualInteractions", []):
|
|
1003
|
+
sourceVisual = vizInt.get("source")
|
|
1004
|
+
targetVisual = vizInt.get("target")
|
|
1005
|
+
vizIntType = vizInt.get("type")
|
|
1006
|
+
|
|
1007
|
+
rows.append(
|
|
1008
|
+
{
|
|
1009
|
+
"Page Name": page_name,
|
|
1010
|
+
"Page Display Name": page_display,
|
|
1011
|
+
"Source Visual Name": sourceVisual,
|
|
1012
|
+
"Target Visual Name": targetVisual,
|
|
1013
|
+
"Type": vizIntType,
|
|
1014
|
+
}
|
|
1015
|
+
)
|
|
1016
|
+
|
|
1017
|
+
if rows:
|
|
1018
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1019
|
+
|
|
1020
|
+
return df
|
|
1021
|
+
|
|
1022
|
+
def list_visual_calculations(self) -> pd.DataFrame:
|
|
1023
|
+
"""
|
|
1024
|
+
Shows a list of all `visual calculations <https://learn.microsoft.com/power-bi/transform-model/desktop-visual-calculations-overview>`_.
|
|
1025
|
+
|
|
1026
|
+
Returns
|
|
1027
|
+
-------
|
|
1028
|
+
pandas.DataFrame
|
|
1029
|
+
A pandas dataframe containing a list of all visual calculations within the report.
|
|
1030
|
+
"""
|
|
1031
|
+
|
|
1032
|
+
self._ensure_pbir()
|
|
1033
|
+
|
|
1034
|
+
columns = {
|
|
1035
|
+
"Page Display Name": "str",
|
|
1036
|
+
"Visual Name": "str",
|
|
1037
|
+
"Name": "str",
|
|
1038
|
+
"Language": "str",
|
|
1039
|
+
"Expression": "str",
|
|
1040
|
+
}
|
|
1041
|
+
|
|
1042
|
+
df = _create_dataframe(columns=columns)
|
|
1043
|
+
visual_mapping = self._visual_page_mapping()
|
|
1044
|
+
|
|
1045
|
+
rows = []
|
|
1046
|
+
for v in self.__all_visuals():
|
|
1047
|
+
path = v.get("path")
|
|
1048
|
+
payload = v.get("payload")
|
|
1049
|
+
page_name = visual_mapping.get(path)[0]
|
|
1050
|
+
page_display_name = visual_mapping.get(path)[1]
|
|
1051
|
+
visual_name = payload.get("name")
|
|
1052
|
+
matches = parse("$..field.NativeVisualCalculation").find(payload)
|
|
1053
|
+
if matches:
|
|
1054
|
+
for match in matches:
|
|
1055
|
+
m = match.value
|
|
1056
|
+
rows.append(
|
|
1057
|
+
{
|
|
1058
|
+
"Page Display Name": page_display_name,
|
|
1059
|
+
"Page Name": page_name,
|
|
1060
|
+
"Visual Name": visual_name,
|
|
1061
|
+
"Name": m.get("Name"),
|
|
1062
|
+
"Language": m.get("Language"),
|
|
1063
|
+
"Expression": m.get("Expression"),
|
|
1064
|
+
}
|
|
1065
|
+
)
|
|
1066
|
+
|
|
1067
|
+
if rows:
|
|
1068
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1069
|
+
|
|
1070
|
+
return df
|
|
1071
|
+
|
|
1072
|
+
def list_pages(self) -> pd.DataFrame:
|
|
1073
|
+
"""
|
|
1074
|
+
Shows a list of all pages in the report.
|
|
1075
|
+
|
|
1076
|
+
Returns
|
|
1077
|
+
-------
|
|
1078
|
+
pandas.DataFrame
|
|
1079
|
+
A pandas dataframe containing a list of all pages in the report.
|
|
1080
|
+
"""
|
|
1081
|
+
self._ensure_pbir()
|
|
1082
|
+
|
|
1083
|
+
columns = {
|
|
1084
|
+
"File Path": "str",
|
|
1085
|
+
"Page Name": "str",
|
|
1086
|
+
"Page Display Name": "str",
|
|
1087
|
+
"Hidden": "bool",
|
|
1088
|
+
"Active": "bool",
|
|
1089
|
+
"Width": "int",
|
|
1090
|
+
"Height": "int",
|
|
1091
|
+
"Display Option": "str",
|
|
1092
|
+
"Type": "str",
|
|
1093
|
+
"Alignment": "str",
|
|
1094
|
+
"Drillthrough Target Page": "bool",
|
|
1095
|
+
"Visual Count": "int",
|
|
1096
|
+
"Data Visual Count": "int",
|
|
1097
|
+
"Visible Visual Count": "int",
|
|
1098
|
+
"Page Filter Count": "int",
|
|
1099
|
+
"Page URL": "str",
|
|
1100
|
+
}
|
|
1101
|
+
df = _create_dataframe(columns=columns)
|
|
1102
|
+
|
|
1103
|
+
page = self.get(file_path=self._pages_file_path)
|
|
1104
|
+
active_page = page.get("activePageName")
|
|
1105
|
+
|
|
1106
|
+
dfV = self.list_visuals()
|
|
1107
|
+
|
|
1108
|
+
rows = []
|
|
1109
|
+
for p in self.__all_pages():
|
|
1110
|
+
file_path = p.get("path")
|
|
1111
|
+
page_prefix = file_path[0:-9]
|
|
1112
|
+
payload = p.get("payload")
|
|
1113
|
+
page_name = payload.get("name")
|
|
1114
|
+
height = payload.get("height")
|
|
1115
|
+
width = payload.get("width")
|
|
1116
|
+
|
|
1117
|
+
# Alignment
|
|
1118
|
+
alignment_value = get_jsonpath_value(
|
|
1119
|
+
data=payload,
|
|
1120
|
+
path="$.objects.displayArea[*].properties.verticalAlignment.expr.Literal.Value",
|
|
1121
|
+
default="Top",
|
|
1122
|
+
remove_quotes=True,
|
|
1123
|
+
)
|
|
1124
|
+
|
|
1125
|
+
# Drillthrough
|
|
1126
|
+
matches = parse("$.filterConfig.filters[*].howCreated").find(payload)
|
|
1127
|
+
how_created_values = [match.value for match in matches]
|
|
1128
|
+
drill_through = any(value == "Drillthrough" for value in how_created_values)
|
|
1129
|
+
|
|
1130
|
+
visual_count = len(
|
|
1131
|
+
[
|
|
1132
|
+
v
|
|
1133
|
+
for v in self._report_definition.get("parts")
|
|
1134
|
+
if v.get("path").endswith("/visual.json")
|
|
1135
|
+
and v.get("path").startswith(page_prefix)
|
|
1136
|
+
]
|
|
1137
|
+
)
|
|
1138
|
+
|
|
1139
|
+
data_visual_count = len(
|
|
1140
|
+
dfV[(dfV["Page Name"] == page_name) & (dfV["Data Visual"])]
|
|
1141
|
+
)
|
|
1142
|
+
visible_visual_count = len(
|
|
1143
|
+
dfV[(dfV["Page Name"] == page_name) & (dfV["Hidden"] == False)]
|
|
1144
|
+
)
|
|
1145
|
+
|
|
1146
|
+
# Page Filter Count
|
|
1147
|
+
page_filter_count = len(
|
|
1148
|
+
get_jsonpath_value(
|
|
1149
|
+
data=payload, path="$.filterConfig.filters", default=[]
|
|
1150
|
+
)
|
|
1151
|
+
)
|
|
1152
|
+
|
|
1153
|
+
# Hidden
|
|
1154
|
+
matches = parse("$.visibility").find(payload)
|
|
1155
|
+
is_hidden = any(match.value == "HiddenInViewMode" for match in matches)
|
|
1156
|
+
|
|
1157
|
+
rows.append(
|
|
1158
|
+
{
|
|
1159
|
+
"File Path": file_path,
|
|
1160
|
+
"Page Name": page_name,
|
|
1161
|
+
"Page Display Name": payload.get("displayName"),
|
|
1162
|
+
"Display Option": payload.get("displayOption"),
|
|
1163
|
+
"Height": height,
|
|
1164
|
+
"Width": width,
|
|
1165
|
+
"Hidden": is_hidden,
|
|
1166
|
+
"Active": True if page_name == active_page else False,
|
|
1167
|
+
"Type": helper.page_type_mapping.get((width, height), "Custom"),
|
|
1168
|
+
"Alignment": alignment_value,
|
|
1169
|
+
"Drillthrough Target Page": drill_through,
|
|
1170
|
+
"Visual Count": visual_count,
|
|
1171
|
+
"Data Visual Count": data_visual_count,
|
|
1172
|
+
"Visible Visual Count": visible_visual_count,
|
|
1173
|
+
"Page Filter Count": page_filter_count,
|
|
1174
|
+
"Page URL": self._get_url(page_name=page_name),
|
|
1175
|
+
}
|
|
1176
|
+
)
|
|
1177
|
+
|
|
1178
|
+
if rows:
|
|
1179
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1180
|
+
_update_dataframe_datatypes(dataframe=df, column_map=columns)
|
|
1181
|
+
|
|
1182
|
+
return df
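# Usage sketch (illustrative only, not part of this diff): assumes `rpt` is an
# instance of this report wrapper class, already bound to a PBIR-format report.
#
#   dfP = rpt.list_pages()
#   # e.g. flag hidden pages that still contain data visuals
#   review = dfP[(dfP["Hidden"]) & (dfP["Data Visual Count"] > 0)]
#   print(review[["Page Display Name", "Type", "Data Visual Count"]])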
|
|
1183
|
+
|
|
1184
|
+
def list_visuals(self) -> pd.DataFrame:
|
|
1185
|
+
"""
|
|
1186
|
+
Shows a list of all visuals in the report.
|
|
1187
|
+
|
|
1188
|
+
Returns
|
|
1189
|
+
-------
|
|
1190
|
+
pandas.DataFrame
|
|
1191
|
+
A pandas dataframe containing a list of all visuals in the report.
|
|
1192
|
+
"""
|
|
1193
|
+
self._ensure_pbir()
|
|
1194
|
+
|
|
1195
|
+
columns = {
|
|
1196
|
+
"File Path": "str",
|
|
1197
|
+
"Page Name": "str",
|
|
1198
|
+
"Page Display Name": "str",
|
|
1199
|
+
"Visual Name": "str",
|
|
1200
|
+
"Type": "str",
|
|
1201
|
+
"Display Type": "str",
|
|
1202
|
+
"X": "float",
|
|
1203
|
+
"Y": "float",
|
|
1204
|
+
"Z": "int",
|
|
1205
|
+
"Width": "float",
|
|
1206
|
+
"Height": "float",
|
|
1207
|
+
"Tab Order": "str",
|
|
1208
|
+
"Hidden": "bool",
|
|
1209
|
+
"Title": "str",
|
|
1210
|
+
"SubTitle": "str",
|
|
1211
|
+
"Custom Visual": "bool",
|
|
1212
|
+
"Alt Text": "str",
|
|
1213
|
+
"Show Items With No Data": "bool",
|
|
1214
|
+
"Divider": "str",
|
|
1215
|
+
"Slicer Type": "str",
|
|
1216
|
+
"Row SubTotals": "bool",
|
|
1217
|
+
"Column SubTotals": "bool",
|
|
1218
|
+
"Data Visual": "bool",
|
|
1219
|
+
"Has Sparkline": "bool",
|
|
1220
|
+
"Visual Filter Count": "int",
|
|
1221
|
+
"Data Limit": "int",
|
|
1222
|
+
"URL": "str",
|
|
1223
|
+
}
|
|
1224
|
+
df = _create_dataframe(columns=columns)
|
|
1225
|
+
|
|
1226
|
+
report_file = self.get(file_path=self._report_file_path)
|
|
1227
|
+
custom_visuals = report_file.get("publicCustomVisuals", [])
|
|
1228
|
+
visual_mapping = self._visual_page_mapping()
|
|
1229
|
+
agg_type_map = helper._get_agg_type_mapping()
|
|
1230
|
+
|
|
1231
|
+
def contains_key(data, keys_to_check):
|
|
1232
|
+
matches = parse("$..*").find(data)
|
|
1233
|
+
|
|
1234
|
+
all_keys = set()
|
|
1235
|
+
for match in matches:
|
|
1236
|
+
if isinstance(match.value, dict):
|
|
1237
|
+
all_keys.update(match.value.keys())
|
|
1238
|
+
elif isinstance(match.value, list):
|
|
1239
|
+
for item in match.value:
|
|
1240
|
+
if isinstance(item, dict):
|
|
1241
|
+
all_keys.update(item.keys())
|
|
1242
|
+
|
|
1243
|
+
return any(key in all_keys for key in keys_to_check)
|
|
1244
|
+
|
|
1245
|
+
rows = []
|
|
1246
|
+
for v in self.__all_visuals():
|
|
1247
|
+
path = v.get("path")
|
|
1248
|
+
payload = v.get("payload")
|
|
1249
|
+
page_id = visual_mapping.get(path)[0]
|
|
1250
|
+
page_display = visual_mapping.get(path)[1]
|
|
1251
|
+
pos = payload.get("position")
|
|
1252
|
+
|
|
1253
|
+
# Visual Type
|
|
1254
|
+
matches = parse("$.visual.visualType").find(payload)
|
|
1255
|
+
visual_type = matches[0].value if matches else "Group"
|
|
1256
|
+
|
|
1257
|
+
visual_type_display = helper.vis_type_mapping.get(visual_type, visual_type)
|
|
1258
|
+
cst_value, rst_value, slicer_type = False, False, "N/A"
|
|
1259
|
+
|
|
1260
|
+
# Visual Filter Count
|
|
1261
|
+
matches = parse("$.filterConfig.filters[*]").find(payload)
|
|
1262
|
+
visual_filter_count = len(matches)
|
|
1263
|
+
|
|
1264
|
+
# Data Limit
|
|
1265
|
+
matches = parse(
|
|
1266
|
+
'$.filterConfig.filters[?(@.type == "VisualTopN")].filter.Where[*].Condition.VisualTopN.ItemCount'
|
|
1267
|
+
).find(payload)
|
|
1268
|
+
data_limit = matches[0].value if matches else 0
|
|
1269
|
+
|
|
1270
|
+
# Title
|
|
1271
|
+
matches = parse(
|
|
1272
|
+
"$.visual.visualContainerObjects.title[0].properties.text.expr"
|
|
1273
|
+
).find(payload)
|
|
1274
|
+
title = (
|
|
1275
|
+
helper._get_expression(matches[0].value, agg_type_map)
|
|
1276
|
+
if matches
|
|
1277
|
+
else ""
|
|
1278
|
+
)
|
|
1279
|
+
|
|
1280
|
+
# SubTitle
|
|
1281
|
+
matches = parse(
|
|
1282
|
+
"$.visual.visualContainerObjects.subTitle[0].properties.text.expr"
|
|
1283
|
+
).find(payload)
|
|
1284
|
+
sub_title = (
|
|
1285
|
+
helper._get_expression(matches[0].value, agg_type_map)
|
|
1286
|
+
if matches
|
|
1287
|
+
else ""
|
|
1288
|
+
)
|
|
1289
|
+
|
|
1290
|
+
# Alt Text
|
|
1291
|
+
matches = parse(
|
|
1292
|
+
"$.visual.visualContainerObjects.general[0].properties.altText.expr"
|
|
1293
|
+
).find(payload)
|
|
1294
|
+
alt_text = (
|
|
1295
|
+
helper._get_expression(matches[0].value, agg_type_map)
|
|
1296
|
+
if matches
|
|
1297
|
+
else ""
|
|
1298
|
+
)
|
|
1299
|
+
|
|
1300
|
+
# Show items with no data
|
|
1301
|
+
def find_show_all_with_jsonpath(obj):
|
|
1302
|
+
matches = parse("$..showAll").find(obj)
|
|
1303
|
+
return any(match.value is True for match in matches)
|
|
1304
|
+
|
|
1305
|
+
show_all_data = find_show_all_with_jsonpath(payload)
|
|
1306
|
+
|
|
1307
|
+
# Divider
|
|
1308
|
+
matches = parse(
|
|
1309
|
+
"$.visual.visualContainerObjects.divider[0].properties.show.expr.Literal.Value"
|
|
1310
|
+
).find(payload)
|
|
1311
|
+
divider = matches[0].value if matches else ""
|
|
1312
|
+
|
|
1313
|
+
# Row/Column Subtotals
|
|
1314
|
+
if visual_type == "pivotTable":
|
|
1315
|
+
cst_matches = parse(
|
|
1316
|
+
"$.visual.objects.subTotals[0].properties.columnSubtotals.expr.Literal.Value"
|
|
1317
|
+
).find(payload)
|
|
1318
|
+
rst_matches = parse(
|
|
1319
|
+
"$.visual.objects.subTotals[0].properties.rowSubtotals.expr.Literal.Value"
|
|
1320
|
+
).find(payload)
|
|
1321
|
+
|
|
1322
|
+
if cst_matches:
|
|
1323
|
+
cst_value = cst_matches[0].value != "false"
|
|
1324
|
+
|
|
1325
|
+
if rst_matches:
|
|
1326
|
+
rst_value = rst_matches[0].value != "false"
|
|
1327
|
+
|
|
1328
|
+
# Slicer Type
|
|
1329
|
+
if visual_type == "slicer":
|
|
1330
|
+
matches = parse(
|
|
1331
|
+
"$.visual.objects.data[0].properties.mode.expr.Literal.Value"
|
|
1332
|
+
).find(payload)
|
|
1333
|
+
slicer_type = matches[0].value[1:-1] if matches else "N/A"
|
|
1334
|
+
|
|
1335
|
+
# Data Visual
|
|
1336
|
+
is_data_visual = contains_key(
|
|
1337
|
+
payload,
|
|
1338
|
+
[
|
|
1339
|
+
"Aggregation",
|
|
1340
|
+
"Column",
|
|
1341
|
+
"Measure",
|
|
1342
|
+
"HierarchyLevel",
|
|
1343
|
+
"NativeVisualCalculation",
|
|
1344
|
+
],
|
|
1345
|
+
)
|
|
1346
|
+
|
|
1347
|
+
# Sparkline
|
|
1348
|
+
has_sparkline = contains_key(payload, ["SparklineData"])
|
|
1349
|
+
visual_name = payload.get("name")
|
|
1350
|
+
|
|
1351
|
+
rows.append(
|
|
1352
|
+
{
|
|
1353
|
+
"File Path": path,
|
|
1354
|
+
"Page Name": page_id,
|
|
1355
|
+
"Page Display Name": page_display,
|
|
1356
|
+
"Visual Name": visual_name,
|
|
1357
|
+
"X": pos.get("x"),
|
|
1358
|
+
"Y": pos.get("y"),
|
|
1359
|
+
"Z": pos.get("z"),
|
|
1360
|
+
"Width": pos.get("width"),
|
|
1361
|
+
"Height": pos.get("height"),
|
|
1362
|
+
"Tab Order": pos.get("tabOrder"),
|
|
1363
|
+
"Hidden": payload.get("isHidden", False),
|
|
1364
|
+
"Type": visual_type,
|
|
1365
|
+
"Display Type": visual_type_display,
|
|
1366
|
+
"Title": title,
|
|
1367
|
+
"SubTitle": sub_title,
|
|
1368
|
+
"Custom Visual": visual_type in custom_visuals,
|
|
1369
|
+
"Alt Text": alt_text,
|
|
1370
|
+
"Show Items With No Data": show_all_data,
|
|
1371
|
+
"Divider": divider,
|
|
1372
|
+
"Row SubTotals": rst_value,
|
|
1373
|
+
"Column SubTotals": cst_value,
|
|
1374
|
+
"Slicer Type": slicer_type,
|
|
1375
|
+
"Data Visual": is_data_visual,
|
|
1376
|
+
"Has Sparkline": has_sparkline,
|
|
1377
|
+
"Visual Filter Count": visual_filter_count,
|
|
1378
|
+
"Data Limit": data_limit,
|
|
1379
|
+
"URL": self._get_url(page_name=page_id, visual_name=visual_name),
|
|
1380
|
+
}
|
|
1381
|
+
)
|
|
1382
|
+
|
|
1383
|
+
if rows:
|
|
1384
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1385
|
+
|
|
1386
|
+
grouped_df = (
|
|
1387
|
+
self.list_visual_objects()
|
|
1388
|
+
.groupby(["Page Name", "Visual Name"])
|
|
1389
|
+
.size()
|
|
1390
|
+
.reset_index(name="Visual Object Count")
|
|
1391
|
+
)
|
|
1392
|
+
|
|
1393
|
+
df = pd.merge(
|
|
1394
|
+
df,
|
|
1395
|
+
grouped_df,
|
|
1396
|
+
left_on=["Page Name", "Visual Name"],
|
|
1397
|
+
right_on=["Page Name", "Visual Name"],
|
|
1398
|
+
how="left",
|
|
1399
|
+
)
|
|
1400
|
+
df["Visual Object Count"] = df["Visual Object Count"].fillna(0).astype(int)
|
|
1401
|
+
|
|
1402
|
+
_update_dataframe_datatypes(dataframe=df, column_map=columns)
|
|
1403
|
+
|
|
1404
|
+
return df
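# Illustrative usage sketch (assumption: `rpt` is an instance of this wrapper);
# the returned frame can be filtered like any pandas DataFrame.
#
#   dfV = rpt.list_visuals()
#   # e.g. data visuals with no TopN data limit applied
#   unlimited = dfV[(dfV["Data Visual"]) & (dfV["Data Limit"] == 0)]
#   print(unlimited[["Page Display Name", "Visual Name", "Type", "URL"]])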
|
|
1405
|
+
|
|
1406
|
+
def list_visual_objects(self, extended: bool = False) -> pd.DataFrame:
|
|
1407
|
+
"""
|
|
1408
|
+
Shows a list of all semantic model objects used in each visual in the report.
|
|
1409
|
+
|
|
1410
|
+
Parameters
|
|
1411
|
+
----------
|
|
1412
|
+
extended : bool, default=False
|
|
1413
|
+
If True, adds an extra column called 'Valid Semantic Model Object' which identifies whether the semantic model object used
|
|
1414
|
+
in the report exists in the semantic model which feeds data to the report.
|
|
1415
|
+
|
|
1416
|
+
Returns
|
|
1417
|
+
-------
|
|
1418
|
+
pandas.DataFrame
|
|
1419
|
+
A pandas dataframe containing a list of all semantic model objects used in each visual in the report.
|
|
1420
|
+
"""
|
|
1421
|
+
self._ensure_pbir()
|
|
1422
|
+
|
|
1423
|
+
visual_mapping = self._visual_page_mapping()
|
|
1424
|
+
|
|
1425
|
+
columns = {
|
|
1426
|
+
"Page Name": "str",
|
|
1427
|
+
"Page Display Name": "str",
|
|
1428
|
+
"Visual Name": "str",
|
|
1429
|
+
"Table Name": "str",
|
|
1430
|
+
"Object Name": "str",
|
|
1431
|
+
"Object Type": "str",
|
|
1432
|
+
"Implicit Measure": "bool",
|
|
1433
|
+
"Sparkline": "bool",
|
|
1434
|
+
"Visual Calc": "bool",
|
|
1435
|
+
"Format": "str",
|
|
1436
|
+
"Object Display Name": "str",
|
|
1437
|
+
}
|
|
1438
|
+
df = _create_dataframe(columns=columns)
|
|
1439
|
+
|
|
1440
|
+
def contains_key(data, keys_to_check):
|
|
1441
|
+
if isinstance(data, dict):
|
|
1442
|
+
for key, value in data.items():
|
|
1443
|
+
if key in keys_to_check:
|
|
1444
|
+
return True
|
|
1445
|
+
if contains_key(value, keys_to_check):
|
|
1446
|
+
return True
|
|
1447
|
+
elif isinstance(data, list):
|
|
1448
|
+
for item in data:
|
|
1449
|
+
if contains_key(item, keys_to_check):
|
|
1450
|
+
return True
|
|
1451
|
+
return False
|
|
1452
|
+
|
|
1453
|
+
def find_entity_property_pairs(data, result=None, keys_path=None):
|
|
1454
|
+
if result is None:
|
|
1455
|
+
result = {}
|
|
1456
|
+
if keys_path is None:
|
|
1457
|
+
keys_path = []
|
|
1458
|
+
|
|
1459
|
+
if isinstance(data, dict):
|
|
1460
|
+
expression = data.get("Expression", {})
|
|
1461
|
+
source_ref = (
|
|
1462
|
+
expression.get("SourceRef", {})
|
|
1463
|
+
if isinstance(expression, dict)
|
|
1464
|
+
else {}
|
|
1465
|
+
)
|
|
1466
|
+
|
|
1467
|
+
if (
|
|
1468
|
+
isinstance(source_ref, dict)
|
|
1469
|
+
and "Entity" in source_ref
|
|
1470
|
+
and "Property" in data
|
|
1471
|
+
):
|
|
1472
|
+
entity = source_ref.get("Entity", "")
|
|
1473
|
+
property_value = data.get("Property", "")
|
|
1474
|
+
|
|
1475
|
+
object_type = (
|
|
1476
|
+
keys_path[-1].replace("HierarchyLevel", "Hierarchy")
|
|
1477
|
+
if keys_path
|
|
1478
|
+
else "Unknown"
|
|
1479
|
+
)
|
|
1480
|
+
is_agg = len(keys_path) > 2 and keys_path[-3] == "Aggregation"
|
|
1481
|
+
is_viz_calc = (
|
|
1482
|
+
len(keys_path) > 2
|
|
1483
|
+
and keys_path[-3] == "NativeVisualCalculation"
|
|
1484
|
+
)
|
|
1485
|
+
is_sparkline = (
|
|
1486
|
+
len(keys_path) > 2 and keys_path[-3] == "SparklineData"
|
|
1487
|
+
)
|
|
1488
|
+
|
|
1489
|
+
result[property_value] = (
|
|
1490
|
+
entity,
|
|
1491
|
+
object_type,
|
|
1492
|
+
is_agg,
|
|
1493
|
+
is_viz_calc,
|
|
1494
|
+
is_sparkline,
|
|
1495
|
+
)
|
|
1496
|
+
|
|
1497
|
+
# Recursively search the rest of the dictionary
|
|
1498
|
+
for key, value in data.items():
|
|
1499
|
+
find_entity_property_pairs(value, result, keys_path + [key])
|
|
1500
|
+
|
|
1501
|
+
elif isinstance(data, list):
|
|
1502
|
+
for item in data:
|
|
1503
|
+
find_entity_property_pairs(item, result, keys_path)
|
|
1504
|
+
|
|
1505
|
+
return result
|
|
1506
|
+
|
|
1507
|
+
rows = []
|
|
1508
|
+
for v in self.__all_visuals():
|
|
1509
|
+
path = v.get("path")
|
|
1510
|
+
payload = v.get("payload")
|
|
1511
|
+
page_id = visual_mapping.get(path)[0]
|
|
1512
|
+
page_display = visual_mapping.get(path)[1]
|
|
1513
|
+
|
|
1514
|
+
entity_property_pairs = find_entity_property_pairs(payload)
|
|
1515
|
+
query_state = (
|
|
1516
|
+
payload.get("visual", {}).get("query", {}).get("queryState", {})
|
|
1517
|
+
)
|
|
1518
|
+
|
|
1519
|
+
format_mapping = {}
|
|
1520
|
+
obj_display_mapping = {}
|
|
1521
|
+
for a, p in query_state.items():
|
|
1522
|
+
for proj in p.get("projections", []):
|
|
1523
|
+
query_ref = proj.get("queryRef")
|
|
1524
|
+
fmt = proj.get("format")
|
|
1525
|
+
obj_display_name = proj.get("displayName")
|
|
1526
|
+
if fmt is not None:
|
|
1527
|
+
format_mapping[query_ref] = fmt
|
|
1528
|
+
obj_display_mapping[query_ref] = obj_display_name
|
|
1529
|
+
|
|
1530
|
+
for object_name, properties in entity_property_pairs.items():
|
|
1531
|
+
table_name = properties[0]
|
|
1532
|
+
obj_full = f"{table_name}.{object_name}"
|
|
1533
|
+
is_agg = properties[2]
|
|
1534
|
+
format_value = format_mapping.get(obj_full)
|
|
1535
|
+
obj_display = obj_display_mapping.get(obj_full)
|
|
1536
|
+
|
|
1537
|
+
if is_agg:
|
|
1538
|
+
for fmt_key, fmt_val in format_mapping.items():
|
|
1539
|
+
if obj_full in fmt_key:
|
|
1540
|
+
format_value = fmt_val
|
|
1541
|
+
rows.append(
|
|
1542
|
+
{
|
|
1543
|
+
"Page Name": page_id,
|
|
1544
|
+
"Page Display Name": page_display,
|
|
1545
|
+
"Visual Name": payload.get("name"),
|
|
1546
|
+
"Table Name": table_name,
|
|
1547
|
+
"Object Name": object_name,
|
|
1548
|
+
"Object Type": properties[1],
|
|
1549
|
+
"Implicit Measure": is_agg,
|
|
1550
|
+
"Sparkline": properties[4],
|
|
1551
|
+
"Visual Calc": properties[3],
|
|
1552
|
+
"Format": format_value,
|
|
1553
|
+
"Object Display Name": obj_display,
|
|
1554
|
+
}
|
|
1555
|
+
)
|
|
1556
|
+
|
|
1557
|
+
if rows:
|
|
1558
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1559
|
+
_update_dataframe_datatypes(dataframe=df, column_map=columns)
|
|
1560
|
+
|
|
1561
|
+
if extended:
|
|
1562
|
+
df = self._add_extended(dataframe=df)
|
|
1563
|
+
|
|
1564
|
+
return df
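# A usage sketch (illustrative only; `rpt` is assumed to be an instance of this
# wrapper). With extended=True, the 'Valid Semantic Model Object' column helps
# spot visuals that reference fields no longer present in the model.
#
#   dfVO = rpt.list_visual_objects(extended=True)
#   broken = dfVO[dfVO["Valid Semantic Model Object"] == False]
#   print(broken[["Page Display Name", "Visual Name", "Table Name", "Object Name"]])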
|
|
1565
|
+
|
|
1566
|
+
def list_semantic_model_objects(self, extended: bool = False) -> pd.DataFrame:
|
|
1567
|
+
"""
|
|
1568
|
+
Shows a list of all semantic model objects (measures, columns, hierarchies) that are used in the report and where the objects
|
|
1569
|
+
were used (i.e. visual, report filter, page filter, visual filter).
|
|
1570
|
+
|
|
1571
|
+
Parameters
|
|
1572
|
+
----------
|
|
1573
|
+
extended : bool, default=False
|
|
1574
|
+
If True, adds an extra column called 'Valid Semantic Model Object' which identifies whether the semantic model object used
|
|
1575
|
+
in the report exists in the semantic model which feeds data to the report.
|
|
1576
|
+
|
|
1577
|
+
Returns
|
|
1578
|
+
-------
|
|
1579
|
+
pandas.DataFrame
|
|
1580
|
+
A pandas dataframe showing the semantic model objects used in the report.
|
|
1581
|
+
"""
|
|
1582
|
+
self._ensure_pbir()
|
|
1583
|
+
|
|
1584
|
+
from sempy_labs.tom import connect_semantic_model
|
|
1585
|
+
|
|
1586
|
+
columns = {
|
|
1587
|
+
"Table Name": "str",
|
|
1588
|
+
"Object Name": "str",
|
|
1589
|
+
"Object Type": "str",
|
|
1590
|
+
"Report Source": "str",
|
|
1591
|
+
"Report Source Object": "str",
|
|
1592
|
+
}
|
|
1593
|
+
|
|
1594
|
+
df = _create_dataframe(columns=columns)
|
|
1595
|
+
rf = self.list_report_filters()
|
|
1596
|
+
pf = self.list_page_filters()
|
|
1597
|
+
vf = self.list_visual_filters()
|
|
1598
|
+
vo = self.list_visual_objects()
|
|
1599
|
+
|
|
1600
|
+
rf_subset = rf[["Table Name", "Object Name", "Object Type"]].copy()
|
|
1601
|
+
rf_subset["Report Source"] = "Report Filter"
|
|
1602
|
+
rf_subset["Report Source Object"] = self._report_name
|
|
1603
|
+
|
|
1604
|
+
pf_subset = pf[
|
|
1605
|
+
["Table Name", "Object Name", "Object Type", "Page Display Name"]
|
|
1606
|
+
].copy()
|
|
1607
|
+
pf_subset["Report Source"] = "Page Filter"
|
|
1608
|
+
pf_subset["Report Source Object"] = pf_subset["Page Display Name"]
|
|
1609
|
+
pf_subset.drop(columns=["Page Display Name"], inplace=True)
|
|
1610
|
+
|
|
1611
|
+
vf_subset = vf[
|
|
1612
|
+
[
|
|
1613
|
+
"Table Name",
|
|
1614
|
+
"Object Name",
|
|
1615
|
+
"Object Type",
|
|
1616
|
+
"Page Display Name",
|
|
1617
|
+
"Visual Name",
|
|
1618
|
+
]
|
|
1619
|
+
].copy()
|
|
1620
|
+
vf_subset["Report Source"] = "Visual Filter"
|
|
1621
|
+
vf_subset["Report Source Object"] = format_dax_object_name(
|
|
1622
|
+
vf_subset["Page Display Name"], vf_subset["Visual Name"]
|
|
1623
|
+
)
|
|
1624
|
+
vf_subset.drop(columns=["Page Display Name", "Visual Name"], inplace=True)
|
|
1625
|
+
|
|
1626
|
+
vo_subset = vo[
|
|
1627
|
+
[
|
|
1628
|
+
"Table Name",
|
|
1629
|
+
"Object Name",
|
|
1630
|
+
"Object Type",
|
|
1631
|
+
"Page Display Name",
|
|
1632
|
+
"Visual Name",
|
|
1633
|
+
]
|
|
1634
|
+
].copy()
|
|
1635
|
+
vo_subset["Report Source"] = "Visual"
|
|
1636
|
+
vo_subset["Report Source Object"] = format_dax_object_name(
|
|
1637
|
+
vo_subset["Page Display Name"], vo_subset["Visual Name"]
|
|
1638
|
+
)
|
|
1639
|
+
vo_subset.drop(columns=["Page Display Name", "Visual Name"], inplace=True)
|
|
1640
|
+
|
|
1641
|
+
df = pd.concat(
|
|
1642
|
+
[df, rf_subset, pf_subset, vf_subset, vo_subset], ignore_index=True
|
|
1643
|
+
)
|
|
1644
|
+
|
|
1645
|
+
if extended:
|
|
1646
|
+
(dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name) = (
|
|
1647
|
+
resolve_dataset_from_report(
|
|
1648
|
+
report=self._report_id, workspace=self._workspace_id
|
|
1649
|
+
)
|
|
1650
|
+
)
|
|
1651
|
+
|
|
1652
|
+
def check_validity(tom, row):
|
|
1653
|
+
object_validators = {
|
|
1654
|
+
"Measure": lambda: any(
|
|
1655
|
+
o.Name == row["Object Name"] for o in tom.all_measures()
|
|
1656
|
+
),
|
|
1657
|
+
"Column": lambda: any(
|
|
1658
|
+
format_dax_object_name(c.Parent.Name, c.Name)
|
|
1659
|
+
== format_dax_object_name(row["Table Name"], row["Object Name"])
|
|
1660
|
+
for c in tom.all_columns()
|
|
1661
|
+
),
|
|
1662
|
+
"Hierarchy": lambda: any(
|
|
1663
|
+
format_dax_object_name(h.Parent.Name, h.Name)
|
|
1664
|
+
== format_dax_object_name(row["Table Name"], row["Object Name"])
|
|
1665
|
+
for h in tom.all_hierarchies()
|
|
1666
|
+
),
|
|
1667
|
+
}
|
|
1668
|
+
return object_validators.get(row["Object Type"], lambda: False)()
|
|
1669
|
+
|
|
1670
|
+
with connect_semantic_model(
|
|
1671
|
+
dataset=dataset_id, readonly=True, workspace=dataset_workspace_id
|
|
1672
|
+
) as tom:
|
|
1673
|
+
df["Valid Semantic Model Object"] = df.apply(
|
|
1674
|
+
lambda row: check_validity(tom, row), axis=1
|
|
1675
|
+
)
|
|
1676
|
+
|
|
1677
|
+
return df
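# Usage sketch (illustrative only; `rpt` is assumed to be an instance of this wrapper):
#
#   dfSMO = rpt.list_semantic_model_objects(extended=True)
#   # objects used anywhere in the report but missing from the semantic model
#   missing = dfSMO[dfSMO["Valid Semantic Model Object"] == False]
#   print(missing[["Table Name", "Object Name", "Report Source", "Report Source Object"]])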
|
|
1678
|
+
|
|
1679
|
+
def _list_all_semantic_model_objects(self):
|
|
1680
|
+
|
|
1681
|
+
# Includes dependencies
|
|
1682
|
+
|
|
1683
|
+
df = (
|
|
1684
|
+
self.list_semantic_model_objects()[
|
|
1685
|
+
["Table Name", "Object Name", "Object Type"]
|
|
1686
|
+
]
|
|
1687
|
+
.drop_duplicates()
|
|
1688
|
+
.reset_index(drop=True)
|
|
1689
|
+
)
|
|
1690
|
+
dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
|
|
1691
|
+
resolve_dataset_from_report(
|
|
1692
|
+
report=self._report_id, workspace=self._workspace_id
|
|
1693
|
+
)
|
|
1694
|
+
)
|
|
1695
|
+
dep = get_measure_dependencies(
|
|
1696
|
+
dataset=dataset_id, workspace=dataset_workspace_id
|
|
1697
|
+
)
|
|
1698
|
+
rpt_measures = df[df["Object Type"] == "Measure"]["Object Name"].values
|
|
1699
|
+
new_rows = dep[dep["Object Name"].isin(rpt_measures)][
|
|
1700
|
+
["Referenced Table", "Referenced Object", "Referenced Object Type"]
|
|
1701
|
+
]
|
|
1702
|
+
new_rows.columns = ["Table Name", "Object Name", "Object Type"]
|
|
1703
|
+
result_df = (
|
|
1704
|
+
pd.concat([df, new_rows], ignore_index=True)
|
|
1705
|
+
.drop_duplicates()
|
|
1706
|
+
.reset_index(drop=True)
|
|
1707
|
+
)
|
|
1708
|
+
|
|
1709
|
+
result_df["Dataset Name"] = dataset_name
|
|
1710
|
+
result_df["Dataset Workspace Name"] = dataset_workspace_name
|
|
1711
|
+
colName = "Dataset Name"
|
|
1712
|
+
result_df.insert(0, colName, result_df.pop(colName))
|
|
1713
|
+
colName = "Dataset Workspace Name"
|
|
1714
|
+
result_df.insert(1, colName, result_df.pop(colName))
|
|
1715
|
+
|
|
1716
|
+
return result_df
|
|
1717
|
+
|
|
1718
|
+
def list_bookmarks(self) -> pd.DataFrame:
|
|
1719
|
+
"""
|
|
1720
|
+
Shows a list of all bookmarks in the report.
|
|
1721
|
+
|
|
1722
|
+
Returns
|
|
1723
|
+
-------
|
|
1724
|
+
pandas.DataFrame
|
|
1725
|
+
A pandas dataframe containing a list of all bookmarks in the report.
|
|
1726
|
+
"""
|
|
1727
|
+
self._ensure_pbir()
|
|
1728
|
+
|
|
1729
|
+
columns = {
|
|
1730
|
+
"File Path": "str",
|
|
1731
|
+
"Bookmark Name": "str",
|
|
1732
|
+
"Bookmark Display Name": "str",
|
|
1733
|
+
"Page Name": "str",
|
|
1734
|
+
"Page Display Name": "str",
|
|
1735
|
+
"Visual Name": "str",
|
|
1736
|
+
"Visual Hidden": "bool",
|
|
1737
|
+
"Suppress Data": "bool",
|
|
1738
|
+
"Current Page Selected": "bool",
|
|
1739
|
+
"Apply Visual Display State": "bool",
|
|
1740
|
+
"Apply To All Visuals": "bool",
|
|
1741
|
+
}
|
|
1742
|
+
df = _create_dataframe(columns=columns)
|
|
1743
|
+
|
|
1744
|
+
bookmarks = [
|
|
1745
|
+
o
|
|
1746
|
+
for o in self._report_definition.get("parts")
|
|
1747
|
+
if o.get("path").endswith(".bookmark.json")
|
|
1748
|
+
]
|
|
1749
|
+
|
|
1750
|
+
rows = []
|
|
1751
|
+
for b in bookmarks:
|
|
1752
|
+
path = b.get("path")
|
|
1753
|
+
payload = b.get("payload")
|
|
1754
|
+
|
|
1755
|
+
bookmark_name = payload.get("name")
|
|
1756
|
+
bookmark_display = payload.get("displayName")
|
|
1757
|
+
rpt_page_id = payload.get("explorationState", {}).get("activeSection")
|
|
1758
|
+
suppress_data = payload.get("options", {}).get("suppressData", False)
|
|
1759
|
+
suppress_active_section = payload.get("options", {}).get(
|
|
1760
|
+
"suppressActiveSection", False
|
|
1761
|
+
)
|
|
1762
|
+
suppress_display = payload.get("options", {}).get("suppressDisplay", False)
|
|
1763
|
+
apply_only_to_target_visuals = payload.get("options", {}).get(
|
|
1764
|
+
"applyOnlyToTargetVisuals", False
|
|
1765
|
+
)
|
|
1766
|
+
(page_id, page_display) = self._resolve_page_name_and_display_name(
|
|
1767
|
+
page=rpt_page_id, return_error=False
|
|
1768
|
+
)
|
|
1769
|
+
|
|
1770
|
+
for rptPg in payload.get("explorationState", {}).get("sections", {}):
|
|
1771
|
+
for visual_name in (
|
|
1772
|
+
payload.get("explorationState", {})
|
|
1773
|
+
.get("sections", {})
|
|
1774
|
+
.get(rptPg, {})
|
|
1775
|
+
.get("visualContainers", {})
|
|
1776
|
+
):
|
|
1777
|
+
if (
|
|
1778
|
+
payload.get("explorationState", {})
|
|
1779
|
+
.get("sections", {})
|
|
1780
|
+
.get(rptPg, {})
|
|
1781
|
+
.get("visualContainers", {})
|
|
1782
|
+
.get(visual_name, {})
|
|
1783
|
+
.get("singleVisual", {})
|
|
1784
|
+
.get("display", {})
|
|
1785
|
+
.get("mode", {})
|
|
1786
|
+
== "hidden"
|
|
1787
|
+
):
|
|
1788
|
+
visual_hidden = True
|
|
1789
|
+
else:
|
|
1790
|
+
visual_hidden = False
|
|
1791
|
+
|
|
1792
|
+
rows.append(
|
|
1793
|
+
{
|
|
1794
|
+
"File Path": path,
|
|
1795
|
+
"Bookmark Name": bookmark_name,
|
|
1796
|
+
"Bookmark Display Name": bookmark_display,
|
|
1797
|
+
"Page Name": page_id,
|
|
1798
|
+
"Page Display Name": page_display,
|
|
1799
|
+
"Visual Name": visual_name,
|
|
1800
|
+
"Visual Hidden": visual_hidden,
|
|
1801
|
+
"Suppress Data": suppress_data,
|
|
1802
|
+
"Current Page Selected": not suppress_active_section,
|
|
1803
|
+
"Apply Visual Display State": not suppress_display,
|
|
1804
|
+
"Apply To All Visuals": not apply_only_to_target_visuals,
|
|
1805
|
+
}
|
|
1806
|
+
)
|
|
1807
|
+
|
|
1808
|
+
if rows:
|
|
1809
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1810
|
+
_update_dataframe_datatypes(dataframe=df, column_map=columns)
|
|
1811
|
+
|
|
1812
|
+
return df
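# Illustrative usage sketch (assumption: `rpt` is an instance of this wrapper):
#
#   dfB = rpt.list_bookmarks()
#   # e.g. bookmarks per page, and those that also capture data (Suppress Data == False)
#   per_page = dfB.groupby("Page Display Name")["Bookmark Name"].nunique()
#   data_bookmarks = dfB[dfB["Suppress Data"] == False]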
|
|
1813
|
+
|
|
1814
|
+
def list_report_level_measures(self) -> pd.DataFrame:
|
|
1815
|
+
"""
|
|
1816
|
+
Shows a list of all `report-level measures <https://learn.microsoft.com/power-bi/transform-model/desktop-measures#report-level-measures>`_ in the report.
|
|
1817
|
+
|
|
1818
|
+
|
|
1821
|
+
Returns
|
|
1822
|
+
-------
|
|
1823
|
+
pandas.DataFrame
|
|
1824
|
+
A pandas dataframe containing a list of all report-level measures in the report.
|
|
1825
|
+
"""
|
|
1826
|
+
|
|
1827
|
+
self._ensure_pbir()
|
|
1828
|
+
|
|
1829
|
+
columns = {
|
|
1830
|
+
"Measure Name": "str",
|
|
1831
|
+
"Table Name": "str",
|
|
1832
|
+
"Expression": "str",
|
|
1833
|
+
"Data Type": "str",
|
|
1834
|
+
"Format String": "str",
|
|
1835
|
+
"Data Category": "str",
|
|
1836
|
+
}
|
|
1837
|
+
|
|
1838
|
+
df = _create_dataframe(columns=columns)
|
|
1839
|
+
|
|
1840
|
+
# If no report extensions path, return empty DataFrame
|
|
1841
|
+
if self._report_extensions_path not in self.list_paths()["Path"].values:
|
|
1842
|
+
return df
|
|
1843
|
+
|
|
1844
|
+
report_file = self.get(file_path=self._report_extensions_path)
|
|
1845
|
+
|
|
1846
|
+
rows = []
|
|
1847
|
+
for e in report_file.get("entities", []):
|
|
1848
|
+
table_name = e.get("name")
|
|
1849
|
+
for m in e.get("measures", []):
|
|
1850
|
+
measure_name = m.get("name")
|
|
1851
|
+
expr = m.get("expression")
|
|
1852
|
+
data_type = m.get("dataType")
|
|
1853
|
+
format_string = m.get("formatString")
|
|
1854
|
+
data_category = m.get("dataCategory")
|
|
1855
|
+
|
|
1856
|
+
rows.append(
|
|
1857
|
+
{
|
|
1858
|
+
"Measure Name": measure_name,
|
|
1859
|
+
"Table Name": table_name,
|
|
1860
|
+
"Expression": expr,
|
|
1861
|
+
"Data Type": data_type,
|
|
1862
|
+
"Format String": format_string,
|
|
1863
|
+
"Data Category": data_category,
|
|
1864
|
+
}
|
|
1865
|
+
)
|
|
1866
|
+
|
|
1867
|
+
if rows:
|
|
1868
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
1869
|
+
|
|
1870
|
+
return df
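# Usage sketch (illustrative only; `rpt` is assumed to be an instance of this wrapper):
#
#   rlm = rpt.list_report_level_measures()
#   if rlm.empty:
#       print("No report-level measures in this report.")
#   else:
#       print(rlm[["Table Name", "Measure Name", "Expression"]])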
|
|
1871
|
+
|
|
1872
|
+
def get_theme(self, theme_type: str = "baseTheme") -> dict:
|
|
1873
|
+
"""
|
|
1874
|
+
Obtains the theme file of the report.
|
|
1875
|
+
|
|
1876
|
+
Parameters
|
|
1877
|
+
----------
|
|
1878
|
+
theme_type : str, default="baseTheme"
|
|
1879
|
+
The theme type. Options: "baseTheme", "customTheme".
|
|
1880
|
+
|
|
1881
|
+
Returns
|
|
1882
|
+
-------
|
|
1883
|
+
dict
|
|
1884
|
+
The theme.json file
|
|
1885
|
+
"""
|
|
1886
|
+
|
|
1887
|
+
self._ensure_pbir()
|
|
1888
|
+
|
|
1889
|
+
theme_types = ["baseTheme", "customTheme"]
|
|
1890
|
+
theme_type = theme_type.lower()
|
|
1891
|
+
|
|
1892
|
+
if "custom" in theme_type:
|
|
1893
|
+
theme_type = "customTheme"
|
|
1894
|
+
elif "base" in theme_type:
|
|
1895
|
+
theme_type = "baseTheme"
|
|
1896
|
+
if theme_type not in theme_types:
|
|
1897
|
+
raise ValueError(
|
|
1898
|
+
f"{icons.red_dot} Invalid theme type. Valid options: {theme_types}."
|
|
1899
|
+
)
|
|
1900
|
+
|
|
1901
|
+
report_file = self.get(file_path=self._report_file_path)
|
|
1902
|
+
theme_collection = report_file.get("themeCollection", {})
|
|
1903
|
+
if theme_type not in theme_collection:
|
|
1904
|
+
raise ValueError(
|
|
1905
|
+
f"{icons.red_dot} The {self._report} report within the '{self._workspace_name} workspace has no custom theme."
|
|
1906
|
+
)
|
|
1907
|
+
ct = theme_collection.get(theme_type)
|
|
1908
|
+
theme_name = ct["name"]
|
|
1909
|
+
theme_location = ct["type"]
|
|
1910
|
+
theme_file_path = f"StaticResources/{theme_location}/{theme_name}"
|
|
1911
|
+
if theme_type == "baseTheme":
|
|
1912
|
+
theme_file_path = (
|
|
1913
|
+
f"StaticResources/{theme_location}/BaseThemes/{theme_name}"
|
|
1914
|
+
)
|
|
1915
|
+
if not theme_file_path.endswith(".json"):
|
|
1916
|
+
theme_file_path = f"{theme_file_path}.json"
|
|
1917
|
+
|
|
1918
|
+
return self.get(file_path=theme_file_path)
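# Illustrative usage sketch (assumption: `rpt` is an instance of this wrapper).
# Power BI theme files commonly carry keys such as 'name' and 'dataColors', but
# the exact contents depend on the theme itself.
#
#   theme = rpt.get_theme(theme_type="customTheme")
#   print(theme.get("name"), theme.get("dataColors"))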
|
|
1919
|
+
|
|
1920
|
+
# Action functions
|
|
1921
|
+
def set_theme(
|
|
1922
|
+
self, theme_file_path: Optional[str] = None, theme_json: Optional[dict] = None
|
|
1923
|
+
):
|
|
1924
|
+
"""
|
|
1925
|
+
Sets a custom theme for a report based on a theme .json file.
|
|
1926
|
+
|
|
1927
|
+
Parameters
|
|
1928
|
+
----------
|
|
1929
|
+
theme_file_path : str, default=None
|
|
1930
|
+
The file path of the theme.json file. This can either be from a Fabric lakehouse or from the web.
|
|
1931
|
+
Example for lakehouse: file_path = '/lakehouse/default/Files/CY23SU09.json'
|
|
1932
|
+
Example for web url: file_path = 'https://raw.githubusercontent.com/PowerBiDevCamp/FabricUserApiDemo/main/FabricUserApiDemo/DefinitionTemplates/Shared/Reports/StaticResources/SharedResources/BaseThemes/CY23SU08.json'
|
|
1933
|
+
theme_json : dict, default=None
|
|
1934
|
+
The theme file in .json format. Must specify either the theme_file_path or the theme_json.
|
|
1935
|
+
"""
|
|
1936
|
+
|
|
1937
|
+
if theme_file_path and theme_json:
|
|
1938
|
+
raise ValueError(
|
|
1939
|
+
f"{icons.red_dot} Please specify either the 'theme_file_path' or the 'theme_json' parameter, not both."
|
|
1940
|
+
)
|
|
1941
|
+
if not theme_file_path and not theme_json:
|
|
1942
|
+
raise ValueError(
|
|
1943
|
+
f"{icons.red_dot} Please specify either the 'theme_file_path' or the 'theme_json' parameter."
|
|
1944
|
+
)
|
|
1945
|
+
|
|
1946
|
+
self._ensure_pbir()
|
|
1947
|
+
|
|
1948
|
+
# Extract theme_json from theme_file_path
|
|
1949
|
+
if theme_file_path:
|
|
1950
|
+
# Open file
|
|
1951
|
+
if not theme_file_path.endswith(".json"):
|
|
1952
|
+
raise ValueError(
|
|
1953
|
+
f"{icons.red_dot} The '{theme_file_path}' theme file path must be a .json file."
|
|
1954
|
+
)
|
|
1955
|
+
elif theme_file_path.startswith("https://"):
|
|
1956
|
+
response = requests.get(theme_file_path)
|
|
1957
|
+
theme_json = response.json()
|
|
1958
|
+
elif theme_file_path.startswith("/lakehouse") or theme_file_path.startswith(
|
|
1959
|
+
"/synfs/"
|
|
1960
|
+
):
|
|
1961
|
+
with open(theme_file_path, "r", encoding="utf-8-sig") as file:
|
|
1962
|
+
theme_json = json.load(file)
|
|
1963
|
+
else:
|
|
1964
|
+
raise ValueError(
|
|
1965
|
+
f"{icons.red_dot} Incorrect theme file path value '{theme_file_path}'."
|
|
1966
|
+
)
|
|
1967
|
+
|
|
1968
|
+
theme_name = theme_json.get("name")
|
|
1969
|
+
theme_name_full = f"{theme_name}.json"
|
|
1970
|
+
|
|
1971
|
+
# Add theme.json file
|
|
1972
|
+
try:
|
|
1973
|
+
self.add(
|
|
1974
|
+
file_path=f"StaticResources/RegisteredResources/{theme_name_full}",
|
|
1975
|
+
payload=theme_json,
|
|
1976
|
+
)
|
|
1977
|
+
except Exception:
|
|
1978
|
+
self.update(
|
|
1979
|
+
file_path=f"StaticResources/RegisteredResources/{theme_name_full}",
|
|
1980
|
+
payload=theme_json,
|
|
1981
|
+
)
|
|
1982
|
+
|
|
1983
|
+
rpt_version_at_import = self.get(
|
|
1984
|
+
file_path=self._report_file_path,
|
|
1985
|
+
json_path="$.themeCollection.baseTheme.reportVersionAtImport",
|
|
1986
|
+
)
|
|
1987
|
+
|
|
1988
|
+
custom_theme = {
|
|
1989
|
+
"name": theme_name_full,
|
|
1990
|
+
"reportVersionAtImport": rpt_version_at_import,
|
|
1991
|
+
"type": "RegisteredResources",
|
|
1992
|
+
}
|
|
1993
|
+
|
|
1994
|
+
self.set_json(
|
|
1995
|
+
file_path=self._report_file_path,
|
|
1996
|
+
json_path="$.themeCollection.customTheme",
|
|
1997
|
+
json_value=custom_theme,
|
|
1998
|
+
)
|
|
1999
|
+
|
|
2000
|
+
# Update
|
|
2001
|
+
report_file = self.get(
|
|
2002
|
+
file_path=self._report_file_path, json_path="$.resourcePackages"
|
|
2003
|
+
)
|
|
2004
|
+
new_item = {
|
|
2005
|
+
"name": theme_name_full,
|
|
2006
|
+
"path": theme_name_full,
|
|
2007
|
+
"type": "CustomTheme",
|
|
2008
|
+
}
|
|
2009
|
+
# Find or create RegisteredResources
|
|
2010
|
+
registered = next(
|
|
2011
|
+
(res for res in report_file if res["name"] == "RegisteredResources"), None
|
|
2012
|
+
)
|
|
2013
|
+
|
|
2014
|
+
if not registered:
|
|
2015
|
+
registered = {
|
|
2016
|
+
"name": "RegisteredResources",
|
|
2017
|
+
"type": "RegisteredResources",
|
|
2018
|
+
"items": [new_item],
|
|
2019
|
+
}
|
|
2020
|
+
report_file.append(registered)
|
|
2021
|
+
else:
|
|
2022
|
+
# Check for duplicate by 'name'
|
|
2023
|
+
if all(item["name"] != new_item["name"] for item in registered["items"]):
|
|
2024
|
+
registered["items"].append(new_item)
|
|
2025
|
+
|
|
2026
|
+
self.set_json(
|
|
2027
|
+
file_path=self._report_file_path,
|
|
2028
|
+
json_path="$.resourcePackages",
|
|
2029
|
+
json_value=report_file,
|
|
2030
|
+
)
|
|
2031
|
+
|
|
2032
|
+
if not self._readonly:
|
|
2033
|
+
print(
|
|
2034
|
+
f"{icons.green_dot} The '{theme_name}' theme has been set as the theme for the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2035
|
+
)
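# Usage sketch (illustrative only; `rpt` is assumed to be a writable instance of
# this wrapper, i.e. not opened read-only). Pass either parameter, never both;
# the URL below is the one given in the docstring above.
#
#   theme_url = "https://raw.githubusercontent.com/PowerBiDevCamp/FabricUserApiDemo/main/FabricUserApiDemo/DefinitionTemplates/Shared/Reports/StaticResources/SharedResources/BaseThemes/CY23SU08.json"
#   rpt.set_theme(theme_file_path=theme_url)
#   # or, with a theme already loaded as a dict:
#   # rpt.set_theme(theme_json=my_theme_dict)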
|
|
2036
|
+
|
|
2037
|
+
def set_active_page(self, page_name: str):
|
|
2038
|
+
"""
|
|
2039
|
+
Sets the active page (first page displayed when opening a report) for a report.
|
|
2040
|
+
|
|
2041
|
+
Parameters
|
|
2042
|
+
----------
|
|
2043
|
+
page_name : str
|
|
2044
|
+
The page name or page display name of the report.
|
|
2045
|
+
"""
|
|
2046
|
+
self._ensure_pbir()
|
|
2047
|
+
|
|
2048
|
+
(page_id, page_display_name) = self._resolve_page_name_and_display_name(
|
|
2049
|
+
page_name
|
|
2050
|
+
)
|
|
2051
|
+
self.set_json(
|
|
2052
|
+
file_path=self._pages_file_path,
|
|
2053
|
+
json_path="$.activePageName",
|
|
2054
|
+
json_value=page_id,
|
|
2055
|
+
)
|
|
2056
|
+
|
|
2057
|
+
if not self._readonly:
|
|
2058
|
+
print(
|
|
2059
|
+
f"{icons.green_dot} The '{page_display_name}' page has been set as the active page in the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2060
|
+
)
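# Illustrative sketch: `rpt` is assumed to be a writable instance of this wrapper
# and 'Executive Summary' a hypothetical page display name.
#
#   rpt.set_active_page(page_name="Executive Summary")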
|
|
2061
|
+
|
|
2062
|
+
def set_page_type(self, page_name: str, page_type: str):
|
|
2063
|
+
"""
|
|
2064
|
+
Changes the page type of a report page.
|
|
2065
|
+
|
|
2066
|
+
Parameters
|
|
2067
|
+
----------
|
|
2068
|
+
page_name : str
|
|
2069
|
+
Name or display name of the report page.
|
|
2070
|
+
page_type : str
|
|
2071
|
+
The page type. Valid page types: 'Tooltip', 'Letter', '4:3', '16:9'.
|
|
2072
|
+
"""
|
|
2073
|
+
self._ensure_pbir()
|
|
2074
|
+
|
|
2075
|
+
if page_type not in helper.page_types:
|
|
2076
|
+
raise ValueError(
|
|
2077
|
+
f"{icons.red_dot} Invalid page type. Valid options: {helper.page_types}."
|
|
2078
|
+
)
|
|
2079
|
+
|
|
2080
|
+
letter_key = next(
|
|
2081
|
+
(
|
|
2082
|
+
key
|
|
2083
|
+
for key, value in helper.page_type_mapping.items()
|
|
2084
|
+
if value == page_type
|
|
2085
|
+
),
|
|
2086
|
+
None,
|
|
2087
|
+
)
|
|
2088
|
+
if letter_key:
|
|
2089
|
+
width, height = letter_key
|
|
2090
|
+
else:
|
|
2091
|
+
raise ValueError(
|
|
2092
|
+
f"{icons.red_dot} Invalid page_type parameter. Valid options: ['Tooltip', 'Letter', '4:3', '16:9']."
|
|
2093
|
+
)
|
|
2094
|
+
|
|
2095
|
+
(file_path, page_id, page_display_name) = (
|
|
2096
|
+
self.__resolve_page_name_and_display_name_file_path(page_name)
|
|
2097
|
+
)
|
|
2098
|
+
|
|
2099
|
+
self.set_json(file_path=file_path, json_path="$.width", json_value=width)
|
|
2100
|
+
self.set_json(file_path=file_path, json_path="$.height", json_value=height)
|
|
2101
|
+
|
|
2102
|
+
if not self._readonly:
|
|
2103
|
+
print(
|
|
2104
|
+
f"{icons.green_dot} The '{page_display_name}' page has been updated to the '{page_type}' page type."
|
|
2105
|
+
)
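# Illustrative sketch: `rpt` is assumed to be a writable instance of this wrapper
# and 'Overview' a hypothetical page display name.
#
#   rpt.set_page_type(page_name="Overview", page_type="16:9")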
|
|
2106
|
+
|
|
2107
|
+
# def set_page_vertical_alignment(self, page: str, vertical_alignment: Literal["Top", "Middle"] = "Top"):
|
|
2108
|
+
|
|
2109
|
+
def set_page_visibility(self, page_name: str, hidden: bool):
|
|
2110
|
+
"""
|
|
2111
|
+
Sets whether a report page is visible or hidden.
|
|
2112
|
+
|
|
2113
|
+
Parameters
|
|
2114
|
+
----------
|
|
2115
|
+
page_name : str
|
|
2116
|
+
The page name or page display name of the report.
|
|
2117
|
+
hidden : bool
|
|
2118
|
+
If set to True, hides the report page.
|
|
2119
|
+
If set to False, makes the report page visible.
|
|
2120
|
+
"""
|
|
2121
|
+
self._ensure_pbir()
|
|
2122
|
+
(file_path, page_id, page_display_name) = (
|
|
2123
|
+
self.__resolve_page_name_and_display_name_file_path(page_name)
|
|
2124
|
+
)
|
|
2125
|
+
|
|
2126
|
+
if hidden:
|
|
2127
|
+
self.set_json(
|
|
2128
|
+
file_path=file_path,
|
|
2129
|
+
json_path="$.visibility",
|
|
2130
|
+
json_value="HiddenInViewMode",
|
|
2131
|
+
)
|
|
2132
|
+
else:
|
|
2133
|
+
self.remove(file_path=file_path, json_path="$.visibility", verbose=False)
|
|
2134
|
+
|
|
2135
|
+
visibility = "visible" if hidden is False else "hidden"
|
|
2136
|
+
|
|
2137
|
+
if not self._readonly:
|
|
2138
|
+
print(
|
|
2139
|
+
f"{icons.green_dot} The '{page_display_name}' page has been set to '{visibility}' in the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2140
|
+
)
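# Usage sketch (illustrative only): `rpt` is assumed to be a writable instance of
# this wrapper and 'Details' a hypothetical page display name.
#
#   rpt.set_page_visibility(page_name="Details", hidden=True)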
|
|
2141
|
+
|
|
2142
|
+
def hide_tooltip_drillthrough_pages(self):
|
|
2143
|
+
"""
|
|
2144
|
+
Hides all tooltip pages and drillthrough pages in a report.
|
|
2145
|
+
"""
|
|
2146
|
+
|
|
2147
|
+
dfP = self.list_pages()
|
|
2148
|
+
dfP_filt = dfP[
|
|
2149
|
+
(dfP["Type"] == "Tooltip") | (dfP["Drillthrough Target Page"] == True)
|
|
2150
|
+
]
|
|
2151
|
+
|
|
2152
|
+
if dfP_filt.empty:
|
|
2153
|
+
print(
|
|
2154
|
+
f"{icons.green_dot} There are no Tooltip or Drillthrough pages in the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2155
|
+
)
|
|
2156
|
+
return
|
|
2157
|
+
|
|
2158
|
+
for _, r in dfP_filt.iterrows():
|
|
2159
|
+
page_name = r["Page Name"]
|
|
2160
|
+
self.set_page_visibility(page_name=page_name, hidden=True)
|
|
2161
|
+
|
|
2162
|
+
def disable_show_items_with_no_data(self):
|
|
2163
|
+
"""
|
|
2164
|
+
Disables the `show items with no data <https://learn.microsoft.com/power-bi/create-reports/desktop-show-items-no-data>`_ property in all visuals within the report.
|
|
2165
|
+
"""
|
|
2166
|
+
|
|
2167
|
+
self.remove(
|
|
2168
|
+
file_path="definition/pages/*/visual.json",
|
|
2169
|
+
json_path="$..showAll",
|
|
2170
|
+
verbose=False,
|
|
2171
|
+
)
|
|
2172
|
+
|
|
2173
|
+
if not self._readonly:
|
|
2174
|
+
print(
|
|
2175
|
+
f"{icons.green_dot} Show items with data has been disabled for all visuals in the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2176
|
+
)
|
|
2177
|
+
|
|
2178
|
+
def remove_unnecessary_custom_visuals(self):
|
|
2179
|
+
"""
|
|
2180
|
+
Removes any custom visuals within the report that are not used in the report.
|
|
2181
|
+
"""
|
|
2182
|
+
|
|
2183
|
+
dfCV = self.list_custom_visuals()
|
|
2184
|
+
df = dfCV[dfCV["Used in Report"] == False]
|
|
2185
|
+
|
|
2186
|
+
if not df.empty:
|
|
2187
|
+
cv_remove = df["Custom Visual Name"].values
|
|
2188
|
+
cv_remove_display = df["Custom Visual Display Name"].values
|
|
2189
|
+
else:
|
|
2190
|
+
print(
|
|
2191
|
+
f"{icons.red_dot} There are no unnecessary custom visuals in the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2192
|
+
)
|
|
2193
|
+
return
|
|
2194
|
+
|
|
2195
|
+
json_path = "$.publicCustomVisuals"
|
|
2196
|
+
custom_visuals = self.get(file_path=self._report_file_path, json_path=json_path)
|
|
2197
|
+
updated_custom_visuals = [
|
|
2198
|
+
item for item in custom_visuals if item not in cv_remove
|
|
2199
|
+
]
|
|
2200
|
+
self.set_json(
|
|
2201
|
+
file_path=self._report_file_path,
|
|
2202
|
+
json_path=json_path,
|
|
2203
|
+
json_value=updated_custom_visuals,
|
|
2204
|
+
)
|
|
2205
|
+
|
|
2206
|
+
if not self._readonly:
|
|
2207
|
+
print(
|
|
2208
|
+
f"{icons.green_dot} The {cv_remove_display} custom visuals have been removed from the '{self._report_name}' report within the '{self._workspace_name}' workspace."
|
|
2209
|
+
)
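# Usage sketch (illustrative only; `rpt` is assumed to be a writable instance of
# this wrapper). Reviewing list_custom_visuals() first shows what would be removed.
#
#   dfCV = rpt.list_custom_visuals()
#   print(dfCV[dfCV["Used in Report"] == False]["Custom Visual Display Name"])
#   rpt.remove_unnecessary_custom_visuals()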
|
|
2210
|
+
|
|
2211
|
+
def migrate_report_level_measures(self, measures: Optional[str | List[str]] = None):
|
|
2212
|
+
"""
|
|
2213
|
+
Moves all report-level measures from the report to the semantic model on which the report is based.
|
|
2214
|
+
|
|
2215
|
+
Parameters
|
|
2216
|
+
----------
|
|
2217
|
+
measures : str | List[str], default=None
|
|
2218
|
+
A measure or list of measures to move to the semantic model.
|
|
2219
|
+
Defaults to None which resolves to moving all report-level measures to the semantic model.
|
|
2220
|
+
"""
|
|
2221
|
+
self._ensure_pbir()
|
|
2222
|
+
|
|
2223
|
+
from sempy_labs.tom import connect_semantic_model
|
|
2224
|
+
|
|
2225
|
+
rlm = self.list_report_level_measures()
|
|
2226
|
+
if rlm.empty:
|
|
2227
|
+
print(
|
|
2228
|
+
f"{icons.info} The '{self._report_name}' report within the '{self._workspace_name}' workspace has no report-level measures."
|
|
2229
|
+
)
|
|
2230
|
+
return
|
|
2231
|
+
|
|
2232
|
+
dataset_id, dataset_name, dataset_workspace_id, dataset_workspace_name = (
|
|
2233
|
+
resolve_dataset_from_report(
|
|
2234
|
+
report=self._report_id, workspace=self._workspace_id
|
|
2235
|
+
)
|
|
2236
|
+
)
|
|
2237
|
+
|
|
2238
|
+
if isinstance(measures, str):
|
|
2239
|
+
measures = [measures]
|
|
2240
|
+
|
|
2241
|
+
entities = self.get(
|
|
2242
|
+
file_path=self._report_extensions_path, json_path="$.entities"
|
|
2243
|
+
)
|
|
2244
|
+
with connect_semantic_model(
|
|
2245
|
+
dataset=dataset_id, readonly=self._readonly, workspace=dataset_workspace_id
|
|
2246
|
+
) as tom:
|
|
2247
|
+
existing_measures = [m.Name for m in tom.all_measures()]
|
|
2248
|
+
# Add measure to semantic model
|
|
2249
|
+
for _, r in rlm.iterrows():
|
|
2250
|
+
table_name = r["Table Name"]
|
|
2251
|
+
measure_name = r["Measure Name"]
|
|
2252
|
+
expr = r["Expression"]
|
|
2253
|
+
# mDataType = r["Data Type"]
|
|
2254
|
+
format_string = r["Format String"]
|
|
2255
|
+
# Add measures to the model
|
|
2256
|
+
if (
|
|
2257
|
+
measures is None or measure_name in measures
|
|
2258
|
+
) and measure_name not in existing_measures:
|
|
2259
|
+
tom.add_measure(
|
|
2260
|
+
table_name=table_name,
|
|
2261
|
+
measure_name=measure_name,
|
|
2262
|
+
expression=expr,
|
|
2263
|
+
format_string=format_string,
|
|
2264
|
+
)
|
|
2265
|
+
tom.set_annotation(
|
|
2266
|
+
object=tom.model.Tables[table_name].Measures[measure_name],
|
|
2267
|
+
name="semanticlinklabs",
|
|
2268
|
+
value="reportlevelmeasure",
|
|
2269
|
+
)
|
|
2270
|
+
|
|
2271
|
+
for entity in entities:
|
|
2272
|
+
if entity.get("name") == table_name:
|
|
2273
|
+
entity["measures"] = [
|
|
2274
|
+
m
|
|
2275
|
+
for m in entity.get("measures", [])
|
|
2276
|
+
if m.get("name") != measure_name
|
|
2277
|
+
]
|
|
2278
|
+
entities = [e for e in entities if e.get("measures")]
|
|
2279
|
+
self.set_json(
|
|
2280
|
+
file_path=self._report_extensions_path,
|
|
2281
|
+
json_path="$.entities",
|
|
2282
|
+
json_value=entities,
|
|
2283
|
+
)
|
|
2284
|
+
if not entities:
|
|
2285
|
+
self.remove(
|
|
2286
|
+
file_path=self._report_extensions_path,
|
|
2287
|
+
verbose=False,
|
|
2288
|
+
)
|
|
2289
|
+
|
|
2290
|
+
if not self._readonly:
|
|
2291
|
+
print(
|
|
2292
|
+
f"{icons.green_dot} The report-level measures have been migrated to the '{dataset_name}' semantic model within the '{dataset_workspace_name}' workspace."
|
|
2293
|
+
)
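# Illustrative sketch: `rpt` is assumed to be a writable instance of this wrapper
# and 'Sales YTD' a hypothetical report-level measure name.
#
#   rpt.migrate_report_level_measures()                  # move all of them
#   # or move only a subset:
#   # rpt.migrate_report_level_measures(measures=["Sales YTD"])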
|
|
2294
|
+
|
|
2295
|
+
# In progress...
|
|
2296
|
+
def _list_annotations(self) -> pd.DataFrame:
|
|
2297
|
+
"""
|
|
2298
|
+
Shows a list of annotations in the report.
|
|
2299
|
+
|
|
2300
|
+
Returns
|
|
2301
|
+
-------
|
|
2302
|
+
pandas.DataFrame
|
|
2303
|
+
A pandas dataframe showing a list of report, page and visual annotations in the report.
|
|
2304
|
+
"""
|
|
2305
|
+
|
|
2306
|
+
columns = {
|
|
2307
|
+
"Type": "str",
|
|
2308
|
+
"Object Name": "str",
|
|
2309
|
+
"Annotation Name": "str",
|
|
2310
|
+
"Annotation Value": "str",
|
|
2311
|
+
}
|
|
2312
|
+
df = _create_dataframe(columns=columns)
|
|
2313
|
+
|
|
2314
|
+
visual_mapping = self._visual_page_mapping()
|
|
2315
|
+
report_file = self.get(file_path="definition/report.json")
|
|
2316
|
+
|
|
2317
|
+
rows = []
|
|
2318
|
+
if "annotations" in report_file:
|
|
2319
|
+
for ann in report_file["annotations"]:
|
|
2320
|
+
rows.append(
|
|
2321
|
+
{
|
|
2322
|
+
"Type": "Report",
|
|
2323
|
+
"Object Name": self._report_name,
|
|
2324
|
+
"Annotation Name": ann.get("name"),
|
|
2325
|
+
"Annotation Value": ann.get("value"),
|
|
2326
|
+
}
|
|
2327
|
+
)
|
|
2328
|
+
|
|
2329
|
+
for p in self.__all_pages():
|
|
2330
|
+
path = p.get("path")
|
|
2331
|
+
payload = p.get("payload")
|
|
2332
|
+
page_name = payload.get("displayName")
|
|
2333
|
+
if "annotations" in payload:
|
|
2334
|
+
for ann in payload["annotations"]:
|
|
2335
|
+
rows.append(
|
|
2336
|
+
{
|
|
2337
|
+
"Type": "Page",
|
|
2338
|
+
"Object Name": page_name,
|
|
2339
|
+
"Annotation Name": ann.get("name"),
|
|
2340
|
+
"Annotation Value": ann.get("value"),
|
|
2341
|
+
}
|
|
2342
|
+
)
|
|
2343
|
+
|
|
2344
|
+
for v in self.__all_visuals():
|
|
2345
|
+
path = v.get("path")
|
|
2346
|
+
payload = v.get("payload")
|
|
2347
|
+
page_display = visual_mapping.get(path)[1]
|
|
2348
|
+
visual_name = payload.get("name")
|
|
2349
|
+
if "annotations" in payload:
|
|
2350
|
+
for ann in payload["annotations"]:
|
|
2351
|
+
rows.append(
|
|
2352
|
+
{
|
|
2353
|
+
"Type": "Visual",
|
|
2354
|
+
"Object Name": f"'{page_display}'[{visual_name}]",
|
|
2355
|
+
"Annotation Name": ann.get("name"),
|
|
2356
|
+
"Annotation Value": ann.get("value"),
|
|
2357
|
+
}
|
|
2358
|
+
)
|
|
2359
|
+
|
|
2360
|
+
if rows:
|
|
2361
|
+
df = pd.DataFrame(rows, columns=list(columns.keys()))
|
|
2362
|
+
|
|
2363
|
+
return df
|
|
2364
|
+
|
|
2365
|
+
def _add_image(self, image_path: str, resource_name: Optional[str] = None) -> str:
|
|
2366
|
+
"""
|
|
2367
|
+
Add an image to the report definition. The image is added to the StaticResources/RegisteredResources folder of the report definition. If a file with the given resource_name already exists in the report definition, it is updated.
|
|
2368
|
+
|
|
2369
|
+
Parameters
|
|
2370
|
+
----------
|
|
2371
|
+
image_path : str
|
|
2372
|
+
The path of the image file to be added. For example: "./builtin/MyImage.png".
|
|
2373
|
+
resource_name : str, default=None
|
|
2374
|
+
The name of the image file to be added. For example: "MyImage.png". If not specified, the name will be derived from the image path and a unique ID will be appended to it.
|
|
2375
|
+
|
|
2376
|
+
Returns
|
|
2377
|
+
-------
|
|
2378
|
+
str
|
|
2379
|
+
The name of the image file added to the report definition.
|
|
2380
|
+
"""
|
|
2381
|
+
self._ensure_pbir()
|
|
2382
|
+
|
|
2383
|
+
id = generate_number_guid()
|
|
2384
|
+
|
|
2385
|
+
if image_path.startswith("http://") or image_path.startswith("https://"):
|
|
2386
|
+
response = requests.get(image_path)
|
|
2387
|
+
response.raise_for_status()
|
|
2388
|
+
image_bytes = response.content
|
|
2389
|
+
# Extract the suffix (extension) from the URL path
|
|
2390
|
+
suffix = Path(urlparse(image_path).path).suffix
|
|
2391
|
+
else:
|
|
2392
|
+
with open(image_path, "rb") as image_file:
|
|
2393
|
+
image_bytes = image_file.read()
|
|
2394
|
+
suffix = Path(image_path).suffix
|
|
2395
|
+
|
|
2396
|
+
payload = base64.b64encode(image_bytes).decode("utf-8")
|
|
2397
|
+
if resource_name is None:
|
|
2398
|
+
resource_name = os.path.splitext(os.path.basename(image_path))[0]
|
|
2399
|
+
file_name = f"{resource_name}{id}{suffix}"
|
|
2400
|
+
else:
|
|
2401
|
+
file_name = resource_name
|
|
2402
|
+
file_path = f"StaticResources/RegisteredResources/{file_name}"
|
|
2403
|
+
|
|
2404
|
+
# Add StaticResources/RegisteredResources file. If the file already exists, update it.
|
|
2405
|
+
try:
|
|
2406
|
+
self.get(file_path=file_path)
|
|
2407
|
+
self.update(file_path=file_path, payload=payload)
|
|
2408
|
+
except Exception:
|
|
2409
|
+
self.add(
|
|
2410
|
+
file_path=file_path,
|
|
2411
|
+
payload=payload,
|
|
2412
|
+
)
|
|
2413
|
+
|
|
2414
|
+
# Add to report.json file
|
|
2415
|
+
self.__add_to_registered_resources(
|
|
2416
|
+
name=file_name,
|
|
2417
|
+
path=file_name,
|
|
2418
|
+
type="Image",
|
|
2419
|
+
)
|
|
2420
|
+
|
|
2421
|
+
return file_name
|
|
2422
|
+
|
|
2423
|
+
def _remove_wallpaper(self, page: Optional[str | List[str]] = None):
|
|
2424
|
+
"""
|
|
2425
|
+
Remove the wallpaper image from a page.
|
|
2426
|
+
|
|
2427
|
+
Parameters
|
|
2428
|
+
----------
|
|
2429
|
+
page : str | List[str], default=None
|
|
2430
|
+
The name or display name of the page(s) from which the wallpaper image will be removed.
|
|
2431
|
+
If None, removes from all pages.
|
|
2432
|
+
"""
|
|
2433
|
+
self._ensure_pbir()
|
|
2434
|
+
|
|
2435
|
+
if isinstance(page, str):
|
|
2436
|
+
page = [page]
|
|
2437
|
+
|
|
2438
|
+
page_list = []
|
|
2439
|
+
if page:
|
|
2440
|
+
for p in page:
|
|
2441
|
+
page_id = self.resolve_page_name(p)
|
|
2442
|
+
page_list.append(page_id)
|
|
2443
|
+
else:
|
|
2444
|
+
page_list = [
|
|
2445
|
+
p.get("payload", {}).get("name")
|
|
2446
|
+
for p in self.__all_pages()
|
|
2447
|
+
if p.get("payload") and "name" in p["payload"]
|
|
2448
|
+
]
|
|
2449
|
+
|
|
2450
|
+
for p in self.__all_pages():
|
|
2451
|
+
path = p.get("path")
|
|
2452
|
+
payload = p.get("payload")
|
|
2453
|
+
page_name = payload.get("name")
|
|
2454
|
+
page_display_name = payload.get("displayName")
|
|
2455
|
+
if page_name in page_list:
|
|
2456
|
+
self.remove(file_path=path, json_path="$.objects.outspace")
|
|
2457
|
+
print(
|
|
2458
|
+
f"{icons.green_dot} The wallpaper has been removed from the '{page_display_name}' page."
|
|
2459
|
+
)
|
|
2460
|
+
|
|
2461
|
+
def _set_wallpaper_color(
|
|
2462
|
+
self,
|
|
2463
|
+
color_value: str,
|
|
2464
|
+
page: Optional[str | List[str]] = None,
|
|
2465
|
+
transparency: int = 0,
|
|
2466
|
+
theme_color_percent: float = 0.0,
|
|
2467
|
+
):
|
|
2468
|
+
"""
|
|
2469
|
+
Set the wallpaper color of a page (or pages).
|
|
2470
|
+
|
|
2471
|
+
Parameters
|
|
2472
|
+
----------
|
|
2473
|
+
color_value : str
|
|
2474
|
+
The color value to be set. This can be a hex color code (e.g., "#FF5733") or an integer based on the theme color.
|
|
2475
|
+
page : str | List[str], default=None
|
|
2476
|
+
The name or display name of the page(s) to which the wallpaper color will be applied.
|
|
2477
|
+
If None, applies to all pages.
|
|
2478
|
+
transparency : int, default=0
|
|
2479
|
+
The transparency level of the wallpaper color. Valid values are between 0 and 100.
|
|
2480
|
+
theme_color_percent : float, default=0.0
|
|
2481
|
+
The percentage of the theme color to be applied. Valid values are between -0.6 and 0.6.
|
|
2482
|
+
"""
|
|
2483
|
+
self._ensure_pbir()
|
|
2484
|
+
|
|
2485
|
+
if transparency < 0 or transparency > 100:
|
|
2486
|
+
raise ValueError(f"{icons.red_dot} Transparency must be between 0 and 100.")
|
|
2487
|
+
|
|
2488
|
+
if theme_color_percent < -0.6 or theme_color_percent > 0.6:
|
|
2489
|
+
raise ValueError(
|
|
2490
|
+
f"{icons.red_dot} Theme color percentage must be between -0.6 and 0.6."
|
|
2491
|
+
)
|
|
2492
|
+
|
|
2493
|
+
page_list = self.__resolve_page_list(page)
|
|
2494
|
+
|
|
2495
|
+
# Define the color dictionary based on color_value type
|
|
2496
|
+
if isinstance(color_value, int):
|
|
2497
|
+
color_expr = {
|
|
2498
|
+
"ThemeDataColor": {
|
|
2499
|
+
"ColorId": color_value,
|
|
2500
|
+
"Percent": theme_color_percent,
|
|
2501
|
+
}
|
|
2502
|
+
}
|
|
2503
|
+
elif isinstance(color_value, str) and color_value.startswith("#"):
|
|
2504
|
+
color_expr = {"Literal": {"Value": f"'{color_value}'"}}
|
|
2505
|
+
else:
|
|
2506
|
+
raise NotImplementedError(
|
|
2507
|
+
f"{icons.red_dot} The color value '{color_value}' is not supported. Please provide a hex color code or an integer based on the color theme."
|
|
2508
|
+
)
|
|
2509
|
+
|
|
2510
|
+
color_dict = ({"solid": {"color": {"expr": color_expr}}},)
|
|
2511
|
+
transparency_dict = {"expr": {"Literal": {"Value": f"{transparency}D"}}}
|
|
2512
|
+
|
|
2513
|
+
for p in self.__all_pages():
|
|
2514
|
+
path = p.get("path")
|
|
2515
|
+
payload = p.get("payload", {})
|
|
2516
|
+
page_name = payload.get("name")
|
|
2517
|
+
|
|
2518
|
+
if page_name in page_list:
|
|
2519
|
+
self.set_json(
|
|
2520
|
+
file_path=path,
|
|
2521
|
+
json_path="$.objects.outspace[*].properties.color",
|
|
2522
|
+
json_value=color_dict,
|
|
2523
|
+
)
|
|
2524
|
+
self.set_json(
|
|
2525
|
+
file_path=path,
|
|
2526
|
+
json_path="$.objects.outspace[*].properties.transparency",
|
|
2527
|
+
json_value=transparency_dict,
|
|
2528
|
+
)
|
|
2529
|
+
|
|
2530
|
+
def _set_wallpaper_image(
|
|
2531
|
+
self,
|
|
2532
|
+
image_path: str,
|
|
2533
|
+
page: Optional[str | List[str]] = None,
|
|
2534
|
+
transparency: int = 0,
|
|
2535
|
+
image_fit: Literal["Normal", "Fit", "Fill"] = "Normal",
|
|
2536
|
+
):
|
|
2537
|
+
"""
|
|
2538
|
+
Add an image as the wallpaper of a page.
|
|
2539
|
+
|
|
2540
|
+
Parameters
|
|
2541
|
+
----------
|
|
2542
|
+
image_path : str
|
|
2543
|
+
The path of the image file to be added. For example: "./builtin/MyImage.png".
|
|
2544
|
+
page : str | List[str], default=None
|
|
2545
|
+
The name or display name of the page(s) to which the wallpaper image will be applied.
|
|
2546
|
+
If None, applies to all pages.
|
|
2547
|
+
transparency : int, default=0
|
|
2548
|
+
The transparency level of the wallpaper image. Valid values are between 0 and 100.
|
|
2549
|
+
image_fit : str, default="Normal"
|
|
2550
|
+
The fit type of the wallpaper image. Valid options: "Normal", "Fit", "Fill".
|
|
2551
|
+
"""
|
|
2552
|
+
self._ensure_pbir()
|
|
2553
|
+
|
|
2554
|
+
image_fits = ["Normal", "Fit", "Fill"]
|
|
2555
|
+
image_fit = image_fit.capitalize()
|
|
2556
|
+
if image_fit not in image_fits:
|
|
2557
|
+
raise ValueError(
|
|
2558
|
+
f"{icons.red_dot} Invalid image fit. Valid options: {image_fits}."
|
|
2559
|
+
)
|
|
2560
|
+
if transparency < 0 or transparency > 100:
|
|
2561
|
+
raise ValueError(f"{icons.red_dot} Transparency must be between 0 and 100.")
|
|
2562
|
+
|
|
2563
|
+
page_list = self.__resolve_page_list(page)
|
|
2564
|
+
|
|
2565
|
+
image_name = os.path.splitext(os.path.basename(image_path))[0]
|
|
2566
|
+
image_file_path = self._add_image(image_path=image_path, resource_name=image_name)
|
|
2567
|
+
|
|
2568
|
+
image_dict = {
|
|
2569
|
+
"image": {
|
|
2570
|
+
"name": {"expr": {"Literal": {"Value": f"'{image_file_path}'"}}},
|
|
2571
|
+
"url": {
|
|
2572
|
+
"expr": {
|
|
2573
|
+
"ResourcePackageItem": {
|
|
2574
|
+
"PackageName": "RegisteredResources",
|
|
2575
|
+
"PackageType": 1,
|
|
2576
|
+
"ItemName": image_file_path,
|
|
2577
|
+
}
|
|
2578
|
+
}
|
|
2579
|
+
},
|
|
2580
|
+
"scaling": {"expr": {"Literal": {"Value": f"'{image_fit}'"}}},
|
|
2581
|
+
}
|
|
2582
|
+
}
|
|
2583
|
+
transparency_dict = {"expr": {"Literal": {"Value": f"{transparency}D"}}}
|
|
2584
|
+
|
|
2585
|
+
for p in self.__all_pages():
|
|
2586
|
+
path = p.get("path")
|
|
2587
|
+
payload = p.get("payload")
|
|
2588
|
+
page_name = payload.get("name")
|
|
2589
|
+
if page_name in page_list:
|
|
2590
|
+
self.set_json(
|
|
2591
|
+
file_path=path,
|
|
2592
|
+
json_path="$.objects.outspace[*].properties.image",
|
|
2593
|
+
json_value=image_dict,
|
|
2594
|
+
)
|
|
2595
|
+
self.set_json(
|
|
2596
|
+
file_path=path,
|
|
2597
|
+
json_path="$.objects.outspace[*].properties.transparency",
|
|
2598
|
+
json_value=transparency_dict,
|
|
2599
|
+
)
|
|
2600
|
+
|
|
2601
|
+
    def _add_blank_page(
        self,
        name: str,
        width: int = 1280,
        height: int = 720,
        display_option: str = "FitToPage",
    ):
        self._ensure_pbir()

        page_id = generate_hex()
        payload = {
            "$schema": "https://developer.microsoft.com/json-schemas/fabric/item/report/definition/page/1.4.0/schema.json",
            "name": page_id,
            "displayName": name,
            "displayOption": display_option,
            "height": height,
            "width": width,
        }
        self.add(file_path=f"definition/pages/{page_id}/page.json", payload=payload)

        # Add the page to the pages.json file
        pages_file = self.get(file_path=self._pages_file_path)
        pages_file["pageOrder"].append(page_id)

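    # Illustrative usage sketch (not part of the package source): adding an empty
    # 16:9 page. The display name "Overview" is an assumed example value.
    #
    #   with connect_report("Sales", readonly=False) as rpt:
    #       rpt._add_blank_page(name="Overview", width=1280, height=720)
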
    def _add_page(self, payload: dict | bytes, generate_id: bool = True):
        """
        Add a new page to the report.

        Parameters
        ----------
        payload : dict | bytes
            The json content of the page to be added. This can be a dictionary or a base64 encoded string.
        generate_id : bool, default=True
            Whether to generate a new page ID. If False, the page ID will be taken from the payload.
        """
        self._ensure_pbir()

        page_file = decode_payload(payload)
        page_file_copy = copy.deepcopy(page_file)

        if generate_id:
            # Generate a new page ID and update the page file accordingly
            page_id = generate_hex()
            page_file_copy["name"] = page_id
        else:
            page_id = page_file_copy.get("name")

        self.add(
            file_path=f"definition/pages/{page_id}/page.json", payload=page_file_copy
        )

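    # Illustrative usage sketch (not part of the package source): copying a page
    # definition from one report into another. The report names are assumed example
    # values and <pageId> is a placeholder for an actual page ID.
    #
    #   with connect_report("Sales", readonly=True) as src:
    #       page_payload = src.get(file_path="definition/pages/<pageId>/page.json")
    #   with connect_report("Sales Copy", readonly=False) as dst:
    #       dst._add_page(payload=page_payload, generate_id=True)
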
    def _add_visual(self, page: str, payload: dict | bytes, generate_id: bool = True):
        """
        Add a new visual to a page in the report.

        Parameters
        ----------
        page : str
            The name or display name of the page to which the visual will be added.
        payload : dict | bytes
            The json content of the visual to be added. This can be a dictionary or a base64 encoded string.
        generate_id : bool, default=True
            Whether to generate a new visual ID. If False, the visual ID will be taken from the payload.
        """
        self._ensure_pbir()

        visual_file = decode_payload(payload)
        visual_file_copy = copy.deepcopy(visual_file)

        if generate_id:
            # Generate a new visual ID and update the visual file accordingly
            visual_id = generate_hex()
            visual_file_copy["name"] = visual_id
        else:
            visual_id = visual_file_copy.get("name")
        (page_file_path, page_id, page_name) = (
            self.__resolve_page_name_and_display_name_file_path(page)
        )
        visual_file_path = helper.generate_visual_file_path(page_file_path, visual_id)

        self.add(file_path=visual_file_path, payload=visual_file_copy)

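    # Illustrative usage sketch (not part of the package source): cloning an existing
    # visual.json payload onto another page. Page and report names are assumed example
    # values; <pageId> and <visualId> are placeholders.
    #
    #   with connect_report("Sales", readonly=False) as rpt:
    #       visual_payload = rpt.get(
    #           file_path="definition/pages/<pageId>/visuals/<visualId>/visual.json"
    #       )
    #       rpt._add_visual(page="Overview", payload=visual_payload, generate_id=True)
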
    def _add_new_visual(
        self,
        page: str,
        type: str,
        x: int,
        y: int,
        height: int = 720,
        width: int = 1280,
    ):
        self._ensure_pbir()

        type = helper.resolve_visual_type(type)
        visual_id = generate_hex()
        (page_file_path, page_id, page_name) = (
            self.__resolve_page_name_and_display_name_file_path(page)
        )
        visual_file_path = helper.generate_visual_file_path(page_file_path, visual_id)

        payload = {
            "$schema": "https://developer.microsoft.com/json-schemas/fabric/item/report/definition/visualContainer/2.0.0/schema.json",
            "name": visual_id,
            "position": {
                "x": x,
                "y": y,
                "z": 0,
                "height": height,
                "width": width,
                "tabOrder": 0,
            },
            "visual": {"visualType": type, "drillFilterOtherVisuals": True},
        }

        self.add(file_path=visual_file_path, payload=payload)

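    # Illustrative usage sketch (not part of the package source): dropping an empty
    # visual container onto a page. The page name and the "card" type string are
    # assumed example values; the type is normalized by helper.resolve_visual_type.
    #
    #   with connect_report("Sales", readonly=False) as rpt:
    #       rpt._add_new_visual(page="Overview", type="card", x=40, y=40,
    #                           height=200, width=300)
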
    def _update_to_theme_colors(self, mapping: dict[str, tuple[int, float]]):
        """
        Updates the report definition to use theme colors instead of hex colors.

        Parameters
        ----------
        mapping : dict[str, tuple[int, float]]
            A dictionary mapping hex color codes to their corresponding theme color IDs.
            Example: {"#FF0000": (1, 0), "#00FF00": (2, 0)}
            The first value in the tuple is the theme color ID and the second value is the percentage (a value between -0.6 and 0.6).
        """
        self._ensure_pbir()

        # Ensure theme color mapping is in the correct format (with Percent value)
        mapping = {k: (v, 0) if isinstance(v, int) else v for k, v in mapping.items()}

        out_of_range = {
            color: value
            for color, value in mapping.items()
            if len(value) > 1 and not (-0.6 <= value[1] <= 0.6)
        }

        if out_of_range:
            print(
                f"{icons.red_dot} The following mapping entries have Percent values out of range [-0.6, 0.6]:"
            )
            for color, val in out_of_range.items():
                print(f" {color}: Percent = {val[1]}")
            raise ValueError(
                f"{icons.red_dot} The Percent values must be between -0.6 and 0.6."
            )

        json_path = "$..color.expr.Literal.Value"
        jsonpath_expr = parse(json_path)

        for part in [
            part
            for part in self._report_definition.get("parts")
            if part.get("path").endswith(".json")
        ]:
            file_path = part.get("path")
            payload = part.get("payload")
            matches = jsonpath_expr.find(payload)
            if matches:
                for match in matches:
                    color_string = match.value.strip("'")
                    if color_string in mapping:
                        color_data = mapping[color_string]
                        if isinstance(color_data, int):
                            color_data = [color_data, 0]

                        # Get reference to parent of 'Value' (i.e. 'Literal')
                        # literal_dict = match.context.value
                        # Get reference to parent of 'Literal' (i.e. 'expr')
                        expr_dict = match.context.context.value

                        # Replace the 'expr' with new structure
                        expr_dict.clear()
                        expr_dict["ThemeDataColor"] = {
                            "ColorId": color_data[0],
                            "Percent": color_data[1],
                        }

                        self.update(file_path=file_path, payload=payload)

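    # Illustrative usage sketch (not part of the package source): swapping two
    # hard-coded hex colors for theme color slots. The hex values and the report
    # name are assumed example values.
    #
    #   with connect_report("Sales", readonly=False) as rpt:
    #       rpt._update_to_theme_colors(
    #           mapping={
    #               "#118DFF": (1, 0),      # theme color 1, no shade/tint
    #               "#12239E": (3, -0.25),  # theme color 3, 25% darker
    #           }
    #       )
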
    def _rename_fields(self, mapping: dict):
        """
        Renames fields in the report definition based on the provided rename mapping.

        Parameters
        ----------
        mapping : dict
            A dictionary containing the mapping of old field names to new field names.
            Example:

            {
                "columns": {
                    ("TableName", "OldColumnName1"): "NewColumnName1",
                    ("TableName", "OldColumnName2"): "NewColumnName2",
                },
                "measures": {
                    ("TableName", "OldMeasureName1"): "NewMeasureName1",
                    ("TableName", "OldMeasureName2"): "NewMeasureName2",
                }
            }
        """
        self._ensure_pbir()

        selector_mapping = {
            key: {
                variant: f"{table}.{new_col}"
                for (table, col), new_col in value.items()
                for variant in (
                    f"({table}.{col})",
                    f"[{table}.{col}]",
                )
            }
            for key, value in mapping.items()
        }

        for part in [
            part
            for part in self._report_definition.get("parts")
            if part.get("path").endswith(".json")
        ]:
            file_path = part.get("path")
            payload = part.get("payload")

            # Paths for columns, measures, and expressions
            col_expr_path = parse("$..Column")
            meas_expr_path = parse("$..Measure")
            entity_ref_path = parse("$..Expression.SourceRef.Entity")
            query_ref_path = parse("$..queryRef")
            native_query_ref_path = parse("$..nativeQueryRef")
            filter_expr_path = parse("$..filterConfig.filters[*].filter.From")
            source_ref_path = parse("$..Expression.SourceRef.Source")
            metadata_ref_path = parse("$..selector.metadata")

            # Populate table alias map
            alias_map = {}
            for match in filter_expr_path.find(payload):
                alias_list = match.value
                for alias in alias_list:
                    alias_name = alias.get("Name")
                    alias_entity = alias.get("Entity")
                    alias_map[alias_name] = alias_entity

            # Rename selector.metadata objects
            for match in metadata_ref_path.find(payload):
                obj = match.value

                # Check both measures and columns
                for category in ["measures", "columns"]:
                    for i, value in selector_mapping.get(category).items():
                        if i in obj:
                            prefix = i[0]
                            if prefix == "[":
                                new_value = obj.replace(i, f"[{value}]")
                            else:
                                new_value = obj.replace(i, f"({value})")
                            match.context.value["metadata"] = new_value
                            break

            # Rename Column Properties
            for match in col_expr_path.find(payload):
                col_obj = match.value
                parent = match.context.value

                # Extract table name from SourceRef
                source_matches = entity_ref_path.find(parent)
                if source_matches:
                    table = source_matches[0].value
                else:
                    alias = source_ref_path.find(parent)
                    table = alias_map.get(alias[0].value)

                if not table:
                    continue  # skip if can't resolve table

                old_name = col_obj.get("Property")
                if (table, old_name) in mapping.get("columns", {}):
                    col_obj["Property"] = mapping["columns"][(table, old_name)]

            # Rename Measure Properties
            for match in meas_expr_path.find(payload):
                meas_obj = match.value
                parent = match.context.value

                source_matches = entity_ref_path.find(parent)
                if source_matches:
                    table = source_matches[0].value
                else:
                    alias = source_ref_path.find(parent)
                    table = alias_map.get(alias[0].value)

                if not table:
                    continue  # skip if can't resolve table

                old_name = meas_obj.get("Property")
                if (table, old_name) in mapping.get("measures", {}):
                    meas_obj["Property"] = mapping["measures"][(table, old_name)]

            # Update queryRef and nativeQueryRef
            def update_refs(path_expr):
                for match in path_expr.find(payload):
                    ref_key = match.path.fields[0]
                    ref_value = match.value
                    parent = match.context.value

                    for (tbl, old_name), new_name in mapping.get("columns", {}).items():
                        pattern = rf"\b{re.escape(tbl)}\.{re.escape(old_name)}\b"
                        if re.search(pattern, ref_value):
                            if ref_key == "queryRef":
                                ref_value = re.sub(
                                    pattern, f"{tbl}.{new_name}", ref_value
                                )
                            elif ref_key == "nativeQueryRef":
                                agg_match = re.match(
                                    rf"(?i)([a-z]+)\s*\(\s*{re.escape(tbl)}\.{re.escape(old_name)}\s*\)",
                                    ref_value,
                                )
                                if agg_match:
                                    func = agg_match.group(1).capitalize()
                                    ref_value = f"{func} of {new_name}"
                                else:
                                    ref_value = ref_value.replace(old_name, new_name)
                            parent[ref_key] = ref_value

                    for (tbl, old_name), new_name in mapping.get(
                        "measures", {}
                    ).items():
                        pattern = rf"\b{re.escape(tbl)}\.{re.escape(old_name)}\b"
                        if re.search(pattern, ref_value):
                            if ref_key == "queryRef":
                                ref_value = re.sub(
                                    pattern, f"{tbl}.{new_name}", ref_value
                                )
                            elif ref_key == "nativeQueryRef":
                                agg_match = re.match(
                                    rf"(?i)([a-z]+)\s*\(\s*{re.escape(tbl)}\.{re.escape(old_name)}\s*\)",
                                    ref_value,
                                )
                                if agg_match:
                                    func = agg_match.group(1).capitalize()
                                    ref_value = f"{func} of {new_name}"
                                else:
                                    ref_value = ref_value.replace(old_name, new_name)
                            parent[ref_key] = ref_value

            update_refs(query_ref_path)
            update_refs(native_query_ref_path)

            self.update(file_path=file_path, payload=payload)

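    # Illustrative usage sketch (not part of the package source): keeping a report in
    # sync after fields were renamed in the semantic model. Table and field names are
    # assumed example values.
    #
    #   with connect_report("Sales", readonly=False) as rpt:
    #       rpt._rename_fields(
    #           mapping={
    #               "columns": {("Geography", "Country"): "Country/Region"},
    #               "measures": {("Sales", "Total Sales"): "Sales Amount"},
    #           }
    #       )
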
    def _list_color_codes(self) -> List[str]:
        """
        Shows a list of all the hex color codes used in the report.

        Returns
        -------
        list[str]
            A list of hex color codes used in the report.
        """
        self._ensure_pbir()

        file = self.get("*.json", json_path="$..color.expr.Literal.Value")

        return [x[1].strip("'") for x in file]

    def __update_visual_image(self, file_path: str, image_path: str):
        """
        Update the image of a visual in the report definition. Only supported for 'image' visual types.

        Parameters
        ----------
        file_path : str
            The file path of the visual to be updated. For example: "definition/pages/ReportSection1/visuals/a1d8f99b81dcc2d59035/visual.json".
        image_path : str
            The path of the image resource within the report definition. Must start with "StaticResources/RegisteredResources/". For example: "StaticResources/RegisteredResources/MyImage.png".
        """

        if image_path not in self.list_paths().get("Path").values:
            raise ValueError(
                f"Image path '{image_path}' not found in the report definition."
            )
        if not image_path.startswith("StaticResources/RegisteredResources/"):
            raise ValueError(
                f"Image path must start with 'StaticResources/RegisteredResources/'. Provided: {image_path}"
            )

        image_name = image_path.split("RegisteredResources/")[1]

        if not file_path.endswith("/visual.json"):
            raise ValueError(
                f"File path must end with '/visual.json'. Provided: {file_path}"
            )

        file = self.get(file_path=file_path)
        if file.get("visual").get("visualType") != "image":
            raise ValueError("This function is only valid for image visuals.")
        file.get("visual").get("objects").get("general")[0].get("properties").get(
            "imageUrl"
        ).get("expr").get("ResourcePackageItem")["ItemName"] = image_name

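    # Sketch of the visual.json fragment rewritten above (for orientation only, not an
    # exhaustive schema): the registered-resource item name is replaced under
    # visual.objects.general[0].properties.imageUrl.expr.ResourcePackageItem.ItemName.
    #
    #   {
    #       "visual": {
    #           "visualType": "image",
    #           "objects": {
    #               "general": [
    #                   {"properties": {"imageUrl": {"expr": {
    #                       "ResourcePackageItem": {
    #                           "PackageName": "RegisteredResources",
    #                           "ItemName": "MyImage.png"
    #                       }
    #                   }}}}
    #               ]
    #           }
    #       }
    #   }
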
    def save_changes(self):

        if self._readonly:
            print(
                f"{icons.warning} The connection is read-only. Set 'readonly' to False to save changes."
            )
        else:
            # Convert the report definition to base64
            if self._current_report_definition == self._report_definition:
                print(f"{icons.info} No changes were made to the report definition.")
                return
            new_report_definition = copy.deepcopy(self._report_definition)

            for part in new_report_definition.get("parts"):
                part["payloadType"] = "InlineBase64"
                path = part.get("path")
                payload = part.get("payload")
                if isinstance(payload, dict):
                    converted_json = json.dumps(part["payload"])
                    part["payload"] = base64.b64encode(
                        converted_json.encode("utf-8")
                    ).decode("utf-8")
                elif isinstance(payload, bytes):
                    part["payload"] = base64.b64encode(part["payload"]).decode("utf-8")
                elif is_base64(payload):
                    part["payload"] = payload
                else:
                    raise NotImplementedError(
                        f"{icons.red_dot} Unsupported payload type: {type(payload)} for the '{path}' file."
                    )

            # Generate payload for the updateDefinition API
            new_payload = {"definition": {"parts": new_report_definition.get("parts")}}

            # Update item definition
            _base_api(
                request=f"/v1/workspaces/{self._workspace_id}/reports/{self._report_id}/updateDefinition",
                method="post",
                payload=new_payload,
                lro_return_status_code=True,
                status_codes=None,
            )
            print(
                f"{icons.green_dot} The report definition has been updated successfully."
            )

    def close(self):

        if self._show_diffs and (
            self._current_report_definition != self._report_definition
        ):
            diff_parts(
                self._current_report_definition.get("parts"),
                self._report_definition.get("parts"),
            )
        # Save the changes to the service if the connection is read/write
        if not self._readonly:
            self.save_changes()


@log
@contextmanager
def connect_report(
    report: str | UUID,
    workspace: Optional[str | UUID] = None,
    readonly: bool = True,
    show_diffs: bool = True,
):
    """
    Connects to the report.

    Parameters
    ----------
    report : str | uuid.UUID
        Name or ID of the report.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    readonly : bool, default=True
        Whether the connection is read-only or read/write. Setting this to False enables read/write, which saves the changes made back to the server.
    show_diffs : bool, default=True
        Whether to show the differences between the current report definition in the service and the new report definition.

    Returns
    -------
    typing.Iterator[ReportWrapper]
        A connection to the report's metadata.
    """

    rw = ReportWrapper(
        report=report,
        workspace=workspace,
        readonly=readonly,
        show_diffs=show_diffs,
    )
    try:
        yield rw
    finally:
        rw.close()
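# Illustrative usage sketch (not part of the package source): a typical read/write
# session with the context manager above. "Sales" is an assumed example report name;
# close() pushes the edits back through save_changes() because readonly=False.
#
#   with connect_report("Sales", readonly=False) as rpt:
#       paths = rpt.list_paths()            # inspect the definition part paths
#       rpt._add_blank_page(name="Notes")   # any of the helpers shown above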