semantic-link-labs 0.8.8__py3-none-any.whl → 0.8.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.8.8.dist-info → semantic_link_labs-0.8.10.dist-info}/METADATA +5 -2
- {semantic_link_labs-0.8.8.dist-info → semantic_link_labs-0.8.10.dist-info}/RECORD +28 -28
- sempy_labs/__init__.py +10 -0
- sempy_labs/_authentication.py +31 -2
- sempy_labs/_dataflows.py +1 -1
- sempy_labs/_dax.py +69 -54
- sempy_labs/_gateways.py +46 -0
- sempy_labs/_generate_semantic_model.py +74 -27
- sempy_labs/_git.py +32 -27
- sempy_labs/_helper_functions.py +60 -23
- sempy_labs/_list_functions.py +178 -32
- sempy_labs/_model_bpa.py +25 -23
- sempy_labs/_model_bpa_bulk.py +5 -5
- sempy_labs/_model_dependencies.py +17 -8
- sempy_labs/_notebooks.py +50 -18
- sempy_labs/_refresh_semantic_model.py +23 -17
- sempy_labs/_translations.py +80 -148
- sempy_labs/_workspaces.py +1 -1
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_basic_functions.py +120 -40
- sempy_labs/admin/_domains.py +3 -2
- sempy_labs/admin/_scanner.py +5 -5
- sempy_labs/directlake/_dl_helper.py +13 -8
- sempy_labs/report/_reportwrapper.py +14 -9
- sempy_labs/tom/_model.py +77 -35
- {semantic_link_labs-0.8.8.dist-info → semantic_link_labs-0.8.10.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.8.dist-info → semantic_link_labs-0.8.10.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.8.dist-info → semantic_link_labs-0.8.10.dist-info}/top_level.txt +0 -0
sempy_labs/admin/__init__.py
CHANGED

@@ -1,3 +1,6 @@
+from sempy_labs.admin._scanner import (
+    scan_workspaces,
+)
 from sempy_labs.admin._basic_functions import (
     assign_workspaces_to_capacity,
     unassign_workspaces_from_capacity,
@@ -11,6 +14,7 @@ from sempy_labs.admin._basic_functions import (
     list_capacities_delegated_tenant_settings,
     list_access_entities,
     list_activity_events,
+    get_capacity_assignment_status,
 )
 from sempy_labs.admin._domains import (
     list_domains,
@@ -64,4 +68,6 @@ __all__ = [
     "list_modified_workspaces",
     "list_git_connections",
     "list_reports",
+    "get_capacity_assignment_status",
+    "scan_workspaces",
 ]
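Both newly exported functions become importable straight from the admin subpackage. A minimal sketch of the new surface, assuming a Microsoft Fabric notebook with semantic-link-labs installed (the workspace name is made up):

    from sempy_labs.admin import get_capacity_assignment_status, scan_workspaces

    # Both wrap admin/tenant-level REST APIs, so the caller needs matching permissions.
    status_df = get_capacity_assignment_status(workspace="Sales Workspace")  # hypothetical name
    scan = scan_workspaces(workspace="Sales Workspace")                      # hypothetical name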
sempy_labs/admin/_basic_functions.py
CHANGED

@@ -11,7 +11,6 @@ from sempy_labs._helper_functions import (
 import numpy as np
 import pandas as pd
 from dateutil.parser import parse as dtparser
-import urllib.parse
 
 
 def list_workspaces(
@@ -126,7 +125,7 @@ def list_capacities(
     capacity: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
     """
-    Shows the a list of capacities and their properties.
+    Shows the a list of capacities and their properties.
 
     This is a wrapper function for the following API: `Admin - Get Capacities As Admin <https://learn.microsoft.com/rest/api/power-bi/admin/get-capacities-as-admin>`_.
 
@@ -138,7 +137,7 @@ def list_capacities(
     Returns
     -------
     pandas.DataFrame
-        A pandas dataframe showing the capacities and their properties
+        A pandas dataframe showing the capacities and their properties.
     """
     client = fabric.FabricRestClient()
 
@@ -214,6 +213,7 @@ def assign_workspaces_to_capacity(
     if source_capacity is None:
         dfW = list_workspaces()
     else:
+        source_capacity_id = _resolve_capacity_name_and_id(source_capacity)[1]
         dfW = list_workspaces(capacity=source_capacity_id)
 
     # Extract names and IDs that are mapped in dfW
@@ -230,10 +230,10 @@ def assign_workspaces_to_capacity(
         if item not in workspaces_names and item not in workspaces_ids
     ]
 
-
-
-
-
+    if len(workspace) != len(workspaces):
+        raise ValueError(
+            f"{icons.red_dot} The following workspaces are invalid or not found in source capacity: {unmapped_workspaces}."
+        )
 
     target_capacity_id = _resolve_capacity_name_and_id(target_capacity)[1]
 
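The replacement block above turns silently dropped workspaces into a hard failure: if any requested workspace cannot be resolved in the source capacity, the function now raises instead of continuing. A hedged usage sketch; the keyword names follow the variables visible in the hunk (source_capacity, target_capacity, workspace) and all names are made up:

    from sempy_labs.admin import assign_workspaces_to_capacity

    # Raises ValueError listing the unmapped workspaces if any entry below is
    # invalid or not present in the source capacity (behavior added in this release).
    assign_workspaces_to_capacity(
        source_capacity="P1 Capacity",       # hypothetical
        target_capacity="F64 Capacity",      # hypothetical
        workspace=["Sales Workspace", "Finance Workspace"],  # hypothetical
    )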
@@ -777,38 +777,38 @@ def list_activity_events(
             "Object Display Name",
             "Experience",
             "Refresh Enforcement Policy",
+            "Is Success",
+            "Activity Id",
+            "Item Name",
+            "Dataset Name",
+            "Report Name",
+            "Capacity Id",
+            "Capacity Name",
+            "App Name",
+            "Dataset Id",
+            "Report Id",
+            "Artifact Id",
+            "Artifact Name",
+            "Report Type",
+            "App Report Id",
+            "Distribution Method",
+            "Consumption Method",
+            "Artifact Kind",
         ]
     )
 
-
-
-    tic = "%27"
-    space = "%20"
+    response_json = {"activityEventEntities": []}
     client = fabric.PowerBIRestClient()
-
-    params = {}
-    url = "/v1.0/myorg/admin/activityevents"
-
-    if start_dt is not None:
-        params["startDateTime"] = f"'{start_dt.isoformat(timespec='milliseconds')}'"
-
-    if end_dt is not None:
-        params["endDateTime"] = f"'{end_dt.isoformat(timespec='milliseconds')}'"
+    url = f"/v1.0/myorg/admin/activityevents?startDateTime='{start_time}'&endDateTime='{end_time}'"
 
     conditions = []
-
     if activity_filter is not None:
-        conditions.append(f"Activity
-
+        conditions.append(f"Activity eq '{activity_filter}'")
     if user_id_filter is not None:
-        conditions.append(f"UserId
+        conditions.append(f"UserId eq '{user_id_filter}'")
 
     if conditions:
-
-
-        url_parts = list(urllib.parse.urlparse(url))
-        url_parts[4] = urllib.parse.urlencode(params)
-        url = urllib.parse.urlunparse(url_parts)
+        url += f"&$filter={f' and '.join(conditions)}"
 
     response = client.get(url)
 
@@ -842,21 +842,38 @@ def list_activity_events(
                 "Object Display Name": i.get("ObjectDisplayName"),
                 "Experience": i.get("Experience"),
                 "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
+                "Is Success": i.get("IsSuccess"),
+                "Activity Id": i.get("ActivityId"),
+                "Item Name": i.get("ItemName"),
+                "Dataset Name": i.get("DatasetName"),
+                "Report Name": i.get("ReportName"),
+                "Capacity Id": i.get("CapacityId"),
+                "Capacity Name": i.get("CapacityName"),
+                "App Name": i.get("AppName"),
+                "Dataset Id": i.get("DatasetId"),
+                "Report Id": i.get("ReportId"),
+                "Artifact Id": i.get("ArtifactId"),
+                "Artifact Name": i.get("ArtifactName"),
+                "Report Type": i.get("ReportType"),
+                "App Report Id": i.get("AppReportId"),
+                "Distribution Method": i.get("DistributionMethod"),
+                "Consumption Method": i.get("ConsumptionMethod"),
+                "Artifact Kind": i.get("ArtifactKind"),
             }
             df = pd.concat(
                 [df, pd.DataFrame(new_data, index=[0])],
                 ignore_index=True,
            )
        else:
-
+            response_json["activityEventEntities"].extend(
+                r.get("activityEventEntities")
+            )
 
    if return_dataframe:
        df["Creation Time"] = pd.to_datetime(df["Creation Time"])
-
+        return df
    else:
-
-
-        return activity_events
+        return response_json
 
 
 def _resolve_capacity_name_and_id(
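list_activity_events now builds the request URL and OData $filter inline and accumulates the raw payload so it can be returned as-is. A sketch of both output modes, assuming start_time/end_time accept ISO-style datetime strings as the URL construction above implies (all values illustrative):

    from sempy_labs.admin import list_activity_events

    # Default: a pandas DataFrame, including the newly added columns such as
    # 'Is Success', 'Capacity Name' and 'Artifact Kind'.
    df = list_activity_events(
        start_time="2025-01-01T00:00:00",
        end_time="2025-01-01T23:59:59",
        activity_filter="ViewReport",  # rendered as $filter=Activity eq 'ViewReport'
    )

    # Raw mode: the accumulated {'activityEventEntities': [...]} JSON payload.
    events = list_activity_events(
        start_time="2025-01-01T00:00:00",
        end_time="2025-01-01T23:59:59",
        return_dataframe=False,
    )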
@@ -917,12 +934,18 @@ def _resolve_workspace_name_and_id(
     workspace: str | UUID,
 ) -> Tuple[str, UUID]:
 
-
-
-    workspace_name =
-
-
-
+    if workspace is None:
+        workspace_id = fabric.get_workspace_id()
+        workspace_name = fabric.resolve_workspace_name(workspace_id)
+    else:
+        dfW = list_workspaces(workspace=workspace)
+        if not dfW.empty:
+            workspace_name = dfW["Name"].iloc[0]
+            workspace_id = dfW["Id"].iloc[0]
+        else:
+            raise ValueError(
+                f"{icons.red_dot} The '{workspace}' workspace was not found."
+            )
 
     return workspace_name, workspace_id
 
@@ -1004,3 +1027,60 @@ def list_reports(
     df["Modified Date"] = pd.to_datetime(df["Modified Date"], errors="coerce")
 
     return df
+
+
+def get_capacity_assignment_status(workspace: Optional[str | UUID] = None):
+    """
+    Gets the status of the assignment-to-capacity operation for the specified workspace.
+
+    This is a wrapper function for the following API: `Capacities - Groups CapacityAssignmentStatus <https://learn.microsoft.com/rest/api/power-bi/capacities/groups-capacity-assignment-status>`_.
+
+    Parameters
+    ----------
+    workspace : str | UUID, default=None
+        The Fabric workspace name or id.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the status of the assignment-to-capacity operation for the specified workspace.
+    """
+
+    (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
+
+    df = pd.DataFrame(
+        columns=[
+            "Status",
+            "Activity Id",
+            "Start Time",
+            "End Time",
+            "Capacity Id",
+            "Capacity Name",
+        ]
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1.0/myorg/groups/{workspace_id}/CapacityAssignmentStatus")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    v = response.json()
+    capacity_id = v.get("capacityId")
+
+    (capacity_name, capacity_id) = _resolve_capacity_name_and_id(capacity=capacity_id)
+
+    new_data = {
+        "Status": v.get("status"),
+        "Activity Id": v.get("activityId"),
+        "Start Time": v.get("startTime"),
+        "End Time": v.get("endTime"),
+        "Capacity Id": capacity_id,
+        "Capacity Name": capacity_name,
+    }
+
+    df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)
+
+    return df
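Since the whole function body is shown above, usage follows directly from its signature; a short sketch (the workspace name is made up):

    from sempy_labs.admin import get_capacity_assignment_status

    # One row per call: Status, Activity Id, Start/End Time, plus the capacity
    # name/ID resolved via _resolve_capacity_name_and_id.
    df = get_capacity_assignment_status(workspace="Sales Workspace")
    print(df[["Status", "Capacity Name"]])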
sempy_labs/admin/_domains.py
CHANGED

@@ -5,6 +5,7 @@ from sempy_labs._helper_functions import lro
 from sempy.fabric.exceptions import FabricHTTPException
 import pandas as pd
 from uuid import UUID
+from sempy_labs.admin._basic_functions import list_workspaces
 
 
 def resolve_domain_id(domain_name: str) -> UUID:
@@ -302,7 +303,7 @@ def assign_domain_workspaces(domain_name: str, workspace_names: str | List[str])
     if isinstance(workspace_names, str):
         workspace_names = [workspace_names]
 
-    dfW =
+    dfW = list_workspaces()
 
     # Check for invalid capacities
     invalid_workspaces = [
@@ -379,7 +380,7 @@ def unassign_domain_workspaces(domain_name: str, workspace_names: str | List[str
     if isinstance(workspace_names, str):
         workspace_names = [workspace_names]
 
-    dfW =
+    dfW = list_workspaces()
 
     # Check for invalid capacities
     invalid_workspaces = [
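The calling convention of the two domain functions is unchanged; they now simply resolve workspaces via the admin-scoped list_workspaces imported above. A brief sketch, assuming both are exported from sempy_labs.admin (domain and workspace names made up):

    from sempy_labs.admin import assign_domain_workspaces, unassign_domain_workspaces

    # workspace_names accepts a single string or a list of strings.
    assign_domain_workspaces("Finance Domain", ["Sales Workspace", "HR Workspace"])
    unassign_domain_workspaces("Finance Domain", "HR Workspace")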
sempy_labs/admin/_scanner.py
CHANGED

@@ -16,12 +16,12 @@ def scan_workspaces(
     workspace: Optional[str | List[str] | UUID | List[UUID]] = None,
 ) -> dict:
     """
-
+    Gets the scan result for the specified scan.
 
     This is a wrapper function for the following APIs:
-        `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/
-        `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/
-        `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/
+        `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-post-workspace-info>`_.
+        `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
+        `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.
 
     Parameters
     ----------
@@ -40,7 +40,7 @@ def scan_workspaces(
 
     Returns
     -------
-
+    dict
         A json object with the scan result.
     """
     scan_result = {
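scan_workspaces chains the three WorkspaceInfo admin APIs (trigger scan, poll status, fetch result) and returns the scan result as a dict. A hedged sketch; only the workspace parameter is visible in this hunk, and the top-level 'workspaces' key is assumed from the Power BI scanner API response shape:

    from sempy_labs.admin import scan_workspaces

    # Accepts one workspace or a list, by name or UUID (names made up).
    result = scan_workspaces(workspace=["Sales Workspace", "Finance Workspace"])

    for ws in result.get("workspaces", []):  # key assumed from the GetScanResult payload
        print(ws.get("name"))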
sempy_labs/directlake/_dl_helper.py
CHANGED

@@ -10,19 +10,21 @@ from sempy_labs._helper_functions import (
     resolve_dataset_id,
     resolve_lakehouse_name,
     _convert_data_type,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
 )
 
 
 def check_fallback_reason(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str] = None
 ) -> pd.DataFrame:
     """
     Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -35,19 +37,22 @@ def check_fallback_reason(
     """
     from sempy_labs.tom import connect_semantic_model
 
-
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
 
     with connect_semantic_model(
-        dataset=
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{
+                f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
             )
 
     df = fabric.evaluate_dax(
-        dataset=
-        workspace=
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         SELECT [TableName] AS [Table Name],[FallbackReason] AS [FallbackReasonID]
         FROM $SYSTEM.TMSCHEMA_DELTA_TABLE_METADATA_STORAGES
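check_fallback_reason now takes either the semantic model's name or its UUID, resolving both through the new helper imports above. A short sketch (model, workspace, and UUID are illustrative):

    from sempy_labs.directlake import check_fallback_reason

    # By name, as before:
    df = check_fallback_reason(dataset="Sales Model", workspace="Sales Workspace")

    # By ID, new in this release; raises ValueError for non-Direct-Lake models:
    df = check_fallback_reason(dataset="5b218778-e7a5-4d73-8187-f10824047715")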
sempy_labs/report/_reportwrapper.py
CHANGED

@@ -970,6 +970,7 @@ class ReportWrapper:
             "Sparkline",
             "Visual Calc",
             "Format",
+            "Object Display Name",
         ]
     )
 
@@ -1038,23 +1039,26 @@ class ReportWrapper:
 
         entity_property_pairs = find_entity_property_pairs(visual_json)
         query_state = (
-            visual_json.get("visual", {})
-            .get("query", {})
-            .get("queryState", {})
-            .get("Values", {})
+            visual_json.get("visual", {}).get("query", {}).get("queryState", {})
         )
+
         format_mapping = {}
-
-
-
-
-
+        obj_display_mapping = {}
+        for a, p in query_state.items():
+            for proj in p.get("projections", []):
+                query_ref = proj.get("queryRef")
+                fmt = proj.get("format")
+                obj_display_name = proj.get("displayName")
+                if fmt is not None:
+                    format_mapping[query_ref] = fmt
+                obj_display_mapping[query_ref] = obj_display_name
 
         for object_name, properties in entity_property_pairs.items():
             table_name = properties[0]
             obj_full = f"{table_name}.{object_name}"
             is_agg = properties[2]
             format_value = format_mapping.get(obj_full)
+            obj_display = obj_display_mapping.get(obj_full)
 
             if is_agg:
                 for k, v in format_mapping.items():
@@ -1071,6 +1075,7 @@ class ReportWrapper:
                 "Sparkline": properties[4],
                 "Visual Calc": properties[3],
                 "Format": format_value,
+                "Object Display Name": obj_display,
             }
 
             df = pd.concat(
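The rewritten mapping logic walks every queryState bucket rather than only 'Values', recording each projection's format and displayName keyed by queryRef. A standalone sketch of that traversal on a fabricated visual.json fragment (structure assumed from the code above; runnable as plain Python):

    # Fabricated PBIR-style fragment; only the keys the code reads are included.
    visual_json = {
        "visual": {
            "query": {
                "queryState": {
                    "Values": {
                        "projections": [
                            {
                                "queryRef": "Sales.Total Sales",
                                "format": "#,0.00",
                                "displayName": "Total Sales (USD)",
                            }
                        ]
                    },
                    "Category": {
                        "projections": [{"queryRef": "Date.Month"}]  # no format/displayName
                    },
                }
            }
        }
    }

    query_state = visual_json.get("visual", {}).get("query", {}).get("queryState", {})

    format_mapping = {}
    obj_display_mapping = {}
    for _, bucket in query_state.items():
        for proj in bucket.get("projections", []):
            query_ref = proj.get("queryRef")
            if proj.get("format") is not None:
                format_mapping[query_ref] = proj.get("format")
            obj_display_mapping[query_ref] = proj.get("displayName")

    print(format_mapping)       # {'Sales.Total Sales': '#,0.00'}
    print(obj_display_mapping)  # {'Sales.Total Sales': 'Total Sales (USD)', 'Date.Month': None}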