semantic-link-labs 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the registry's advisory page for more details.

Files changed (131)
  1. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +6 -4
  2. semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
  3. sempy_labs/__init__.py +56 -56
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_ai.py +1 -1
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +5 -5
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +2 -2
  10. sempy_labs/_dashboards.py +16 -16
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +101 -26
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_dax_query_view.py +1 -1
  15. sempy_labs/_delta_analyzer.py +4 -4
  16. sempy_labs/_delta_analyzer_history.py +1 -1
  17. sempy_labs/_deployment_pipelines.py +1 -1
  18. sempy_labs/_environments.py +22 -21
  19. sempy_labs/_eventhouses.py +12 -11
  20. sempy_labs/_eventstreams.py +12 -11
  21. sempy_labs/_external_data_shares.py +23 -22
  22. sempy_labs/_gateways.py +47 -45
  23. sempy_labs/_generate_semantic_model.py +3 -3
  24. sempy_labs/_git.py +1 -1
  25. sempy_labs/_graphQL.py +12 -11
  26. sempy_labs/_job_scheduler.py +56 -54
  27. sempy_labs/_kql_databases.py +16 -17
  28. sempy_labs/_kql_querysets.py +12 -11
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_list_functions.py +1 -1
  31. sempy_labs/_managed_private_endpoints.py +18 -15
  32. sempy_labs/_mirrored_databases.py +16 -15
  33. sempy_labs/_mirrored_warehouses.py +12 -11
  34. sempy_labs/_ml_experiments.py +11 -10
  35. sempy_labs/_ml_models.py +11 -10
  36. sempy_labs/_model_auto_build.py +3 -3
  37. sempy_labs/_model_bpa.py +5 -5
  38. sempy_labs/_model_bpa_bulk.py +3 -3
  39. sempy_labs/_model_dependencies.py +1 -1
  40. sempy_labs/_mounted_data_factories.py +12 -12
  41. sempy_labs/_notebooks.py +1 -1
  42. sempy_labs/_one_lake_integration.py +1 -1
  43. sempy_labs/_query_scale_out.py +1 -1
  44. sempy_labs/_refresh_semantic_model.py +1 -1
  45. sempy_labs/_semantic_models.py +30 -28
  46. sempy_labs/_spark.py +1 -1
  47. sempy_labs/_sql.py +1 -1
  48. sempy_labs/_sql_endpoints.py +12 -11
  49. sempy_labs/_sqldatabase.py +15 -15
  50. sempy_labs/_tags.py +11 -10
  51. sempy_labs/_translations.py +1 -1
  52. sempy_labs/_user_delegation_key.py +2 -2
  53. sempy_labs/_variable_libraries.py +13 -12
  54. sempy_labs/_vertipaq.py +3 -3
  55. sempy_labs/_vpax.py +1 -1
  56. sempy_labs/_warehouses.py +15 -14
  57. sempy_labs/_workloads.py +1 -1
  58. sempy_labs/_workspace_identity.py +1 -1
  59. sempy_labs/_workspaces.py +14 -13
  60. sempy_labs/admin/__init__.py +18 -18
  61. sempy_labs/admin/_activities.py +46 -46
  62. sempy_labs/admin/_apps.py +28 -26
  63. sempy_labs/admin/_artifacts.py +15 -15
  64. sempy_labs/admin/_basic_functions.py +1 -2
  65. sempy_labs/admin/_capacities.py +86 -82
  66. sempy_labs/admin/_dataflows.py +2 -2
  67. sempy_labs/admin/_datasets.py +50 -48
  68. sempy_labs/admin/_domains.py +25 -19
  69. sempy_labs/admin/_external_data_share.py +24 -22
  70. sempy_labs/admin/_git.py +17 -17
  71. sempy_labs/admin/_items.py +47 -45
  72. sempy_labs/admin/_reports.py +61 -58
  73. sempy_labs/admin/_scanner.py +2 -2
  74. sempy_labs/admin/_shared.py +18 -18
  75. sempy_labs/admin/_tags.py +2 -2
  76. sempy_labs/admin/_tenant.py +57 -51
  77. sempy_labs/admin/_users.py +16 -15
  78. sempy_labs/admin/_workspaces.py +2 -2
  79. sempy_labs/directlake/__init__.py +12 -12
  80. sempy_labs/directlake/_directlake_schema_compare.py +3 -3
  81. sempy_labs/directlake/_directlake_schema_sync.py +9 -7
  82. sempy_labs/directlake/_dl_helper.py +1 -1
  83. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  84. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  85. sempy_labs/directlake/_guardrails.py +1 -1
  86. sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
  90. sempy_labs/directlake/_warm_cache.py +3 -3
  91. sempy_labs/graph/__init__.py +3 -3
  92. sempy_labs/graph/_groups.py +81 -78
  93. sempy_labs/graph/_teams.py +21 -21
  94. sempy_labs/graph/_users.py +111 -10
  95. sempy_labs/lakehouse/__init__.py +7 -7
  96. sempy_labs/lakehouse/_blobs.py +30 -30
  97. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  98. sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
  99. sempy_labs/lakehouse/_helper.py +30 -2
  100. sempy_labs/lakehouse/_lakehouse.py +2 -2
  101. sempy_labs/lakehouse/_livy_sessions.py +47 -42
  102. sempy_labs/lakehouse/_shortcuts.py +22 -21
  103. sempy_labs/migration/__init__.py +8 -8
  104. sempy_labs/migration/_create_pqt_file.py +2 -2
  105. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
  106. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
  107. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
  108. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
  109. sempy_labs/migration/_migration_validation.py +1 -2
  110. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  111. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
  112. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
  113. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
  114. sempy_labs/report/__init__.py +10 -10
  115. sempy_labs/report/_download_report.py +2 -2
  116. sempy_labs/report/_export_report.py +2 -2
  117. sempy_labs/report/_generate_report.py +1 -1
  118. sempy_labs/report/_paginated.py +1 -1
  119. sempy_labs/report/_report_bpa.py +4 -3
  120. sempy_labs/report/_report_functions.py +3 -3
  121. sempy_labs/report/_report_list_functions.py +3 -3
  122. sempy_labs/report/_report_rebind.py +1 -1
  123. sempy_labs/report/_reportwrapper.py +247 -249
  124. sempy_labs/report/_save_report.py +3 -3
  125. sempy_labs/theme/_org_themes.py +35 -1
  126. sempy_labs/tom/__init__.py +1 -1
  127. sempy_labs/tom/_model.py +23 -20
  128. semantic_link_labs-0.11.0.dist-info/RECORD +0 -210
  129. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
  130. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
  131. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
@@ -2,7 +2,7 @@ from typing import Optional, Tuple, List, Literal
2
2
  from contextlib import contextmanager
3
3
  from sempy._utils._log import log
4
4
  from uuid import UUID
5
- from sempy_labs._helper_functions import (
5
+ from .._helper_functions import (
6
6
  resolve_workspace_name_and_id,
7
7
  resolve_item_name_and_id,
8
8
  _base_api,
@@ -19,7 +19,7 @@ from sempy_labs._helper_functions import (
19
19
  remove_json_value,
20
20
  get_tenant_id,
21
21
  )
22
- from sempy_labs._dictionary_diffs import (
22
+ from .._dictionary_diffs import (
23
23
  diff_parts,
24
24
  )
25
25
  import json
@@ -28,7 +28,7 @@ import copy
28
28
  import pandas as pd
29
29
  from jsonpath_ng.ext import parse
30
30
  import sempy_labs.report._report_helper as helper
31
- from sempy_labs._model_dependencies import get_measure_dependencies
31
+ from .._model_dependencies import get_measure_dependencies
32
32
  import requests
33
33
  import re
34
34
  import base64
@@ -308,9 +308,10 @@ class ReportWrapper:
308
308
 
309
309
  if not json_path:
310
310
  self._report_definition["parts"].remove(part)
311
- print(
312
- f"{icons.green_dot} The file '{path}' has been removed from the report definition."
313
- )
311
+ if verbose:
312
+ print(
313
+ f"{icons.green_dot} The file '{path}' has been removed from the report definition."
314
+ )
314
315
  else:
315
316
  remove_json_value(
316
317
  path=path, payload=payload, json_path=json_path, verbose=verbose
@@ -736,7 +737,7 @@ class ReportWrapper:
736
737
  }
737
738
  df = _create_dataframe(columns=columns)
738
739
 
739
- dfs = []
740
+ rows = []
740
741
 
741
742
  if "filterConfig" in report_file:
742
743
  for flt in report_file.get("filterConfig", {}).get("filters", {}):
@@ -750,24 +751,23 @@ class ReportWrapper:
750
751
  entity_property_pairs = helper.find_entity_property_pairs(flt)
751
752
 
752
753
  for object_name, properties in entity_property_pairs.items():
753
- new_data = {
754
- "Filter Name": filter_name,
755
- "Type": filter_type,
756
- "Table Name": properties[0],
757
- "Object Name": object_name,
758
- "Object Type": properties[1],
759
- "Hidden": hidden,
760
- "Locked": locked,
761
- "How Created": how_created,
762
- "Used": filter_used,
763
- }
764
-
765
- dfs.append(pd.DataFrame(new_data, index=[0]))
766
-
767
- if dfs:
768
- df = pd.concat(dfs, ignore_index=True)
754
+ rows.append(
755
+ {
756
+ "Filter Name": filter_name,
757
+ "Type": filter_type,
758
+ "Table Name": properties[0],
759
+ "Object Name": object_name,
760
+ "Object Type": properties[1],
761
+ "Hidden": hidden,
762
+ "Locked": locked,
763
+ "How Created": how_created,
764
+ "Used": filter_used,
765
+ }
766
+ )
769
767
 
770
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
768
+ if rows:
769
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
770
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
771
771
 
772
772
  if extended:
773
773
  df = self._add_extended(dataframe=df)
@@ -806,7 +806,7 @@ class ReportWrapper:
806
806
  }
807
807
  df = _create_dataframe(columns=columns)
808
808
 
809
- dfs = []
809
+ rows = []
810
810
  for p in self.__all_pages():
811
811
  payload = p.get("payload")
812
812
  page_id = payload.get("name")
@@ -824,27 +824,26 @@ class ReportWrapper:
824
824
  entity_property_pairs = helper.find_entity_property_pairs(flt)
825
825
 
826
826
  for object_name, properties in entity_property_pairs.items():
827
- new_data = {
828
- "Page Name": page_id,
829
- "Page Display Name": page_display,
830
- "Filter Name": filter_name,
831
- "Type": filter_type,
832
- "Table Name": properties[0],
833
- "Object Name": object_name,
834
- "Object Type": properties[1],
835
- "Hidden": hidden,
836
- "Locked": locked,
837
- "How Created": how_created,
838
- "Used": filter_used,
839
- "Page URL": self._get_url(page_name=page_id),
840
- }
841
-
842
- dfs.append(pd.DataFrame(new_data, index=[0]))
843
-
844
- if dfs:
845
- df = pd.concat(dfs, ignore_index=True)
827
+ rows.append(
828
+ {
829
+ "Page Name": page_id,
830
+ "Page Display Name": page_display,
831
+ "Filter Name": filter_name,
832
+ "Type": filter_type,
833
+ "Table Name": properties[0],
834
+ "Object Name": object_name,
835
+ "Object Type": properties[1],
836
+ "Hidden": hidden,
837
+ "Locked": locked,
838
+ "How Created": how_created,
839
+ "Used": filter_used,
840
+ "Page URL": self._get_url(page_name=page_id),
841
+ }
842
+ )
846
843
 
847
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
844
+ if rows:
845
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
846
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
848
847
 
849
848
  if extended:
850
849
  df = self._add_extended(dataframe=df)
@@ -886,7 +885,7 @@ class ReportWrapper:
886
885
 
887
886
  visual_mapping = self._visual_page_mapping()
888
887
 
889
- dfs = []
888
+ rows = []
890
889
  for v in self.__all_visuals():
891
890
  path = v.get("path")
892
891
  payload = v.get("payload")
@@ -906,27 +905,26 @@ class ReportWrapper:
906
905
  entity_property_pairs = helper.find_entity_property_pairs(flt)
907
906
 
908
907
  for object_name, properties in entity_property_pairs.items():
909
- new_data = {
910
- "Page Name": page_id,
911
- "Page Display Name": page_display,
912
- "Visual Name": visual_name,
913
- "Filter Name": filter_name,
914
- "Type": filter_type,
915
- "Table Name": properties[0],
916
- "Object Name": object_name,
917
- "Object Type": properties[1],
918
- "Hidden": hidden,
919
- "Locked": locked,
920
- "How Created": how_created,
921
- "Used": filter_used,
922
- }
923
-
924
- dfs.append(pd.DataFrame(new_data, index=[0]))
925
-
926
- if dfs:
927
- df = pd.concat(dfs, ignore_index=True)
908
+ rows.append(
909
+ {
910
+ "Page Name": page_id,
911
+ "Page Display Name": page_display,
912
+ "Visual Name": visual_name,
913
+ "Filter Name": filter_name,
914
+ "Type": filter_type,
915
+ "Table Name": properties[0],
916
+ "Object Name": object_name,
917
+ "Object Type": properties[1],
918
+ "Hidden": hidden,
919
+ "Locked": locked,
920
+ "How Created": how_created,
921
+ "Used": filter_used,
922
+ }
923
+ )
928
924
 
929
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
925
+ if rows:
926
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
927
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
930
928
 
931
929
  if extended:
932
930
  df = self._add_extended(dataframe=df)
@@ -956,7 +954,7 @@ class ReportWrapper:
956
954
  }
957
955
  df = _create_dataframe(columns=columns)
958
956
 
959
- dfs = []
957
+ rows = []
960
958
  for p in self.__all_pages():
961
959
  payload = p.get("payload")
962
960
  page_name = payload.get("name")
@@ -967,17 +965,18 @@ class ReportWrapper:
967
965
  targetVisual = vizInt.get("target")
968
966
  vizIntType = vizInt.get("type")
969
967
 
970
- new_data = {
971
- "Page Name": page_name,
972
- "Page Display Name": page_display,
973
- "Source Visual Name": sourceVisual,
974
- "Target Visual Name": targetVisual,
975
- "Type": vizIntType,
976
- }
977
- dfs.append(pd.DataFrame(new_data, index=[0]))
968
+ rows.append(
969
+ {
970
+ "Page Name": page_name,
971
+ "Page Display Name": page_display,
972
+ "Source Visual Name": sourceVisual,
973
+ "Target Visual Name": targetVisual,
974
+ "Type": vizIntType,
975
+ }
976
+ )
978
977
 
979
- if dfs:
980
- df = pd.concat(dfs, ignore_index=True)
978
+ if rows:
979
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
981
980
 
982
981
  return df
983
982
 
@@ -1017,7 +1016,7 @@ class ReportWrapper:
1017
1016
 
1018
1017
  dfV = self.list_visuals()
1019
1018
 
1020
- dfs = []
1019
+ rows = []
1021
1020
  for p in self.__all_pages():
1022
1021
  file_path = p.get("path")
1023
1022
  page_prefix = file_path[0:-9]
@@ -1066,30 +1065,30 @@ class ReportWrapper:
1066
1065
  matches = parse("$.visibility").find(payload)
1067
1066
  is_hidden = any(match.value == "HiddenInViewMode" for match in matches)
1068
1067
 
1069
- new_data = {
1070
- "File Path": file_path,
1071
- "Page Name": page_name,
1072
- "Page Display Name": payload.get("displayName"),
1073
- "Display Option": payload.get("displayOption"),
1074
- "Height": height,
1075
- "Width": width,
1076
- "Hidden": is_hidden,
1077
- "Active": True if page_name == active_page else False,
1078
- "Type": helper.page_type_mapping.get((width, height), "Custom"),
1079
- "Alignment": alignment_value,
1080
- "Drillthrough Target Page": drill_through,
1081
- "Visual Count": visual_count,
1082
- "Data Visual Count": data_visual_count,
1083
- "Visible Visual Count": visible_visual_count,
1084
- "Page Filter Count": page_filter_count,
1085
- "Page URL": self._get_url(page_name=page_name),
1086
- }
1087
- dfs.append(pd.DataFrame(new_data, index=[0]))
1088
-
1089
- if dfs:
1090
- df = pd.concat(dfs, ignore_index=True)
1068
+ rows.append(
1069
+ {
1070
+ "File Path": file_path,
1071
+ "Page Name": page_name,
1072
+ "Page Display Name": payload.get("displayName"),
1073
+ "Display Option": payload.get("displayOption"),
1074
+ "Height": height,
1075
+ "Width": width,
1076
+ "Hidden": is_hidden,
1077
+ "Active": True if page_name == active_page else False,
1078
+ "Type": helper.page_type_mapping.get((width, height), "Custom"),
1079
+ "Alignment": alignment_value,
1080
+ "Drillthrough Target Page": drill_through,
1081
+ "Visual Count": visual_count,
1082
+ "Data Visual Count": data_visual_count,
1083
+ "Visible Visual Count": visible_visual_count,
1084
+ "Page Filter Count": page_filter_count,
1085
+ "Page URL": self._get_url(page_name=page_name),
1086
+ }
1087
+ )
1091
1088
 
1092
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
1089
+ if rows:
1090
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
1091
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
1093
1092
 
1094
1093
  return df
1095
1094
 
@@ -1154,8 +1153,7 @@ class ReportWrapper:
1154
1153
 
1155
1154
  return any(key in all_keys for key in keys_to_check)
1156
1155
 
1157
- dfs = []
1158
-
1156
+ rows = []
1159
1157
  for v in self.__all_visuals():
1160
1158
  path = v.get("path")
1161
1159
  payload = v.get("payload")
@@ -1261,39 +1259,40 @@ class ReportWrapper:
1261
1259
  has_sparkline = contains_key(payload, ["SparklineData"])
1262
1260
  visual_name = payload.get("name")
1263
1261
 
1264
- new_data = {
1265
- "File Path": path,
1266
- "Page Name": page_id,
1267
- "Page Display Name": page_display,
1268
- "Visual Name": visual_name,
1269
- "X": pos.get("x"),
1270
- "Y": pos.get("y"),
1271
- "Z": pos.get("z"),
1272
- "Width": pos.get("width"),
1273
- "Height": pos.get("height"),
1274
- "Tab Order": pos.get("tabOrder"),
1275
- "Hidden": payload.get("isHidden", False),
1276
- "Type": visual_type,
1277
- "Display Type": visual_type_display,
1278
- "Title": title,
1279
- "SubTitle": sub_title,
1280
- "Custom Visual": visual_type in custom_visuals,
1281
- "Alt Text": alt_text,
1282
- "Show Items With No Data": show_all_data,
1283
- "Divider": divider,
1284
- "Row SubTotals": rst_value,
1285
- "Column SubTotals": cst_value,
1286
- "Slicer Type": slicer_type,
1287
- "Data Visual": is_data_visual,
1288
- "Has Sparkline": has_sparkline,
1289
- "Visual Filter Count": visual_filter_count,
1290
- "Data Limit": data_limit,
1291
- "URL": self._get_url(page_name=page_id, visual_name=visual_name),
1292
- }
1293
- dfs.append(pd.DataFrame(new_data, index=[0]))
1262
+ rows.append(
1263
+ {
1264
+ "File Path": path,
1265
+ "Page Name": page_id,
1266
+ "Page Display Name": page_display,
1267
+ "Visual Name": visual_name,
1268
+ "X": pos.get("x"),
1269
+ "Y": pos.get("y"),
1270
+ "Z": pos.get("z"),
1271
+ "Width": pos.get("width"),
1272
+ "Height": pos.get("height"),
1273
+ "Tab Order": pos.get("tabOrder"),
1274
+ "Hidden": payload.get("isHidden", False),
1275
+ "Type": visual_type,
1276
+ "Display Type": visual_type_display,
1277
+ "Title": title,
1278
+ "SubTitle": sub_title,
1279
+ "Custom Visual": visual_type in custom_visuals,
1280
+ "Alt Text": alt_text,
1281
+ "Show Items With No Data": show_all_data,
1282
+ "Divider": divider,
1283
+ "Row SubTotals": rst_value,
1284
+ "Column SubTotals": cst_value,
1285
+ "Slicer Type": slicer_type,
1286
+ "Data Visual": is_data_visual,
1287
+ "Has Sparkline": has_sparkline,
1288
+ "Visual Filter Count": visual_filter_count,
1289
+ "Data Limit": data_limit,
1290
+ "URL": self._get_url(page_name=page_id, visual_name=visual_name),
1291
+ }
1292
+ )
1294
1293
 
1295
- if dfs:
1296
- df = pd.concat(dfs, ignore_index=True)
1294
+ if rows:
1295
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
1297
1296
 
1298
1297
  grouped_df = (
1299
1298
  self.list_visual_objects()
@@ -1416,7 +1415,7 @@ class ReportWrapper:
1416
1415
 
1417
1416
  return result
1418
1417
 
1419
- dfs = []
1418
+ rows = []
1420
1419
  for v in self.__all_visuals():
1421
1420
  path = v.get("path")
1422
1421
  payload = v.get("payload")
@@ -1450,30 +1449,29 @@ class ReportWrapper:
1450
1449
  for k, v in format_mapping.items():
1451
1450
  if obj_full in k:
1452
1451
  format_value = v
1453
- new_data = {
1454
- "Page Name": page_id,
1455
- "Page Display Name": page_display,
1456
- "Visual Name": payload.get("name"),
1457
- "Table Name": table_name,
1458
- "Object Name": object_name,
1459
- "Object Type": properties[1],
1460
- "Implicit Measure": is_agg,
1461
- "Sparkline": properties[4],
1462
- "Visual Calc": properties[3],
1463
- "Format": format_value,
1464
- "Object Display Name": obj_display,
1465
- }
1466
-
1467
- dfs.append(pd.DataFrame(new_data, index=[0]))
1452
+ rows.append(
1453
+ {
1454
+ "Page Name": page_id,
1455
+ "Page Display Name": page_display,
1456
+ "Visual Name": payload.get("name"),
1457
+ "Table Name": table_name,
1458
+ "Object Name": object_name,
1459
+ "Object Type": properties[1],
1460
+ "Implicit Measure": is_agg,
1461
+ "Sparkline": properties[4],
1462
+ "Visual Calc": properties[3],
1463
+ "Format": format_value,
1464
+ "Object Display Name": obj_display,
1465
+ }
1466
+ )
1468
1467
 
1469
- if dfs:
1470
- df = pd.concat(dfs, ignore_index=True)
1468
+ if rows:
1469
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
1470
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
1471
1471
 
1472
1472
  if extended:
1473
1473
  df = self._add_extended(dataframe=df)
1474
1474
 
1475
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
1476
-
1477
1475
  return df
1478
1476
 
1479
1477
  def list_semantic_model_objects(self, extended: bool = False) -> pd.DataFrame:
@@ -1656,8 +1654,7 @@ class ReportWrapper:
1656
1654
  if o.get("path").endswith("/bookmark.json")
1657
1655
  ]
1658
1656
 
1659
- dfs = []
1660
-
1657
+ rows = []
1661
1658
  for b in bookmarks:
1662
1659
  path = b.get("path")
1663
1660
  payload = b.get("payload")
@@ -1691,21 +1688,21 @@ class ReportWrapper:
1691
1688
  else:
1692
1689
  visual_hidden = False
1693
1690
 
1694
- new_data = {
1695
- "File Path": path,
1696
- "Bookmark Name": bookmark_name,
1697
- "Bookmark Display Name": bookmark_display,
1698
- "Page Name": page_id,
1699
- "Page Display Name": page_display,
1700
- "Visual Name": visual_name,
1701
- "Visual Hidden": visual_hidden,
1702
- }
1703
- dfs.append(pd.DataFrame(new_data, index=[0]))
1704
-
1705
- if dfs:
1706
- df = pd.concat(dfs, ignore_index=True)
1691
+ rows.append(
1692
+ {
1693
+ "File Path": path,
1694
+ "Bookmark Name": bookmark_name,
1695
+ "Bookmark Display Name": bookmark_display,
1696
+ "Page Name": page_id,
1697
+ "Page Display Name": page_display,
1698
+ "Visual Name": visual_name,
1699
+ "Visual Hidden": visual_hidden,
1700
+ }
1701
+ )
1707
1702
 
1708
- _update_dataframe_datatypes(dataframe=df, column_map=columns)
1703
+ if rows:
1704
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
1705
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
1709
1706
 
1710
1707
  return df
1711
1708
 
@@ -1741,7 +1738,7 @@ class ReportWrapper:
1741
1738
 
1742
1739
  report_file = self.get(file_path=self._report_extensions_path)
1743
1740
 
1744
- dfs = []
1741
+ rows = []
1745
1742
  for e in report_file.get("entities", []):
1746
1743
  table_name = e.get("name")
1747
1744
  for m in e.get("measures", []):
@@ -1751,18 +1748,19 @@ class ReportWrapper:
1751
1748
  format_string = m.get("formatString")
1752
1749
  data_category = m.get("dataCategory")
1753
1750
 
1754
- new_data = {
1755
- "Measure Name": measure_name,
1756
- "Table Name": table_name,
1757
- "Expression": expr,
1758
- "Data Type": data_type,
1759
- "Format String": format_string,
1760
- "Data Category": data_category,
1761
- }
1762
- dfs.append(pd.DataFrame(new_data, index=[0]))
1751
+ rows.append(
1752
+ {
1753
+ "Measure Name": measure_name,
1754
+ "Table Name": table_name,
1755
+ "Expression": expr,
1756
+ "Data Type": data_type,
1757
+ "Format String": format_string,
1758
+ "Data Category": data_category,
1759
+ }
1760
+ )
1763
1761
 
1764
- if dfs:
1765
- df = pd.concat(dfs, ignore_index=True)
1762
+ if rows:
1763
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
1766
1764
 
1767
1765
  return df
1768
1766
 
@@ -2125,13 +2123,14 @@ class ReportWrapper:
2125
2123
  if isinstance(measures, str):
2126
2124
  measures = [measures]
2127
2125
 
2128
- file = self.get(file_path=self._report_extensions_path)
2129
-
2130
- mCount = 0
2126
+ entities = self.get(
2127
+ file_path=self._report_extensions_path, json_path="$.entities"
2128
+ )
2131
2129
  with connect_semantic_model(
2132
- dataset=dataset_id, readonly=False, workspace=dataset_workspace_id
2130
+ dataset=dataset_id, readonly=self._readonly, workspace=dataset_workspace_id
2133
2131
  ) as tom:
2134
2132
  existing_measures = [m.Name for m in tom.all_measures()]
2133
+ # Add measure to semantic model
2135
2134
  for _, r in rlm.iterrows():
2136
2135
  table_name = r["Table Name"]
2137
2136
  measure_name = r["Measure Name"]
@@ -2153,20 +2152,25 @@ class ReportWrapper:
2153
2152
  name="semanticlinklabs",
2154
2153
  value="reportlevelmeasure",
2155
2154
  )
2156
- mCount += 1
2157
- # Remove measures from the json
2158
- if measures is not None and len(measures) < mCount:
2159
- for e in file["entities"]:
2160
- e["measures"] = [
2161
- measure
2162
- for measure in e["measures"]
2163
- if measure["name"] not in measures
2164
- ]
2165
- file["entities"] = [
2166
- entity for entity in file["entities"] if entity["measures"]
2167
- ]
2168
- self.update(file_path=self._report_extensions_path, payload=file)
2169
- # what about if measures is None?
2155
+
2156
+ for entity in entities:
2157
+ if entity.get("name") == table_name:
2158
+ entity["measures"] = [
2159
+ m
2160
+ for m in entity.get("measures", [])
2161
+ if m.get("name") != measure_name
2162
+ ]
2163
+ entities = [e for e in entities if e.get("measures")]
2164
+ self.set_json(
2165
+ file_path=self._report_extensions_path,
2166
+ json_path="$.entities",
2167
+ json_value=entities,
2168
+ )
2169
+ if not entities:
2170
+ self.remove(
2171
+ file_path=self._report_extensions_path,
2172
+ verbose=False,
2173
+ )
2170
2174
 
2171
2175
  if not self._readonly:
2172
2176
  print(
@@ -2195,16 +2199,17 @@ class ReportWrapper:
2195
2199
  visual_mapping = self._visual_page_mapping()
2196
2200
  report_file = self.get(file_path="definition/report.json")
2197
2201
 
2198
- dfs = []
2202
+ rows = []
2199
2203
  if "annotations" in report_file:
2200
2204
  for ann in report_file["annotations"]:
2201
- new_data = {
2202
- "Type": "Report",
2203
- "Object Name": self._report_name,
2204
- "Annotation Name": ann.get("name"),
2205
- "Annotation Value": ann.get("value"),
2206
- }
2207
- dfs.append(pd.DataFrame(new_data, index=[0]))
2205
+ rows.append(
2206
+ {
2207
+ "Type": "Report",
2208
+ "Object Name": self._report_name,
2209
+ "Annotation Name": ann.get("name"),
2210
+ "Annotation Value": ann.get("value"),
2211
+ }
2212
+ )
2208
2213
 
2209
2214
  for p in self.__all_pages():
2210
2215
  path = p.get("path")
@@ -2212,13 +2217,14 @@ class ReportWrapper:
2212
2217
  page_name = payload.get("displayName")
2213
2218
  if "annotations" in payload:
2214
2219
  for ann in payload["annotations"]:
2215
- new_data = {
2216
- "Type": "Page",
2217
- "Object Name": page_name,
2218
- "Annotation Name": ann.get("name"),
2219
- "Annotation Value": ann.get("value"),
2220
- }
2221
- dfs.append(pd.DataFrame(new_data, index=[0]))
2220
+ rows.append(
2221
+ {
2222
+ "Type": "Page",
2223
+ "Object Name": page_name,
2224
+ "Annotation Name": ann.get("name"),
2225
+ "Annotation Value": ann.get("value"),
2226
+ }
2227
+ )
2222
2228
 
2223
2229
  for v in self.__all_visuals():
2224
2230
  path = v.get("path")
@@ -2227,16 +2233,17 @@ class ReportWrapper:
2227
2233
  visual_name = payload.get("name")
2228
2234
  if "annotations" in payload:
2229
2235
  for ann in payload["annotations"]:
2230
- new_data = {
2231
- "Type": "Visual",
2232
- "Object Name": f"'{page_display}'[{visual_name}]",
2233
- "Annotation Name": ann.get("name"),
2234
- "Annotation Value": ann.get("value"),
2235
- }
2236
- dfs.append(pd.DataFrame(new_data, index=[0]))
2236
+ rows.append(
2237
+ {
2238
+ "Type": "Visual",
2239
+ "Object Name": f"'{page_display}'[{visual_name}]",
2240
+ "Annotation Name": ann.get("name"),
2241
+ "Annotation Value": ann.get("value"),
2242
+ }
2243
+ )
2237
2244
 
2238
- if dfs:
2239
- df = pd.concat(dfs, ignore_index=True)
2245
+ if rows:
2246
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
2240
2247
 
2241
2248
  return df
2242
2249
 
@@ -2682,8 +2689,12 @@ class ReportWrapper:
2682
2689
 
2683
2690
  selector_mapping = {
2684
2691
  key: {
2685
- ".".join(k): v # join tuple with '.' to form the string
2686
- for k, v in value.items()
2692
+ variant: f"{table}.{new_col}"
2693
+ for (table, col), new_col in value.items()
2694
+ for variant in (
2695
+ f"({table}.{col})",
2696
+ f"[{table}.{col}]",
2697
+ )
2687
2698
  }
2688
2699
  for key, value in mapping.items()
2689
2700
  }
@@ -2721,28 +2732,15 @@ class ReportWrapper:
2721
2732
 
2722
2733
  # Check both measures and columns
2723
2734
  for category in ["measures", "columns"]:
2724
- if obj in selector_mapping.get(category, {}):
2725
- value = selector_mapping[category][obj]
2726
-
2727
- # Find original tuple key from mapping for this category
2728
- for tup_key in mapping.get(category, {}).keys():
2729
- if ".".join(tup_key) == obj:
2730
- key = tup_key[
2731
- 0
2732
- ] # first element of tuple, like table name
2733
- new_value = f"{key}.{value}"
2734
-
2735
- # Update the dictionary node holding "metadata"
2736
- if isinstance(match.context.value, dict):
2737
- match.context.value["metadata"] = new_value
2738
- else:
2739
- print(
2740
- f"Warning: Cannot assign metadata, context is {type(match.context.value)}"
2741
- )
2742
- break
2743
-
2744
- # Once found in one category, no need to check the other
2745
- break
2735
+ for i, value in selector_mapping.get(category).items():
2736
+ if i in obj:
2737
+ prefix = i[0]
2738
+ if prefix == "[":
2739
+ new_value = obj.replace(i, f"[{value}]")
2740
+ else:
2741
+ new_value = obj.replace(i, f"({value})")
2742
+ match.context.value["metadata"] = new_value
2743
+ break
2746
2744
 
2747
2745
  # Rename Column Properties
2748
2746
  for match in col_expr_path.find(payload):