castor-extractor 0.20.0__py3-none-any.whl → 0.20.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (38)
  1. CHANGELOG.md +16 -0
  2. castor_extractor/commands/extract_thoughtspot.py +18 -0
  3. castor_extractor/utils/client/api/client.py +7 -2
  4. castor_extractor/utils/client/api/safe_request.py +6 -3
  5. castor_extractor/visualization/looker/api/constants.py +0 -4
  6. castor_extractor/visualization/powerbi/__init__.py +1 -1
  7. castor_extractor/visualization/powerbi/assets.py +7 -1
  8. castor_extractor/visualization/powerbi/client/__init__.py +2 -3
  9. castor_extractor/visualization/powerbi/client/authentication.py +27 -0
  10. castor_extractor/visualization/powerbi/client/client.py +207 -0
  11. castor_extractor/visualization/powerbi/client/client_test.py +173 -0
  12. castor_extractor/visualization/powerbi/client/constants.py +0 -67
  13. castor_extractor/visualization/powerbi/client/credentials.py +3 -4
  14. castor_extractor/visualization/powerbi/client/credentials_test.py +3 -4
  15. castor_extractor/visualization/powerbi/client/endpoints.py +65 -0
  16. castor_extractor/visualization/powerbi/client/pagination.py +32 -0
  17. castor_extractor/visualization/powerbi/extract.py +14 -9
  18. castor_extractor/visualization/thoughtspot/__init__.py +3 -0
  19. castor_extractor/visualization/thoughtspot/assets.py +9 -0
  20. castor_extractor/visualization/thoughtspot/client/__init__.py +2 -0
  21. castor_extractor/visualization/thoughtspot/client/client.py +120 -0
  22. castor_extractor/visualization/thoughtspot/client/credentials.py +18 -0
  23. castor_extractor/visualization/thoughtspot/client/endpoints.py +12 -0
  24. castor_extractor/visualization/thoughtspot/client/utils.py +25 -0
  25. castor_extractor/visualization/thoughtspot/client/utils_test.py +57 -0
  26. castor_extractor/visualization/thoughtspot/extract.py +49 -0
  27. castor_extractor/warehouse/salesforce/client.py +1 -1
  28. castor_extractor/warehouse/salesforce/format.py +40 -30
  29. castor_extractor/warehouse/salesforce/format_test.py +61 -24
  30. {castor_extractor-0.20.0.dist-info → castor_extractor-0.20.4.dist-info}/METADATA +17 -1
  31. {castor_extractor-0.20.0.dist-info → castor_extractor-0.20.4.dist-info}/RECORD +34 -23
  32. {castor_extractor-0.20.0.dist-info → castor_extractor-0.20.4.dist-info}/entry_points.txt +1 -0
  33. castor_extractor/visualization/powerbi/client/rest.py +0 -305
  34. castor_extractor/visualization/powerbi/client/rest_test.py +0 -290
  35. castor_extractor/visualization/powerbi/client/utils.py +0 -19
  36. castor_extractor/visualization/powerbi/client/utils_test.py +0 -24
  37. {castor_extractor-0.20.0.dist-info → castor_extractor-0.20.4.dist-info}/LICENCE +0 -0
  38. {castor_extractor-0.20.0.dist-info → castor_extractor-0.20.4.dist-info}/WHEEL +0 -0
castor_extractor/visualization/powerbi/client/credentials_test.py
@@ -1,5 +1,4 @@
-from .constants import Urls
-from .credentials import PowerbiCredentials
+from .credentials import DEFAULT_SCOPE, PowerbiCredentials


 def test_credentials():
@@ -13,7 +12,7 @@ def test_credentials():
         client_id=client_id,
         secret=secret,
     )
-    assert credentials.scopes == [Urls.DEFAULT_SCOPE]
+    assert credentials.scopes == [DEFAULT_SCOPE]

     credentials = PowerbiCredentials(
         tenant_id=tenant_id,
@@ -21,7 +20,7 @@ def test_credentials():
         secret=secret,
         scopes=None,
     )
-    assert credentials.scopes == [Urls.DEFAULT_SCOPE]
+    assert credentials.scopes == [DEFAULT_SCOPE]

     # empty scopes
     credentials = PowerbiCredentials(
castor_extractor/visualization/powerbi/client/endpoints.py (new file)
@@ -0,0 +1,65 @@
+from datetime import date, datetime
+from typing import Optional, Tuple
+
+from ....utils import at_midnight, format_date, yesterday
+
+_CLIENT_APP_BASE = "https://login.microsoftonline.com"
+_REST_API_BASE_PATH = "https://api.powerbi.com/v1.0/myorg"
+
+
+def _time_filter(day: Optional[date]) -> Tuple[datetime, datetime]:
+    target_day = day or yesterday()
+    start = at_midnight(target_day)
+    end = datetime.combine(target_day, datetime.max.time())
+    return start, end
+
+
+class PowerBiEndpointFactory:
+    @classmethod
+    def activity_events(cls, day: Optional[date]) -> str:
+        start, end = _time_filter(day)
+        url = f"{_REST_API_BASE_PATH}/admin/activityevents"
+        url += "?$filter=Activity eq 'viewreport'"
+        url += f"&startDateTime='{format_date(start)}'"
+        url += f"&endDateTime='{format_date(end)}'"
+        return url
+
+    @classmethod
+    def authority(cls, tenant_id: str) -> str:
+        return f"{_CLIENT_APP_BASE}/{tenant_id}"
+
+    @classmethod
+    def dashboards(cls) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/dashboards"
+
+    @classmethod
+    def datasets(cls) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/datasets"
+
+    @classmethod
+    def groups(cls) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/groups"
+
+    @classmethod
+    def metadata_create_scan(cls) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/workspaces/getInfo"
+
+    @classmethod
+    def metadata_scan_result(cls, scan_id: int) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/workspaces/scanResult/{scan_id}"
+
+    @classmethod
+    def metadata_scan_status(cls, scan_id: int) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/workspaces/scanStatus/{scan_id}"
+
+    @classmethod
+    def pages(cls, report_id: str) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/reports/{report_id}/pages"
+
+    @classmethod
+    def reports(cls) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/reports"
+
+    @classmethod
+    def workspace_ids(cls) -> str:
+        return f"{_REST_API_BASE_PATH}/admin/workspaces/modified"
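For orientation, a minimal usage sketch (illustrative, not part of the diff) of how the factory composes admin API URLs:

from datetime import date

# Passing a day builds the filtered activity-events URL; passing None
# falls back to yesterday via _time_filter. The formatted date values
# depend on format_date, so they are elided here.
url = PowerBiEndpointFactory.activity_events(date(2024, 9, 19))
# -> https://api.powerbi.com/v1.0/myorg/admin/activityevents?$filter=...

# Static endpoints are plain strings over the base path:
assert PowerBiEndpointFactory.reports() == "https://api.powerbi.com/v1.0/myorg/admin/reports"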
castor_extractor/visualization/powerbi/client/pagination.py (new file)
@@ -0,0 +1,32 @@
+from typing import Optional
+
+from pydantic import ConfigDict
+from pydantic.alias_generators import to_camel
+
+from ....utils import (
+    FetchNextPageBy,
+    PaginationModel,
+)
+
+
+class PowerBiPagination(PaginationModel):
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        from_attributes=True,
+    )
+
+    fetch_by: FetchNextPageBy = FetchNextPageBy.URL
+
+    activity_event_entities: list
+    continuation_uri: Optional[str] = None
+    last_result_set: bool = False
+
+    def is_last(self) -> bool:
+        return self.last_result_set
+
+    def next_page_payload(self) -> Optional[str]:
+        return self.continuation_uri
+
+    def page_results(self) -> list:
+        return self.activity_event_entities
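A minimal sketch of how this model reads one Activity Events page. The camelCase payload keys map onto the snake_case fields through the to_camel alias generator; the payload below is made up, and the is_last/next_page_payload/page_results contract is inferred from the PaginationModel method names:

page = PowerBiPagination.model_validate(
    {
        "activityEventEntities": [{"Activity": "viewreport"}],  # fabricated sample event
        "continuationUri": "https://api.powerbi.com/...",  # truncated placeholder
        "lastResultSet": False,
    }
)
assert page.page_results() == [{"Activity": "viewreport"}]
assert page.next_page_payload() == "https://api.powerbi.com/..."
assert not page.is_last()  # caller should follow continuationUri for the next page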
castor_extractor/visualization/powerbi/extract.py
@@ -1,4 +1,5 @@
-from typing import Iterable, List, Tuple, Union
+import logging
+from typing import Iterable, Tuple, Union

 from ...utils import (
     OUTPUT_DIR,
@@ -9,19 +10,23 @@ from ...utils import (
     write_json,
     write_summary,
 )
-from .assets import METADATA_ASSETS, PowerBiAsset
-from .client import Client, PowerbiCredentials
+from .assets import METADATA_ASSETS, REPORTS_ASSETS, PowerBiAsset
+from .client import PowerbiClient, PowerbiCredentials
+
+logger = logging.getLogger(__name__)


 def iterate_all_data(
-    client: Client,
-) -> Iterable[Tuple[PowerBiAsset, Union[List, dict]]]:
+    client: PowerbiClient,
+) -> Iterable[Tuple[PowerBiAsset, Union[list, dict]]]:
     for asset in PowerBiAsset:
-        if asset in METADATA_ASSETS:
+        if asset in METADATA_ASSETS + REPORTS_ASSETS:
             continue

-        data = client.fetch(asset)
-        yield asset, deep_serialize(data)
+        logger.info(f"Extracting {asset.name} from API")
+        data = list(deep_serialize(client.fetch(asset)))
+        yield asset, data
+        logger.info(f"Extracted {len(data)} {asset.name} from API")


def extract_all(**kwargs) -> None:
@@ -31,7 +36,7 @@ def extract_all(**kwargs) -> None:
     """
     _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
     creds = PowerbiCredentials(**kwargs)
-    client = Client(creds)
+    client = PowerbiClient(creds)
     ts = current_timestamp()

     for key, data in iterate_all_data(client):
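A hedged end-to-end example with placeholder credentials (the PowerbiCredentials fields are taken from the test file above); note that iterate_all_data now materializes each asset with list() so it can log the record count after extraction:

extract_all(
    tenant_id="00000000-0000-0000-0000-000000000000",  # placeholder values
    client_id="00000000-0000-0000-0000-000000000000",
    secret="***",
    output="/tmp/castor",  # falls back to the OUTPUT_DIR env var when omitted
)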
castor_extractor/visualization/thoughtspot/__init__.py (new file)
@@ -0,0 +1,3 @@
+from .assets import ThoughtspotAsset
+from .client import ThoughtspotClient, ThoughtspotCredentials
+from .extract import extract_all, iterate_all_data
castor_extractor/visualization/thoughtspot/assets.py (new file)
@@ -0,0 +1,9 @@
+from ...types import ExternalAsset
+
+
+class ThoughtspotAsset(ExternalAsset):
+    """Thoughtspot assets"""
+
+    LIVEBOARDS = "liveboards"
+    LOGICAL_TABLES = "logical_tables"
+    USAGES = "usages"
castor_extractor/visualization/thoughtspot/client/__init__.py (new file)
@@ -0,0 +1,2 @@
+from .client import ThoughtspotClient
+from .credentials import ThoughtspotCredentials
castor_extractor/visualization/thoughtspot/client/client.py (new file)
@@ -0,0 +1,120 @@
+from typing import Dict, Iterator, Optional
+
+import requests
+
+from ....utils import (
+    APIClient,
+    BearerAuth,
+    RequestSafeMode,
+    build_url,
+    handle_response,
+)
+from ..assets import (
+    ThoughtspotAsset,
+)
+from .credentials import (
+    ThoughtspotCredentials,
+)
+from .endpoints import (
+    ThoughtspotEndpointFactory,
+)
+from .utils import (
+    usage_liveboard_reader,
+)
+
+_AUTH_TIMEOUT_S = 60
+_THOUGHTSPOT_HEADERS = {
+    "X-Requested-By": "ThoughtSpot",
+    "Accept": "application/json",
+    "Content-Type": "application/json",
+}
+_METADATA_BATCH_SIZE = 100
+_USAGE_LIVEBOARD_ID = "bea79810-145f-4ad0-a02c-4177a6e7d861"
+# By default, no errors are ignored for the moment
+THOUGHTSPOT_SAFE_MODE = RequestSafeMode()
+
+
+class ThoughtspotBearerAuth(BearerAuth):
+    def __init__(self, host: str, token_payload: Dict[str, str]):
+        auth_endpoint = ThoughtspotEndpointFactory.authentication()
+        self.authentication_url = build_url(host, auth_endpoint)
+        self.token_payload = token_payload
+
+    def fetch_token(self):
+        token_api_path = self.authentication_url
+        token_response = requests.post(
+            token_api_path, data=self.token_payload, timeout=_AUTH_TIMEOUT_S
+        )
+        return handle_response(token_response)["token"]
+
+
+class ThoughtspotClient(APIClient):
+    def __init__(
+        self,
+        credentials: ThoughtspotCredentials,
+        safe_mode: Optional[RequestSafeMode] = None,
+    ):
+        auth = ThoughtspotBearerAuth(
+            host=credentials.base_url,
+            token_payload=credentials.dict(),
+        )
+        super().__init__(
+            host=credentials.base_url,
+            auth=auth,
+            headers=_THOUGHTSPOT_HEADERS,
+            safe_mode=safe_mode or THOUGHTSPOT_SAFE_MODE,
+        )
+
+    def _metadata_search(
+        self,
+        metadata_type: str,
+    ) -> Iterator[dict]:
+        offset = 0
+        while True:
+            metadata = self._post(
+                ThoughtspotEndpointFactory.metadata_search(),
+                data={
+                    "metadata": [{"type": metadata_type}],
+                    "include_details": True,
+                    "record_size": _METADATA_BATCH_SIZE,
+                    "record_offset": offset,
+                },
+            )
+            yield from metadata
+            if len(metadata) < _METADATA_BATCH_SIZE:
+                break
+            offset = offset + _METADATA_BATCH_SIZE
+
+    def _get_all_liveboards(self) -> Iterator[dict]:
+        yield from self._metadata_search(metadata_type="LIVEBOARD")
+
+    def _get_all_columns(self) -> Iterator[dict]:
+        yield from self._metadata_search(metadata_type="LOGICAL_COLUMN")
+
+    def _get_all_tables(self) -> Iterator[dict]:
+        yield from self._metadata_search(metadata_type="LOGICAL_TABLE")
+
+    def _get_liveboards_usages(self) -> Iterator[dict]:
+        data = self._post(
+            endpoint=ThoughtspotEndpointFactory.liveboard(),
+            headers={"Accept": "application/octet-stream"},
+            data={
+                "metadata_identifier": _USAGE_LIVEBOARD_ID,
+                "file_format": "CSV",
+                "visualization_identifiers": [
+                    "Popular Liveboards Last 30 Days"
+                ],
+            },
+            handler=lambda x: x.text,
+        )
+        yield from usage_liveboard_reader(data)
+
+    def fetch(self, asset: ThoughtspotAsset):
+        if asset == ThoughtspotAsset.LIVEBOARDS:
+            yield from self._get_all_liveboards()
+
+        if asset == ThoughtspotAsset.USAGES:
+            yield from self._get_liveboards_usages()
+
+        if asset == ThoughtspotAsset.LOGICAL_TABLES:
+            yield from self._get_all_tables()
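A usage sketch with an illustrative host and credentials; fetch() is a generator, so nothing is requested until it is consumed:

credentials = ThoughtspotCredentials(
    username="alice",  # illustrative values, not real credentials
    password="s3cret",
    base_url="https://mycompany.thoughtspot.cloud",
)
client = ThoughtspotClient(credentials=credentials)

# Liveboards and logical tables come from the paginated metadata/search
# endpoint; usages come from the CSV export of a built-in usage liveboard.
liveboards = list(client.fetch(ThoughtspotAsset.LIVEBOARDS))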
castor_extractor/visualization/thoughtspot/client/credentials.py (new file)
@@ -0,0 +1,18 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+THOUGHTSPOT_ENV_PREFIX = "CASTOR_THOUGHTSPOT_"
+
+
+class ThoughtspotCredentials(BaseSettings):
+    """Class to handle Thoughtspot rest API permissions"""
+
+    model_config = SettingsConfigDict(
+        env_prefix=THOUGHTSPOT_ENV_PREFIX,
+        extra="ignore",
+        populate_by_name=True,
+    )
+
+    username: str
+    password: str = Field(repr=False)
+    base_url: str
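Since this is a pydantic-settings model with env_prefix set, the fields can also come from environment variables, e.g. (illustrative values):

import os

os.environ["CASTOR_THOUGHTSPOT_USERNAME"] = "alice"
os.environ["CASTOR_THOUGHTSPOT_PASSWORD"] = "s3cret"
os.environ["CASTOR_THOUGHTSPOT_BASE_URL"] = "https://mycompany.thoughtspot.cloud"

creds = ThoughtspotCredentials()  # resolved from CASTOR_THOUGHTSPOT_* variables
print(creds)  # the password is excluded from the repr via Field(repr=False)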
castor_extractor/visualization/thoughtspot/client/endpoints.py (new file)
@@ -0,0 +1,12 @@
+class ThoughtspotEndpointFactory:
+    @classmethod
+    def authentication(cls) -> str:
+        return "api/rest/2.0/auth/token/full"
+
+    @classmethod
+    def metadata_search(cls) -> str:
+        return "api/rest/2.0/metadata/search"
+
+    @classmethod
+    def liveboard(cls) -> str:
+        return "api/rest/2.0/report/liveboard"
castor_extractor/visualization/thoughtspot/client/utils.py (new file)
@@ -0,0 +1,25 @@
+import csv
+from io import StringIO
+from typing import Iterator
+
+
+def usage_liveboard_reader(usage_liveboard_csv: str) -> Iterator[dict]:
+    """
+    Converts a CSV string into an iterator of dictionaries, skipping the
+    first 7 lines (6 banner lines plus a blank line) and using the 8th
+    line as the header. The skipped banner lines look like the following:
+
+    "Data extract produced by Castor on 09/19/2024 06:54"
+    "Filters applied on data :"
+    "User Action IN [pinboard_embed_view,pinboard_tspublic_no_runtime_filter,pinboard_tspublic_runtime_filter,pinboard_view]"
+    "Pinboard NOT IN [mlm - availability pinboard,null]"
+    "Timestamp >= 20240820 00:00:00 < 20240919 00:00:00"
+    "Timestamp >= 20240919 00:00:00 < 20240920 00:00:00"
+
+    """
+    csv_file = StringIO(usage_liveboard_csv)
+
+    for _ in range(7):
+        next(csv_file)
+
+    yield from csv.DictReader(csv_file)
castor_extractor/visualization/thoughtspot/client/utils_test.py (new file)
@@ -0,0 +1,57 @@
+from .utils import (
+    usage_liveboard_reader,
+)
+
+VALID_CSV = '''"Data extract produced by Castor on 09/19/2024 06:54"
+"Filters applied on data :"
+"User Action IN [pinboard_embed_view,pinboard_tspublic_no_runtime_filter,pinboard_tspublic_runtime_filter,pinboard_view]"
+"Pinboard NOT IN [mlm - availability pinboard,null]"
+"Timestamp >= 20240820 00:00:00 < 20240919 00:00:00"
+"Timestamp >= 20240919 00:00:00 < 20240920 00:00:00"
+""
+"Pinboard","Pinboard Views","Unique Number of User"
+"Market Report","559","19"
+"Retailer report","204","14"
+"Second-hand market","72","6"
+"September test","25","2"'''
+
+
+# Invalid CSV input (missing data rows)
+INVALID_CSV = '''"Data extract produced by Castor on 09/19/2024 06:54"
+"Filters applied on data :"
+"User Action IN [pinboard_embed_view,pinboard_tspublic_no_runtime_filter,pinboard_tspublic_runtime_filter,pinboard_view]"
+"Pinboard NOT IN [mlm - availability pinboard,null]"
+"Timestamp >= 20240820 00:00:00 < 20240919 00:00:00"
+"Timestamp >= 20240919 00:00:00 < 20240920 00:00:00"
+""'''
+
+
+def test_usage_liveboard_reader():
+    expected_output = [
+        {
+            "Pinboard": "Market Report",
+            "Pinboard Views": "559",
+            "Unique Number of User": "19",
+        },
+        {
+            "Pinboard": "Retailer report",
+            "Pinboard Views": "204",
+            "Unique Number of User": "14",
+        },
+        {
+            "Pinboard": "Second-hand market",
+            "Pinboard Views": "72",
+            "Unique Number of User": "6",
+        },
+        {
+            "Pinboard": "September test",
+            "Pinboard Views": "25",
+            "Unique Number of User": "2",
+        },
+    ]
+
+    result = list(usage_liveboard_reader(VALID_CSV))
+    assert result == expected_output
+
+    result = list(usage_liveboard_reader(INVALID_CSV))
+    assert result == []  # Expect an empty result since there is no data
castor_extractor/visualization/thoughtspot/extract.py (new file)
@@ -0,0 +1,49 @@
+import logging
+from typing import Iterable, Iterator, Tuple, Union
+
+from ...utils import (
+    OUTPUT_DIR,
+    current_timestamp,
+    deep_serialize,
+    from_env,
+    get_output_filename,
+    write_json,
+    write_summary,
+)
+from .assets import ThoughtspotAsset
+from .client import (
+    ThoughtspotClient,
+    ThoughtspotCredentials,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def iterate_all_data(
+    client: ThoughtspotClient,
+) -> Iterable[Tuple[ThoughtspotAsset, Union[list, Iterator, dict]]]:
+    """Iterate over the extracted data from Thoughtspot"""
+
+    for asset in ThoughtspotAsset:
+        logger.info(f"Extracting {asset.value} from API")
+        data = client.fetch(asset)
+        yield asset, deep_serialize(data)
+
+
+def extract_all(**kwargs) -> None:
+    """
+    Extract data from Thoughtspot API
+    Store the output files locally under the given output_directory
+    """
+    _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
+
+    credentials = ThoughtspotCredentials(**kwargs)
+    client = ThoughtspotClient(credentials=credentials)
+
+    ts = current_timestamp()
+
+    for key, data in iterate_all_data(client):
+        filename = get_output_filename(key.name.lower(), _output_directory, ts)
+        write_json(filename, list(data))
+
+    write_summary(_output_directory, ts, host=credentials.base_url)
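As with the Power BI extractor, a hedged end-to-end example (placeholder values); each asset is written to a timestamped JSON file, and a summary closes the run:

extract_all(
    username="alice",  # placeholder credentials
    password="s3cret",
    base_url="https://mycompany.thoughtspot.cloud",
    output="/tmp/castor",  # falls back to the OUTPUT_DIR env var when omitted
)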
castor_extractor/warehouse/salesforce/client.py
@@ -89,4 +89,4 @@ class SalesforceClient(SalesforceBaseClient):
         ):
             fields = self.fetch_fields(api_name)
             sobject_fields[table_name] = fields
-        return self.formatter.columns(sobject_fields)
+        return list(self.formatter.columns(sobject_fields))
castor_extractor/warehouse/salesforce/format.py
@@ -1,12 +1,30 @@
 from typing import Any, Dict, Iterator, List

+from ...utils import group_by
 from .constants import SCHEMA_NAME

+_HAS_DUPLICATE_KEY = "#has_duplicate"
+

 def _clean(raw: str) -> str:
     return raw.strip('"')


+def _name(sobject: dict) -> str:
+    """
+    compute the name for tables and columns
+    - when the label is unique: label
+    - when empty: api_name; when duplicated: label (api_name)
+    """
+    label = sobject["Label"]
+    api_name = sobject["QualifiedApiName"]
+    if not label:
+        return api_name
+    if not sobject[_HAS_DUPLICATE_KEY]:
+        return label
+    return f"{label} ({api_name})"
+
+
 def _field_description(field: Dict[str, Any]) -> str:
     context: Dict[str, str] = {}

@@ -24,7 +42,7 @@ def _field_description(field: Dict[str, Any]) -> str:


 def _to_column_payload(field: dict, position: int, table_name: str) -> dict:
-    field_name = field["QualifiedApiName"]
+    field_name = _name(field)
     return {
         "column_name": field_name,
         "data_type": field.get("DataType"),
@@ -37,33 +55,31 @@ def _to_column_payload(field: dict, position: int, table_name: str) -> dict:
     }


-def _to_table_payload(sobject: dict, table_name: str) -> dict:
+def _to_table_payload(sobject: dict) -> dict:
+    name = _name(sobject)
     return {
-        "id": table_name,
+        "id": name,
         "api_name": sobject["QualifiedApiName"],
         "label": sobject["Label"],
         "schema_id": SCHEMA_NAME,
-        "table_name": table_name,
+        "table_name": name,
         "description": sobject.get("Description"),
         "tags": [],
         "type": "TABLE",
     }


-def _merge_label_and_api_name(sobject: dict) -> dict:
-    label = sobject["Label"]
-    api_name = sobject["QualifiedApiName"]
-    table_name = f"{label} ({api_name})"
-    return _to_table_payload(sobject, table_name)
-
-
-def _by_label(sobjects: List[dict]) -> Dict[str, List[dict]]:
-    by_label: Dict[str, List[dict]] = dict()
+def _detect_duplicates(sobjects: List[dict]) -> List[dict]:
+    """
+    enrich the given data with "has_duplicate" flag:
+    - True when another asset has the same Label in the list
+    - False otherwise
+    """
+    by_label = group_by("Label", sobjects)
     for sobject in sobjects:
         label = sobject["Label"]
-        similar_sobjects = by_label.setdefault(label, [])
-        similar_sobjects.append(sobject)
-    return by_label
+        sobject[_HAS_DUPLICATE_KEY] = len(by_label[label]) > 1
+    return sobjects


 class SalesforceFormatter:
@@ -76,21 +92,15 @@ class SalesforceFormatter:
     def tables(sobjects: List[dict]) -> Iterator[dict]:
         """
         formats the raw list of sobjects to tables
-        if two tables share the same label, then we add the api name as well
         """
-        by_label = _by_label(sobjects)
-        for label, similars in by_label.items():
-            if len(similars) > 1:
-                yield from [_merge_label_and_api_name(s) for s in similars]
-            else:
-                sobject = similars[0]  # unique sobject on label
-                yield _to_table_payload(sobject, label)
+        sobjects = _detect_duplicates(sobjects)
+        for sobject in sobjects:
+            yield _to_table_payload(sobject)

     @staticmethod
-    def columns(sobject_fields: Dict[str, List[dict]]) -> List[dict]:
+    def columns(sobject_fields: Dict[str, List[dict]]) -> Iterator[dict]:
         """formats the raw list of sobject fields to columns"""
-        return [
-            _to_column_payload(field, idx, table_name)
-            for table_name, fields in sobject_fields.items()
-            for idx, field in enumerate(fields)
-        ]
+        for table_name, fields in sobject_fields.items():
+            fields = _detect_duplicates(fields)
+            for index, field in enumerate(fields):
+                yield _to_column_payload(field, index, table_name)
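To illustrate the new naming rules with made-up sobjects: duplicated labels get the api name appended, unique labels stay as-is, and empty labels fall back to the api name:

sobjects = [
    {"Label": "Account", "QualifiedApiName": "Account"},
    {"Label": "Account", "QualifiedApiName": "Account__c"},
    {"Label": "Contact", "QualifiedApiName": "Contact"},
    {"Label": "", "QualifiedApiName": "Internal__c"},
]
names = [t["table_name"] for t in SalesforceFormatter.tables(sobjects)]
# -> ['Account (Account)', 'Account (Account__c)', 'Contact', 'Internal__c']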