castor-extractor 0.24.0__py3-none-any.whl → 0.24.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (30)
  1. CHANGELOG.md +8 -0
  2. README.md +2 -0
  3. castor_extractor/commands/extract_looker_studio.py +30 -0
  4. castor_extractor/commands/extract_tableau.py +2 -2
  5. castor_extractor/visualization/looker_studio/__init__.py +1 -0
  6. castor_extractor/visualization/looker_studio/assets.py +1 -0
  7. castor_extractor/visualization/looker_studio/client/client.py +56 -6
  8. castor_extractor/visualization/looker_studio/client/credentials.py +5 -0
  9. castor_extractor/visualization/looker_studio/client/queries/query.sql +52 -0
  10. castor_extractor/visualization/looker_studio/extract.py +84 -0
  11. castor_extractor/visualization/tableau/__init__.py +3 -0
  12. castor_extractor/visualization/{tableau_revamp → tableau}/assets.py +4 -4
  13. castor_extractor/visualization/tableau/client/__init__.py +2 -0
  14. castor_extractor/visualization/{tableau_revamp → tableau}/client/client.py +15 -15
  15. castor_extractor/visualization/{tableau_revamp → tableau}/client/client_metadata_api.py +9 -9
  16. castor_extractor/visualization/{tableau_revamp → tableau}/client/client_rest_api.py +6 -6
  17. castor_extractor/visualization/{tableau_revamp → tableau}/client/client_tsc.py +9 -9
  18. castor_extractor/visualization/{tableau_revamp → tableau}/client/credentials.py +2 -2
  19. castor_extractor/visualization/{tableau_revamp → tableau}/client/gql_queries.py +8 -8
  20. castor_extractor/visualization/{tableau_revamp → tableau}/client/rest_fields.py +10 -10
  21. castor_extractor/visualization/{tableau_revamp → tableau}/extract.py +7 -7
  22. {castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/METADATA +11 -1
  23. {castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/RECORD +28 -25
  24. {castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/entry_points.txt +1 -0
  25. castor_extractor/visualization/tableau_revamp/__init__.py +0 -3
  26. castor_extractor/visualization/tableau_revamp/client/__init__.py +0 -2
  27. /castor_extractor/visualization/{tableau_revamp → tableau}/client/errors.py +0 -0
  28. /castor_extractor/visualization/{tableau_revamp → tableau}/constants.py +0 -0
  29. {castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/LICENCE +0 -0
  30. {castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/WHEEL +0 -0
CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
  # Changelog
 
+ ## 0.24.2 - 2025-03-17
+
+ * Rename Revamped Tableau Connector classes
+
+ ## 0.24.1 - 2025-03-14
+
+ * Added support for Looker Studio
+
  ## 0.24.0 - 2025-03-10
 
  * Remove legacy Tableau Connector
README.md CHANGED
@@ -104,7 +104,9 @@ Depending on your use case, you can also install one of the following `extras`:
 
  ```bash
  pip install castor-extractor[bigquery]
+ pip install castor-extractor[databricks]
  pip install castor-extractor[looker]
+ pip install castor-extractor[lookerstudio]
  pip install castor-extractor[metabase]
  pip install castor-extractor[mysql]
  pip install castor-extractor[powerbi]
castor_extractor/commands/extract_looker_studio.py ADDED
@@ -0,0 +1,30 @@
+ from argparse import ArgumentParser
+
+ from castor_extractor.utils import parse_filled_arguments  # type: ignore
+ from castor_extractor.visualization import looker_studio  # type: ignore
+
+
+ def main():
+     parser = ArgumentParser()
+     parser.add_argument(
+         "-c",
+         "--credentials",
+         help="File path to Service Account credentials with Looker Studio access",
+     )
+     parser.add_argument(
+         "-a",
+         "--admin-email",
+         help="Email of a Google Workspace user with admin access",
+     )
+     parser.add_argument(
+         "-b",
+         "--bigquery-credentials",
+         help=(
+             "Optional: file path to Service Account credentials with BigQuery access. "
+             "This can be the same file path as for Looker Studio."
+         ),
+     )
+
+     parser.add_argument("-o", "--output", help="Directory to write to")
+
+     looker_studio.extract_all(**parse_filled_arguments(parser))
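For context, the console script simply forwards the parsed arguments to `looker_studio.extract_all`; an equivalent direct call is sketched below, with placeholder paths and email:

```python
from castor_extractor.visualization import looker_studio

# Placeholders only; mirrors what the command-line entry point does.
looker_studio.extract_all(
    credentials="/path/to/service_account.json",           # Looker Studio access
    admin_email="admin@example.com",                        # Google Workspace admin
    bigquery_credentials="/path/to/service_account.json",   # optional, enables source queries
    output="/tmp/castor_output",
)
```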
castor_extractor/commands/extract_tableau.py CHANGED
@@ -2,7 +2,7 @@ import logging
  from argparse import ArgumentParser
 
  from castor_extractor.utils import parse_filled_arguments  # type: ignore
- from castor_extractor.visualization import tableau_revamp  # type: ignore
+ from castor_extractor.visualization import tableau  # type: ignore
 
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
 
@@ -36,4 +36,4 @@ def main():
 
      parser.add_argument("-o", "--output", help="Directory to write to")
 
-     tableau_revamp.extract_all(**parse_filled_arguments(parser))
+     tableau.extract_all(**parse_filled_arguments(parser))
castor_extractor/visualization/looker_studio/__init__.py CHANGED
@@ -4,3 +4,4 @@ from .client import (
      LookerStudioClient,
      LookerStudioCredentials,
  )
+ from .extract import extract_all
castor_extractor/visualization/looker_studio/assets.py CHANGED
@@ -3,4 +3,5 @@ from ...types import ExternalAsset
 
  class LookerStudioAsset(ExternalAsset):
      ASSETS = "assets"
+     SOURCE_QUERIES = "source_queries"
      VIEW_ACTIVITY = "view_activity"
castor_extractor/visualization/looker_studio/client/client.py CHANGED
@@ -1,21 +1,49 @@
- from typing import Iterator
+ from typing import Iterator, Optional
 
+ from ....utils import empty_iterator
+ from ....warehouse.abstract import WarehouseAsset
+ from ....warehouse.bigquery import BigQueryClient, BigQueryQueryBuilder
  from .. import LookerStudioAsset
  from .admin_sdk_client import USER_EMAIL_FIELD, AdminSDKClient
  from .credentials import LookerStudioCredentials
  from .looker_studio_api_client import LookerStudioAPIClient
 
 
+ class LookerStudioQueryBuilder(BigQueryQueryBuilder):
+     def job_history_queries(self) -> list:
+         """
+         This class and method are a convenient workaround to build the
+         ExtractionQueries which retrieve BigQuery's job history, but filtered on
+         Looker Studio only.
+
+         Compared to the generic BigQuery query history, only the SQL "template"
+         changes. By defining this class here, this will pick the SQL file
+         `queries/query.sql` located in the same directory as this file.
+         """
+         return super().build(WarehouseAsset.QUERY)  # type: ignore
+
+
  class LookerStudioClient:
      """
      Acts as a wrapper class to fetch Looker Studio assets, which requires
      coordinating calls between the Admin SDK API and the Looker Studio API.
+
+     If the BigQuery credentials are provided, it can also fetch the source queries
+     of BigQuery data sources.
      """
 
-     def __init__(self, credentials: LookerStudioCredentials):
+     def __init__(
+         self,
+         credentials: LookerStudioCredentials,
+         bigquery_credentials: Optional[dict] = None,
+     ):
          self.admin_sdk_client = AdminSDKClient(credentials)
          self.looker_studio_client = LookerStudioAPIClient(credentials)
 
+         self.bigquery_client: Optional[BigQueryClient] = None
+         if bigquery_credentials:
+             self.bigquery_client = BigQueryClient(bigquery_credentials)
+
      def _get_assets(self) -> Iterator[dict]:
          """
          Extracts reports and data sources user by user.
@@ -26,12 +26,34 @@ class LookerStudioClient:
              email = user[USER_EMAIL_FIELD]
              yield from self.looker_studio_client.fetch_user_assets(email)
 
-     def fetch(self, asset: LookerStudioAsset) -> Iterator[dict]:
-         if asset == LookerStudioAsset.VIEW_ACTIVITY:
-             yield from self.admin_sdk_client.list_view_events()
+     def _get_source_queries(self) -> Iterator[dict]:
+         """
+         Extracts the BigQuery jobs triggered by Looker Studio. The last job
+         per data source is returned.
+         """
+         if not self.bigquery_client:
+             return empty_iterator()
+
+         query_builder = LookerStudioQueryBuilder(
+             regions=self.bigquery_client.get_regions(),
+             datasets=self.bigquery_client.get_datasets(),
+             extended_regions=self.bigquery_client.get_extended_regions(),
+         )
+
+         queries = query_builder.job_history_queries()
+
+         for query in queries:
+             yield from self.bigquery_client.execute(query)
 
-         elif asset == LookerStudioAsset.ASSETS:
+     def fetch(self, asset: LookerStudioAsset) -> Iterator[dict]:
+         if asset == LookerStudioAsset.ASSETS:
              yield from self._get_assets()
 
+         elif asset == LookerStudioAsset.SOURCE_QUERIES:
+             yield from self._get_source_queries()
+
+         elif asset == LookerStudioAsset.VIEW_ACTIVITY:
+             yield from self.admin_sdk_client.list_view_events()
+
          else:
              raise ValueError(f"The asset {asset}, is not supported")
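A minimal sketch of driving this client directly, mirroring what the package's own extract.py does; the file path and admin address are placeholders:

```python
import json

from castor_extractor.visualization.looker_studio import (
    LookerStudioClient,
    LookerStudioCredentials,
)
from castor_extractor.visualization.looker_studio.assets import LookerStudioAsset

# Placeholder path: a Service Account JSON with Looker Studio (and optionally BigQuery) access.
with open("/path/to/service_account.json") as f:
    service_account = json.load(f)

# Looker Studio credentials are the Service Account payload plus an admin_email field.
credentials = LookerStudioCredentials(**service_account, admin_email="admin@example.com")

client = LookerStudioClient(
    credentials=credentials,
    bigquery_credentials=service_account,  # optional; without it SOURCE_QUERIES yields nothing
)

for row in client.fetch(LookerStudioAsset.SOURCE_QUERIES):
    print(row)
```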
castor_extractor/visualization/looker_studio/client/credentials.py CHANGED
@@ -2,6 +2,11 @@ from pydantic import BaseModel, SecretStr, field_serializer
 
 
  class LookerStudioCredentials(BaseModel):
+     """
+     Looker Studio Credentials match the Service Account credentials JSON
+     but with an additional admin_email field.
+     """
+
      admin_email: str
      auth_provider_x509_cert_url: str
      auth_uri: str
castor_extractor/visualization/looker_studio/client/queries/query.sql ADDED
@@ -0,0 +1,52 @@
+ /*
+ Gets the query jobs triggered by Looker Studio when refreshing a BigQuery data source. Only the latest query per
+ data source is selected.
+
+ The `labels` column should indicate the `looker_studio_datasource_id` that triggered the job. In some cases, it also
+ contains a `looker_studio_report_id` value, which gives us a link between the data source and a report.
+ */
+ WITH ranked_by_datasource AS (
+     SELECT
+         creation_time,
+         project_id AS database_name,
+         user_email,
+         query AS query_text,
+         referenced_tables,
+         labels,
+         ROW_NUMBER() OVER (
+             PARTITION BY (
+                 SELECT
+                     label.value
+                 FROM
+                     UNNEST(labels) AS label
+                 WHERE
+                     label.key = 'looker_studio_datasource_id'
+             )
+             ORDER BY
+                 creation_time DESC
+         ) AS row_num
+     FROM
+         `{project}.region-{region}.INFORMATION_SCHEMA.JOBS_BY_PROJECT`
+     WHERE
+         job_type = 'QUERY'
+         AND EXISTS (
+             SELECT
+                 1
+             FROM
+                 UNNEST(labels) AS label
+             WHERE
+                 label.key = 'requestor'
+                 AND label.value = 'looker_studio'
+         )
+ )
+ SELECT
+     creation_time,
+     database_name,
+     user_email,
+     query_text,
+     referenced_tables,
+     labels
+ FROM
+     ranked_by_datasource
+ WHERE
+     row_num = 1;
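To make the filter above concrete, a job started by a Looker Studio data-source refresh carries labels shaped roughly like the sketch below; the IDs are placeholders, only the keys appear in the query:

```python
# Hypothetical `labels` value on a matching INFORMATION_SCHEMA.JOBS_BY_PROJECT row.
# The query keeps jobs where requestor=looker_studio, then keeps the most recent job
# per looker_studio_datasource_id (row_num = 1).
labels = [
    {"key": "requestor", "value": "looker_studio"},
    {"key": "looker_studio_datasource_id", "value": "abc123"},  # placeholder ID
    {"key": "looker_studio_report_id", "value": "def456"},      # present in some cases
]
```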
castor_extractor/visualization/looker_studio/extract.py ADDED
@@ -0,0 +1,84 @@
+ import json
+ import logging
+ from collections.abc import Iterable
+ from typing import Optional, Union, cast
+
+ from ...utils import (
+     OUTPUT_DIR,
+     current_timestamp,
+     deep_serialize,
+     from_env,
+     get_output_filename,
+     write_json,
+     write_summary,
+ )
+ from .assets import LookerStudioAsset
+ from .client import LookerStudioClient, LookerStudioCredentials
+
+ logger = logging.getLogger(__name__)
+
+ APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
+ LOOKER_STUDIO_ADMIN_EMAIL = "CASTOR_LOOKER_STUDIO_ADMIN_EMAIL"
+
+
+ def iterate_all_data(
+     client: LookerStudioClient,
+ ) -> Iterable[tuple[LookerStudioAsset, Union[list, dict]]]:
+     for asset in LookerStudioAsset:
+         logger.info(f"Extracting {asset.name} from API")
+         data = list(deep_serialize(client.fetch(asset)))
+         yield asset, data
+         logger.info(f"Extracted {len(data)} {asset.name} from API")
+
+
+ def _credentials(params: dict) -> LookerStudioCredentials:
+     """
+     Builds the Looker Studio credentials by combining the Service Account
+     credentials with the admin email.
+     """
+     path = params.get("credentials") or from_env(APPLICATION_CREDENTIALS)
+     logger.info(f"Looker Studio credentials loaded from {path}")
+     with open(path) as file:
+         credentials = cast(dict, json.load(file))
+
+     admin_email = params.get("admin_email") or from_env(
+         LOOKER_STUDIO_ADMIN_EMAIL
+     )
+     credentials["admin_email"] = admin_email
+     return LookerStudioCredentials(**credentials)
+
+
+ def _bigquery_credentials_or_none(params: dict) -> Optional[dict]:
+     """Extracts optional GCP credentials to access BigQuery"""
+     path = params.get("bigquery_credentials") or from_env(
+         APPLICATION_CREDENTIALS,
+         allow_missing=True,
+     )
+     if not path:
+         return None
+
+     logger.info(f"BigQuery credentials loaded from {path}")
+     with open(path) as file:
+         return cast(dict, json.load(file))
+
+
+ def extract_all(**kwargs) -> None:
+     """
+     Extracts data from Looker Studio and stores the output files locally under
+     the given output_directory.
+     """
+     output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
+     credentials = _credentials(kwargs)
+     bigquery_credentials = _bigquery_credentials_or_none(kwargs)
+
+     client = LookerStudioClient(
+         credentials=credentials,
+         bigquery_credentials=bigquery_credentials,
+     )
+     ts = current_timestamp()
+
+     for key, data in iterate_all_data(client):
+         filename = get_output_filename(key.name.lower(), output_directory, ts)
+         write_json(filename, data)
+
+     write_summary(output_directory, ts)
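The `_credentials` and `_bigquery_credentials_or_none` helpers above fall back to environment variables when CLI parameters are omitted; a minimal sketch with placeholder values (the env-var name behind the `OUTPUT_DIR` constant is not shown in this diff, so `output` is passed explicitly):

```python
import os

from castor_extractor.visualization import looker_studio

# Placeholders: the same Service Account file serves Looker Studio and BigQuery here.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/path/to/service_account.json"
os.environ["CASTOR_LOOKER_STUDIO_ADMIN_EMAIL"] = "admin@example.com"

looker_studio.extract_all(output="/tmp/castor_output")
```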
castor_extractor/visualization/tableau/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .assets import TableauAsset
+ from .client import TableauClient, TableauCredentials
+ from .extract import extract_all, iterate_all_data
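For code that used the revamped connector under its old name, the 0.24.2 rename is import-level only; a minimal before/after sketch:

```python
# Before 0.24.2 the revamped connector lived under `tableau_revamp`:
#   from castor_extractor.visualization.tableau_revamp import (
#       TableauRevampAsset, TableauRevampClient, TableauRevampCredentials,
#   )

# From 0.24.2 the same objects are exported without the Revamp prefix:
from castor_extractor.visualization.tableau import (
    TableauAsset,
    TableauClient,
    TableauCredentials,
    extract_all,
    iterate_all_data,
)
```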
castor_extractor/visualization/{tableau_revamp → tableau}/assets.py RENAMED
@@ -1,7 +1,7 @@
  from ...types import ExternalAsset
 
 
- class TableauRevampAsset(ExternalAsset):
+ class TableauAsset(ExternalAsset):
      """
      Tableau assets
      """
@@ -23,7 +23,7 @@ class TableauRevampAsset(ExternalAsset):
 
  # assets that are only available for clients using Tableau Pulse
  TABLEAU_PULSE_ASSETS = (
-     TableauRevampAsset.METRIC,
-     TableauRevampAsset.METRIC_DEFINITION,
-     TableauRevampAsset.SUBSCRIPTION,
+     TableauAsset.METRIC,
+     TableauAsset.METRIC_DEFINITION,
+     TableauAsset.SUBSCRIPTION,
  )
castor_extractor/visualization/tableau/client/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from .client import TableauClient
+ from .credentials import TableauCredentials
castor_extractor/visualization/{tableau_revamp → tableau}/client/client.py RENAMED
@@ -4,31 +4,31 @@ from typing import Optional
  import tableauserverclient as TSC  # type: ignore
 
  from ....utils import SerializedAsset
- from ..assets import TABLEAU_PULSE_ASSETS, TableauRevampAsset
+ from ..assets import TABLEAU_PULSE_ASSETS, TableauAsset
  from ..constants import CREDENTIALS_SITE_ID_KEY, DEFAULT_TIMEOUT_SECONDS
  from .client_metadata_api import TableauClientMetadataApi
  from .client_rest_api import TableauClientRestApi
  from .client_tsc import TableauClientTSC
- from .credentials import TableauRevampCredentials
+ from .credentials import TableauCredentials
 
  logger = logging.getLogger(__name__)
 
  # these assets must be extracted via TableauServerClient (TSC)
  _TSC_ASSETS = (
      # projects are not available in Metadata API
-     TableauRevampAsset.PROJECT,
+     TableauAsset.PROJECT,
      # view count are not available in Metadata API
-     TableauRevampAsset.USAGE,
+     TableauAsset.USAGE,
      # only users who published content can be extracted from MetadataAPI
-     TableauRevampAsset.USER,
+     TableauAsset.USER,
  )
 
  # these assets must be extracted via the REST API
  _REST_API_ASSETS = (
      # Tableau Pulse assets are only available in REST API
-     TableauRevampAsset.METRIC,
-     TableauRevampAsset.METRIC_DEFINITION,
-     TableauRevampAsset.SUBSCRIPTION,
+     TableauAsset.METRIC,
+     TableauAsset.METRIC_DEFINITION,
+     TableauAsset.SUBSCRIPTION,
  )
 
  logging.getLogger("tableau.endpoint").setLevel(logging.WARNING)
@@ -109,7 +109,7 @@ def _server(
      return server
 
 
- class TableauRevampClient:
+ class TableauClient:
      """
      Connect to Tableau's API and extract assets.
 
@@ -119,7 +119,7 @@ class TableauRevampClient:
 
      def __init__(
          self,
-         credentials: TableauRevampCredentials,
+         credentials: TableauCredentials,
          timeout_sec: int = DEFAULT_TIMEOUT_SECONDS,
          with_pulse: bool = False,
          override_page_size: Optional[int] = None,
@@ -186,7 +186,7 @@ class TableauRevampClient:
          )
 
      def _fetch_datasources(self) -> SerializedAsset:
-         asset = TableauRevampAsset.DATASOURCE
+         asset = TableauAsset.DATASOURCE
 
          datasources = self._client_metadata.fetch(asset)
          tsc_datasources = self._client_tsc.fetch(asset)
@@ -194,7 +194,7 @@ class TableauRevampClient:
          return _merge_datasources(datasources, tsc_datasources)
 
      def _fetch_workbooks(self) -> SerializedAsset:
-         asset = TableauRevampAsset.WORKBOOK
+         asset = TableauAsset.WORKBOOK
 
          site_id = self._credentials.site_id
          workbooks = self._client_metadata.fetch(asset)
@@ -206,7 +206,7 @@ class TableauRevampClient:
 
      def fetch(
          self,
-         asset: TableauRevampAsset,
+         asset: TableauAsset,
      ) -> SerializedAsset:
          """
          Extract the given Tableau Asset
@@ -217,11 +217,11 @@ class TableauRevampClient:
 
          logger.info(f"Extracting {asset.name}...")
 
-         if asset == TableauRevampAsset.DATASOURCE:
+         if asset == TableauAsset.DATASOURCE:
              # two APIs are required to extract datasources
              return self._fetch_datasources()
 
-         if asset == TableauRevampAsset.WORKBOOK:
+         if asset == TableauAsset.WORKBOOK:
              # two APIs are required to extract workbooks
              return self._fetch_workbooks()
 
castor_extractor/visualization/{tableau_revamp → tableau}/client/client_metadata_api.py RENAMED
@@ -4,21 +4,21 @@ from typing import Optional
  import tableauserverclient as TSC  # type: ignore
 
  from ....utils import SerializedAsset, retry
- from ..assets import TableauRevampAsset
+ from ..assets import TableauAsset
  from ..constants import DEFAULT_PAGE_SIZE
  from .errors import TableauApiError, TableauApiTimeout
  from .gql_queries import FIELDS_QUERIES, GQL_QUERIES, QUERY_TEMPLATE
 
  # increase the value when extraction is too slow
  # decrease the value when timeouts arise
- _CUSTOM_PAGE_SIZE: dict[TableauRevampAsset, int] = {
+ _CUSTOM_PAGE_SIZE: dict[TableauAsset, int] = {
      # for some clients, extraction of columns tend to hit the node limit
      # https://community.tableau.com/s/question/0D54T00000YuK60SAF/metadata-query-nodelimitexceeded-error
      # the workaround is to reduce pagination
-     TableauRevampAsset.COLUMN: 50,
+     TableauAsset.COLUMN: 50,
      # fields are light but volumes are bigger
-     TableauRevampAsset.FIELD: 1000,
-     TableauRevampAsset.TABLE: 50,
+     TableauAsset.FIELD: 1000,
+     TableauAsset.TABLE: 50,
  }
 
  _TIMEOUT_MESSAGE = (
@@ -115,7 +115,7 @@ class TableauClientMetadataApi:
          result_pages = gql_query_scroll(self._server, query, resource)
          return [asset for page in result_pages for asset in page]
 
-     def _page_size(self, asset: TableauRevampAsset) -> int:
+     def _page_size(self, asset: TableauAsset) -> int:
          return (
              self._override_page_size
              or _CUSTOM_PAGE_SIZE.get(asset)
@@ -124,7 +124,7 @@ class TableauClientMetadataApi:
 
      def _fetch_fields(self) -> SerializedAsset:
          result: SerializedAsset = []
-         page_size = self._page_size(TableauRevampAsset.FIELD)
+         page_size = self._page_size(TableauAsset.FIELD)
          for resource, fields in FIELDS_QUERIES:
              current = self._call(resource, fields, page_size)
              result.extend(current)
@@ -132,9 +132,9 @@ class TableauClientMetadataApi:
 
      def fetch(
          self,
-         asset: TableauRevampAsset,
+         asset: TableauAsset,
      ) -> SerializedAsset:
-         if asset == TableauRevampAsset.FIELD:
+         if asset == TableauAsset.FIELD:
              return self._fetch_fields()
 
          page_size = self._page_size(asset)
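The `_override_page_size` checked first in `_page_size` above is exposed to callers as the `page_size` argument of `tableau.extract_all` (see extract.py later in this diff); a hedged sketch, with an illustrative value and the Tableau connection kwargs elided:

```python
from castor_extractor.visualization import tableau

# `page_size` ends up as TableauClient(..., override_page_size=page_size) and takes
# precedence over _CUSTOM_PAGE_SIZE and DEFAULT_PAGE_SIZE for every asset.
tableau.extract_all(
    page_size=25,  # illustrative value; lower it when node-limit errors arise
    output="/tmp/castor_output",
    # ...Tableau credentials kwargs omitted; their field names are not part of this diff
)
```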
castor_extractor/visualization/{tableau_revamp → tableau}/client/client_rest_api.py RENAMED
@@ -5,7 +5,7 @@ import requests
  import tableauserverclient as TSC  # type: ignore
 
  from ....utils import SerializedAsset, deduplicate
- from ..assets import TableauRevampAsset
+ from ..assets import TableauAsset
  from .rest_fields import REST_FIELDS
 
  logger = logging.getLogger(__name__)
@@ -17,7 +17,7 @@ _METRICS_DEFINITION_URL = "{base}/pulse/site/{site}/{definition_id}"
 
  def _pick(
      data: SerializedAsset,
-     asset: TableauRevampAsset,
+     asset: TableauAsset,
  ) -> SerializedAsset:
      keys = REST_FIELDS[asset]
      return [{key: row[key] for key in keys} for row in data]
@@ -107,18 +107,18 @@ class TableauClientRestApi:
 
      def fetch(
          self,
-         asset: TableauRevampAsset,
+         asset: TableauAsset,
      ) -> SerializedAsset:
-         if asset == TableauRevampAsset.SUBSCRIPTION:
+         if asset == TableauAsset.SUBSCRIPTION:
              # https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref_pulse.htm#PulseSubscriptionService_ListSubscriptions
              data = self._call(path=_PULSE_API, target="subscriptions")
 
-         elif asset == TableauRevampAsset.METRIC_DEFINITION:
+         elif asset == TableauAsset.METRIC_DEFINITION:
              # https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref_pulse.htm#MetricQueryService_ListDefinitions
              data = self._call(path=_PULSE_API, target="definitions")
              self._compute_metric_url(data)
 
-         elif asset == TableauRevampAsset.METRIC:
+         elif asset == TableauAsset.METRIC:
              # https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref_pulse.htm#MetricQueryService_ListMetrics
              definitions = self._call(path=_PULSE_API, target="definitions")
              data = self._fetch_metrics(definitions)
castor_extractor/visualization/{tableau_revamp → tableau}/client/client_tsc.py RENAMED
@@ -4,7 +4,7 @@ from typing import Any
  import tableauserverclient as TSC  # type: ignore
 
  from ....utils import JsonType, SerializedAsset
- from ..assets import TableauRevampAsset
+ from ..assets import TableauAsset
  from .rest_fields import REST_FIELDS
 
 
@@ -30,13 +30,13 @@ class TableauClientTSC:
      def _pick_fields(
          self,
          data: Iterable,
-         asset: TableauRevampAsset,
+         asset: TableauAsset,
      ) -> Iterator[dict]:
          keys = REST_FIELDS[asset]
 
          for row in data:
              fields = {key: _pick(row, key) for key in keys}
-             if asset == TableauRevampAsset.USER:
+             if asset == TableauAsset.USER:
                  self._server.users.populate_groups(row)
                  fields["group_ids"] = [group.id for group in row.groups]
 
@@ -44,21 +44,21 @@ class TableauClientTSC:
 
      def fetch(
          self,
-         asset: TableauRevampAsset,
+         asset: TableauAsset,
      ) -> SerializedAsset:
-         if asset == TableauRevampAsset.DATASOURCE:
+         if asset == TableauAsset.DATASOURCE:
              data = TSC.Pager(self._server.datasources)
 
-         elif asset == TableauRevampAsset.PROJECT:
+         elif asset == TableauAsset.PROJECT:
              data = TSC.Pager(self._server.projects)
 
-         elif asset == TableauRevampAsset.USAGE:
+         elif asset == TableauAsset.USAGE:
              data = TSC.Pager(self._server.views, usage=True)
 
-         elif asset == TableauRevampAsset.USER:
+         elif asset == TableauAsset.USER:
              data = TSC.Pager(self._server.users)
 
-         elif asset == TableauRevampAsset.WORKBOOK:
+         elif asset == TableauAsset.WORKBOOK:
              data = TSC.Pager(self._server.workbooks)
 
          else:
castor_extractor/visualization/{tableau_revamp → tableau}/client/credentials.py RENAMED
@@ -15,7 +15,7 @@ _DEFAULT_SITE_ID_USER_INPUT = "default"
  TABLEAU_ENV_PREFIX = "CASTOR_TABLEAU_"
 
 
- class TableauRevampCredentials(BaseSettings):
+ class TableauCredentials(BaseSettings):
      """
      Tableau's credentials to connect to both APIs (REST and GRAPHQL)
      """
@@ -42,7 +42,7 @@ class TableauRevampCredentials(BaseSettings):
          return site_id
 
      @model_validator(mode="after")
-     def _check_user_xor_pat_login(self) -> "TableauRevampCredentials":
+     def _check_user_xor_pat_login(self) -> "TableauCredentials":
          """
          Checks that credentials are correctly input, it means either:
          - User and password are filled
castor_extractor/visualization/{tableau_revamp → tableau}/client/gql_queries.py RENAMED
@@ -1,4 +1,4 @@
- from ..assets import TableauRevampAsset
+ from ..assets import TableauAsset
 
  QUERY_TEMPLATE = """
  {{
@@ -128,13 +128,13 @@ workbook { id }
  """
 
 
- GQL_QUERIES: dict[TableauRevampAsset, tuple[str, str]] = {
-     TableauRevampAsset.COLUMN: ("columns", _COLUMNS_QUERY),
-     TableauRevampAsset.DASHBOARD: ("dashboards", _DASHBOARDS_QUERY),
-     TableauRevampAsset.DATASOURCE: ("datasources", _DATASOURCES_QUERY),
-     TableauRevampAsset.SHEET: ("sheets", _SHEETS_QUERY),
-     TableauRevampAsset.TABLE: ("tables", _TABLES_QUERY),
-     TableauRevampAsset.WORKBOOK: ("workbooks", _WORKBOOKS_QUERY),
+ GQL_QUERIES: dict[TableauAsset, tuple[str, str]] = {
+     TableauAsset.COLUMN: ("columns", _COLUMNS_QUERY),
+     TableauAsset.DASHBOARD: ("dashboards", _DASHBOARDS_QUERY),
+     TableauAsset.DATASOURCE: ("datasources", _DATASOURCES_QUERY),
+     TableauAsset.SHEET: ("sheets", _SHEETS_QUERY),
+     TableauAsset.TABLE: ("tables", _TABLES_QUERY),
+     TableauAsset.WORKBOOK: ("workbooks", _WORKBOOKS_QUERY),
  }
 
  FIELDS_QUERIES = (
castor_extractor/visualization/{tableau_revamp → tableau}/client/rest_fields.py RENAMED
@@ -1,44 +1,44 @@
- from ..assets import TableauRevampAsset
+ from ..assets import TableauAsset
 
  # list of fields to pick in REST API or TSC responses
- REST_FIELDS: dict[TableauRevampAsset, set[str]] = {
-     TableauRevampAsset.DATASOURCE: {
+ REST_FIELDS: dict[TableauAsset, set[str]] = {
+     TableauAsset.DATASOURCE: {
          "id",
          "project_id",
          "webpage_url",
      },
-     TableauRevampAsset.METRIC: {
+     TableauAsset.METRIC: {
          "id",
          "definition_id",
      },
-     TableauRevampAsset.METRIC_DEFINITION: {
+     TableauAsset.METRIC_DEFINITION: {
          "metadata",
          "specification",
      },
-     TableauRevampAsset.PROJECT: {
+     TableauAsset.PROJECT: {
          "description",
          "id",
          "name",
          "parent_id",
      },
-     TableauRevampAsset.SUBSCRIPTION: {
+     TableauAsset.SUBSCRIPTION: {
          "follower",
          "id",
          "metric_id",
      },
-     TableauRevampAsset.USAGE: {
+     TableauAsset.USAGE: {
          "name",
          "total_views",
          "workbook_id",
      },
-     TableauRevampAsset.USER: {
+     TableauAsset.USER: {
          "email",
          "fullname",
          "id",
          "name",
          "site_role",
      },
-     TableauRevampAsset.WORKBOOK: {
+     TableauAsset.WORKBOOK: {
          "id",
          "project_id",
      },
castor_extractor/visualization/{tableau_revamp → tableau}/extract.py RENAMED
@@ -10,18 +10,18 @@ from ...utils import (
      write_json,
      write_summary,
  )
- from .assets import TableauRevampAsset
- from .client import TableauRevampClient, TableauRevampCredentials
+ from .assets import TableauAsset
+ from .client import TableauClient, TableauCredentials
 
  logger = logging.getLogger(__name__)
 
 
  def iterate_all_data(
-     client: TableauRevampClient,
- ) -> Iterable[tuple[TableauRevampAsset, list]]:
+     client: TableauClient,
+ ) -> Iterable[tuple[TableauAsset, list]]:
      """Iterate over the extracted Data from Tableau"""
 
-     for asset in TableauRevampAsset:
+     for asset in TableauAsset:
          data = client.fetch(asset)
          yield asset, deep_serialize(data)
 
@@ -36,8 +36,8 @@ def extract_all(**kwargs) -> None:
      page_size = kwargs.get("page_size")
      timestamp = current_timestamp()
 
-     credentials = TableauRevampCredentials(**kwargs)
-     client = TableauRevampClient(
+     credentials = TableauCredentials(**kwargs)
+     client = TableauClient(
          credentials,
          with_pulse=with_pulse,
          override_page_size=page_size,
{castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: castor-extractor
- Version: 0.24.0
+ Version: 0.24.2
  Summary: Extract your metadata assets.
  Home-page: https://www.castordoc.com/
  License: EULA
@@ -170,7 +170,9 @@ Depending on your use case, you can also install one of the following `extras`:
 
  ```bash
  pip install castor-extractor[bigquery]
+ pip install castor-extractor[databricks]
  pip install castor-extractor[looker]
+ pip install castor-extractor[lookerstudio]
  pip install castor-extractor[metabase]
  pip install castor-extractor[mysql]
  pip install castor-extractor[powerbi]
@@ -208,6 +210,14 @@ For any questions or bug report, contact us at [support@castordoc.com](mailto:su
 
  # Changelog
 
+ ## 0.24.2 - 2025-03-17
+
+ * Rename Revamped Tableau Connector classes
+
+ ## 0.24.1 - 2025-03-14
+
+ * Added support for Looker Studio
+
  ## 0.24.0 - 2025-03-10
 
  * Remove legacy Tableau Connector
{castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/RECORD RENAMED
@@ -1,8 +1,8 @@
- CHANGELOG.md,sha256=Ud6kiuDEyG_gu0YAVlp9-oXxriLibyma98FDrlajJJE,15756
+ CHANGELOG.md,sha256=8iEypB0lozhyFumiedys3lbpowlX3HXCPnK-3QvjueE,15884
  Dockerfile,sha256=xQ05-CFfGShT3oUqaiumaldwA288dj9Yb_pxofQpufg,301
  DockerfileUsage.md,sha256=2hkJQF-5JuuzfPZ7IOxgM6QgIQW7l-9oRMFVwyXC4gE,998
  LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
- README.md,sha256=j8oiToTvFY4eozLUJo4rs0LEqan-G3_eOSP98KFfxfM,3634
+ README.md,sha256=GlhxZBs3fkeyYUPjrB4_EGDY8_E0vvnftsaqtgv08vs,3718
  castor_extractor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  castor_extractor/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  castor_extractor/commands/extract_bigquery.py,sha256=dU4OiYO1V0n32orvZnMh1_xtFKF_VxHNXcVsH3otY-g,1269
@@ -10,6 +10,7 @@ castor_extractor/commands/extract_confluence.py,sha256=xQjC0VZdz8jFHnugqQ0fQGjzG
  castor_extractor/commands/extract_databricks.py,sha256=SVKyoa-BBUQAM6HRHf1Wdg9-tpICic2yyvXQwHcNBhA,1264
  castor_extractor/commands/extract_domo.py,sha256=jvAawUsUTHrwCn_koK6StmQr4n_b5GyvJi6uu6WS0SM,1061
  castor_extractor/commands/extract_looker.py,sha256=cySLiolLCgrREJ9d0kMrJ7P8K3efHTBTzShalWVfI3A,1214
+ castor_extractor/commands/extract_looker_studio.py,sha256=e79gbyTtCexRz5pg_Pp55GWkXJZWjm6NvVclmvcR0lM,916
  castor_extractor/commands/extract_metabase_api.py,sha256=NXctea4GT_1iRDitY92nV3TKSqhjEUwYSxwPJMRS3iw,786
  castor_extractor/commands/extract_metabase_db.py,sha256=tYIhTPPgj1mN-07LyWcL6e-YoGp7HCWda58-5Ukyg_I,1255
  castor_extractor/commands/extract_mode.py,sha256=Q4iO-VAKMg4zFPejhAO-foZibL5Ht3jsnhWKwJ0oqUU,823
@@ -24,7 +25,7 @@ castor_extractor/commands/extract_salesforce_reporting.py,sha256=FdANTNiLkIPdm80
  castor_extractor/commands/extract_sigma.py,sha256=sxewHcZ1Doq35V2qnpX_zCKKXkrb1_9bYjUMg7BOW-k,643
  castor_extractor/commands/extract_snowflake.py,sha256=GwlrRxwEBjHqGs_3bs5vM9fzmv61_iwvBr1KcIgFgWM,2161
  castor_extractor/commands/extract_sqlserver.py,sha256=lwhbcNChaXHZgMgSOch3faVr7WJw-sDU6GHl3lzBt_0,1141
- castor_extractor/commands/extract_tableau.py,sha256=DGQaXS-61rV-uzBtqfvqtyQzjLtrLTzE1ViTXPG1eck,1379
+ castor_extractor/commands/extract_tableau.py,sha256=xXlLKLN8Eu_a8Kt2F4E-C5D-gq8SUmvoxJcdR_thKKY,1365
  castor_extractor/commands/extract_thoughtspot.py,sha256=caAYJlH-vK7u5IUB6OKXxcaWfLgc7d_XqnFDWK6YNS4,639
  castor_extractor/commands/file_check.py,sha256=TJx76Ymd0QCECmq35zRJMkPE8DJtSInB28MuSXWk8Ao,2644
  castor_extractor/commands/upload.py,sha256=rLXp7gQ8zb1kLbho4FT87q8eJd8Gvo_TkyIynAaQ-4s,1342
@@ -168,17 +169,19 @@ castor_extractor/visualization/looker/extract.py,sha256=O_hzRftww3Cw1cgijL-K-8gh
  castor_extractor/visualization/looker/fields.py,sha256=7oC7p-3Wp7XHBP_FT_D1wH3kINFRnc_qGVeH1a4UNZY,623
  castor_extractor/visualization/looker/fields_test.py,sha256=7Cwq8Qky6aTZg8nCHp1gmPJtd9pGNB4QeMIRRWdHo5w,782
  castor_extractor/visualization/looker/multithreading.py,sha256=Muuh3usBLqtv3sfHoyPYJ6jJ7V5ajR6N9ZJ_F-bNc60,2608
- castor_extractor/visualization/looker_studio/__init__.py,sha256=p3mTWz7Yk1_m9vYohxCqwxnuE7SUYbU--TH2ezhf734,142
- castor_extractor/visualization/looker_studio/assets.py,sha256=_ir4L2RTmGDb1WetAm6-EZ6W4tPXxi0kNppNBlmy9QE,135
+ castor_extractor/visualization/looker_studio/__init__.py,sha256=GccG-GJXoNhjXFPkw-rHHZ0SXVQTFKjqkMIYHVeu3T4,175
+ castor_extractor/visualization/looker_studio/assets.py,sha256=lFIqr8EB6eK-Mf80R_x2qAscCyX7ZUcOcHVef1CM9B0,173
  castor_extractor/visualization/looker_studio/client/__init__.py,sha256=YkQaVDJa-7KSwdOLjtgKJMRiafbGNKC_46YVx0hYZ1Q,129
  castor_extractor/visualization/looker_studio/client/admin_sdk_client.py,sha256=hYKdU6TlWKkXx07r6HsZ4Wbxhasx8DP_jO6iDCjHjgk,3508
- castor_extractor/visualization/looker_studio/client/client.py,sha256=AYdR46NOdn_ITK_wPAASROW0gJjx-iA0Gi43QeuU5BU,1302
- castor_extractor/visualization/looker_studio/client/credentials.py,sha256=yzTaiJQ5cArTnbybUPF6fZZXbX9XQ0SBq-jVI2ECovA,521
+ castor_extractor/visualization/looker_studio/client/client.py,sha256=6sTfLRUhuxhkqDjC2ZBEaw6YnR6ze8-_VW2rc1u9Ksk,3191
+ castor_extractor/visualization/looker_studio/client/credentials.py,sha256=QImJPh8VctkrGt65UiU5hM12JI4WdCMSUFt88aiOoLw,657
  castor_extractor/visualization/looker_studio/client/endpoints.py,sha256=5eY-ffqNDdlDBOOpiF7LpjyHMrzeClJktidCr1pTDUs,669
  castor_extractor/visualization/looker_studio/client/enums.py,sha256=fHgemTaQpnwee8cw1YQVDsVnH--vTyFwT4Px8aVYYHQ,167
  castor_extractor/visualization/looker_studio/client/looker_studio_api_client.py,sha256=oySC6rsppj67RSifxwSCw4bFrz1Irx6IFJhX7tc_v1E,4087
  castor_extractor/visualization/looker_studio/client/pagination.py,sha256=9HQ3Rkdiz2VB6AvYtZ0F-WouiD0pMmdZyAmkv-3wh08,783
+ castor_extractor/visualization/looker_studio/client/queries/query.sql,sha256=Ub4rdrJ5WTPWKI-eVmXrNMv0Ktmti4b-93zZBr0xEB0,1426
  castor_extractor/visualization/looker_studio/client/scopes.py,sha256=824cqqgZuGq4L-rPNoHJe0ibXsxkRwB0CLG_kqw9Q0g,256
+ castor_extractor/visualization/looker_studio/extract.py,sha256=cHyroNZ1fKoBTvIbEebnKDrU3xpkcEgIPJy75ljCL70,2607
  castor_extractor/visualization/metabase/__init__.py,sha256=3E36cmkMyEgBB6Ot5rWk-N75i0G-7k24QTlc-Iol4pM,193
  castor_extractor/visualization/metabase/assets.py,sha256=nu3FwQBU_hdS2DBvgXAwQlEEi76QiNK2tMKEtMyctaY,2874
  castor_extractor/visualization/metabase/client/__init__.py,sha256=KBvaPMofBRV3m_sZAnKNCrJGr-Z88EbpdzEzWPQ_uBk,99
@@ -257,19 +260,19 @@ castor_extractor/visualization/sigma/client/credentials.py,sha256=XddAuQSmCKpxJ7
  castor_extractor/visualization/sigma/client/endpoints.py,sha256=DBFphbgoH78_MZUGM_bKBAq28Nl7LWSZ6VRsbxrxtDg,1162
  castor_extractor/visualization/sigma/client/pagination.py,sha256=kNEhNq08tTGbypyMjxs0w4uvDtQc_iaWpOZweaa_FsU,690
  castor_extractor/visualization/sigma/extract.py,sha256=XIT1qsj6g6dgBWP8HPfj_medZexu48EaY9tUwi14gzM,2298
- castor_extractor/visualization/tableau_revamp/__init__.py,sha256=a3DGjQhaz17gBqW-E84TAgupKbqLC40y5Ajo1yn-ot4,156
- castor_extractor/visualization/tableau_revamp/assets.py,sha256=8sJsK6Qixao6xVmVaO1usvs16SjNub9sIx7o-adYV14,659
- castor_extractor/visualization/tableau_revamp/client/__init__.py,sha256=wmS9uLtUiqNYVloi0-DgD8d2qzu3RVZEAtWiaDp6G_M,90
- castor_extractor/visualization/tableau_revamp/client/client.py,sha256=Ju89lMDiLOZ2LjxylcFm5429WElxGxjc52bMIWoKCDA,7716
- castor_extractor/visualization/tableau_revamp/client/client_metadata_api.py,sha256=WdALsMGTji2C5oSDyRwFzq-f5HZDwX-m3W8Byx87Qh4,4357
- castor_extractor/visualization/tableau_revamp/client/client_rest_api.py,sha256=O2F4qfrElTHHuD5WRPfLufazSmZ65jmlzye1t5rVOaQ,4024
- castor_extractor/visualization/tableau_revamp/client/client_tsc.py,sha256=AzN8ytKmq6HUeApTJ118JQ7EBEPESqrg7u8n3GZXqZI,1874
- castor_extractor/visualization/tableau_revamp/client/credentials.py,sha256=qA-EaX-4rbQRsn8v4zWh5Kh784ndHLjJaoZwnkQgCyo,1905
- castor_extractor/visualization/tableau_revamp/client/errors.py,sha256=ecT8Tit5VtzrOBB9ykblA0nvd75j5-_QDFupjV48zJQ,300
- castor_extractor/visualization/tableau_revamp/client/gql_queries.py,sha256=uKNGRhYeoiKfJ8vxO50L0a2fHDpYQgEdG_eZfYSdHqM,2238
- castor_extractor/visualization/tableau_revamp/client/rest_fields.py,sha256=3kvaq48BCBLfm7GL-i5W53MpbmSSi-e0yt31dNOk8ac,948
- castor_extractor/visualization/tableau_revamp/constants.py,sha256=lHGB50FgVNO2nXeIhkvQKivD8ZFBIjDrflgD5cTXKJw,104
- castor_extractor/visualization/tableau_revamp/extract.py,sha256=Ud_lt1YDSyCBjR38sjtvINy_Ez_TnA_jtM2D-8LcxPA,1471
+ castor_extractor/visualization/tableau/__init__.py,sha256=eFI_1hjdkxyUiAYiy3szwyuwn3yJ5C_KbpBU0ySJDcQ,138
+ castor_extractor/visualization/tableau/assets.py,sha256=HbCRd8VCj1WBEeqg9jwnygnT7xOFJ6PQD7Lq7sV-XR0,635
+ castor_extractor/visualization/tableau/client/__init__.py,sha256=P8RKFKOC63WkH5hdEytJOwHS9vzQ8GXreLfXZetmMP8,78
+ castor_extractor/visualization/tableau/client/client.py,sha256=zzqhzIqKyJygo4ZNGk6cZh0e6Z9R1W5T0P9un52KC1M,7626
+ castor_extractor/visualization/tableau/client/client_metadata_api.py,sha256=fIBsSbRTypBABsCoigO2dkKsw4Eu3GrsEPTDfjY8A80,4303
+ castor_extractor/visualization/tableau/client/client_rest_api.py,sha256=x4dNw4PPJdalTlGowwkANwqiS2ZhGxzpQytkHq3KbpY,3988
+ castor_extractor/visualization/tableau/client/client_tsc.py,sha256=VI_PJyd1ty3HSYXHHQjshmG2ziowIbrwJRonRPCHbks,1820
+ castor_extractor/visualization/tableau/client/credentials.py,sha256=uQICIgeXmLZfOroTgZt7PuKNKTyqQllRGSTcOmIfrKU,1893
+ castor_extractor/visualization/tableau/client/errors.py,sha256=ecT8Tit5VtzrOBB9ykblA0nvd75j5-_QDFupjV48zJQ,300
+ castor_extractor/visualization/tableau/client/gql_queries.py,sha256=NISarYh33Ij7DhYxqjTdv681AHYpbft8kPwVUQbAZ7U,2190
+ castor_extractor/visualization/tableau/client/rest_fields.py,sha256=ZKYYuMxg9PXhczVXaD4rXNk7dYyWJ1_bVM8FLEXju7s,888
+ castor_extractor/visualization/tableau/constants.py,sha256=lHGB50FgVNO2nXeIhkvQKivD8ZFBIjDrflgD5cTXKJw,104
+ castor_extractor/visualization/tableau/extract.py,sha256=FnjmmUdNA9MEf3S5Tw37x6ZXxVsK8R3YnVk1UVYbaZk,1423
  castor_extractor/visualization/thoughtspot/__init__.py,sha256=NhTGUk5Kdt54oCjHYoAt0cLBmVLys5lFYiRANL6wCmI,150
  castor_extractor/visualization/thoughtspot/assets.py,sha256=SAQWPKaD2NTSDg7-GSkcRSSEkKSws0MJfOVcHkdeTSg,276
  castor_extractor/visualization/thoughtspot/client/__init__.py,sha256=svrE2rMxR-OXctjPeAHMEPePlfcra-9KDevTMcHunAA,86
@@ -401,8 +404,8 @@ castor_extractor/warehouse/sqlserver/queries/table.sql,sha256=kbBQP-TdG5px1IVgyx
  castor_extractor/warehouse/sqlserver/queries/user.sql,sha256=gOrZsMVypusR2dc4vwVs4E1a-CliRsr_UjnD2EbXs-A,94
  castor_extractor/warehouse/sqlserver/query.py,sha256=g0hPT-RmeGi2DyenAi3o72cTlQsLToXIFYojqc8E5fQ,533
  castor_extractor/warehouse/synapse/queries/column.sql,sha256=lNcFoIW3Y0PFOqoOzJEXmPvZvfAsY0AP63Mu2LuPzPo,1351
- castor_extractor-0.24.0.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
- castor_extractor-0.24.0.dist-info/METADATA,sha256=HWTKSDx_akRg3FK_dMP5mRxlLg9Oc55uGUYgt2NmYaQ,22828
- castor_extractor-0.24.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- castor_extractor-0.24.0.dist-info/entry_points.txt,sha256=7aVSxc-_2dicp28Ow-S4y0p4wGoTm9zGmVptMvfLdw8,1649
- castor_extractor-0.24.0.dist-info/RECORD,,
+ castor_extractor-0.24.2.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
+ castor_extractor-0.24.2.dist-info/METADATA,sha256=FNJlgmFPbgSmHoVwHx-hXj9rvHYw2wctlcEXeGck52I,23040
+ castor_extractor-0.24.2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ castor_extractor-0.24.2.dist-info/entry_points.txt,sha256=FQNShG4w4nRO95_bZnagh7FQ2oiZ-40bdt8ZdTW1-uI,1731
+ castor_extractor-0.24.2.dist-info/RECORD,,
{castor_extractor-0.24.0.dist-info → castor_extractor-0.24.2.dist-info}/entry_points.txt RENAMED
@@ -4,6 +4,7 @@ castor-extract-confluence=castor_extractor.commands.extract_confluence:main
  castor-extract-databricks=castor_extractor.commands.extract_databricks:main
  castor-extract-domo=castor_extractor.commands.extract_domo:main
  castor-extract-looker=castor_extractor.commands.extract_looker:main
+ castor-extract-looker-studio=castor_extractor.commands.extract_looker_studio:main
  castor-extract-metabase-api=castor_extractor.commands.extract_metabase_api:main
  castor-extract-metabase-db=castor_extractor.commands.extract_metabase_db:main
  castor-extract-mode=castor_extractor.commands.extract_mode:main
castor_extractor/visualization/tableau_revamp/__init__.py DELETED
@@ -1,3 +0,0 @@
- from .assets import TableauRevampAsset
- from .client import TableauRevampClient, TableauRevampCredentials
- from .extract import extract_all, iterate_all_data
castor_extractor/visualization/tableau_revamp/client/__init__.py DELETED
@@ -1,2 +0,0 @@
- from .client import TableauRevampClient
- from .credentials import TableauRevampCredentials