castor-extractor 0.24.13__py3-none-any.whl → 0.24.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of castor-extractor might be problematic.
CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
  # Changelog
 
+ ## 0.24.15 - 2025-05-12
+
+ * Tableau: Add argument to skip columns extraction
+
+ ## 0.24.14 - 2025-05-06
+
+ * Confluence: extract pages per space to allow additional filtering. By default, pages from archived or personal spaces are not extracted.
+
  ## 0.24.13 - 2025-05-05
 
  * Rollback cloud-storage version as it's not compatible with Keboola
@@ -16,4 +16,33 @@ def main():
      parser.add_argument("-t", "--token", help="Confluence API token")
      parser.add_argument("-u", "--username", help="Confluence username")
 
+     parser.add_argument(
+         "--include-archived-spaces",
+         action="store_true",
+         default=False,
+         help="Include pages from archived spaces (Optional)",
+     )
+     parser.add_argument(
+         "--include-personal-spaces",
+         action="store_true",
+         default=False,
+         help="Include pages from personal spaces (Optional)",
+     )
+     parser.add_argument(
+         "--space-ids-allowed",
+         type=str,
+         nargs="+",
+         help=(
+             "List of Confluence space IDs allowed for extraction (Optional). "
+             "Only pages from these Spaces will be extracted. "
+             "This overrides any other filtering (archived, personal, etc.)"
+         ),
+     )
+     parser.add_argument(
+         "--space-ids-blocked",
+         type=str,
+         nargs="+",
+         help="List of Confluence space IDs to exclude from the extraction (Optional)",
+     )
+
      confluence.extract_all(**parse_filled_arguments(parser))
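For reference, a standalone sketch (illustrative only, not the packaged command, which goes through the package's parse_filled_arguments helper) of how these new flags parse into keyword arguments; here unset options are simply dropped by filtering out None values:

# Standalone argparse sketch mirroring the new Confluence flags.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--include-archived-spaces", action="store_true", default=False)
parser.add_argument("--include-personal-spaces", action="store_true", default=False)
parser.add_argument("--space-ids-allowed", type=str, nargs="+")
parser.add_argument("--space-ids-blocked", type=str, nargs="+")

args = parser.parse_args(["--space-ids-allowed", "123", "456"])
kwargs = {k: v for k, v in vars(args).items() if v is not None}
print(kwargs)
# {'include_archived_spaces': False, 'include_personal_spaces': False, 'space_ids_allowed': ['123', '456']}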
@@ -21,6 +21,13 @@ def main():
      parser.add_argument("-b", "--server-url", help="Tableau server url")
      parser.add_argument("-i", "--site-id", help="Tableau site ID")
 
+     parser.add_argument(
+         "--skip-columns",
+         dest="skip_columns",
+         action="store_true",
+         help="Option to avoid extracting Tableau columns, defaults to False",
+     )
+
      parser.add_argument(
          "--with-pulse",
          dest="with_pulse",
@@ -1,5 +1,6 @@
  from collections.abc import Iterator
  from functools import partial
+ from typing import Optional
 
  from ....utils import (
      APIClient,
@@ -17,12 +18,18 @@ _HEADERS = {
      "Accept": "application/json",
      "Content-Type": "application/json",
  }
+ _STATUS_ARCHIVED = "archived"
+ _TYPE_PERSONAL = "personal"
 
 
  class ConfluenceClient(APIClient):
      def __init__(
          self,
          credentials: ConfluenceCredentials,
+         include_archived_spaces: bool = False,
+         include_personal_spaces: bool = False,
+         space_ids_allowed: Optional[set[str]] = None,
+         space_ids_blocked: Optional[set[str]] = None,
      ):
          self.account_id = credentials.account_id
          auth = BasicAuth(
@@ -34,12 +41,61 @@ class ConfluenceClient(APIClient):
              headers=_HEADERS,
          )
 
+         self.include_archived_spaces = include_archived_spaces
+         self.include_personal_spaces = include_personal_spaces
+         self.space_ids_allowed = space_ids_allowed or set()
+         self.space_ids_blocked = space_ids_blocked or set()
+
      def pages(self):
+         """Extracts all pages from all relevant Spaces."""
+         for space in self.spaces():
+             space_id = space["id"]
+             request = partial(
+                 self._get,
+                 endpoint=ConfluenceEndpointFactory.pages(space_id),
+             )
+             yield from fetch_all_pages(request, ConfluencePagination)
+
+     def spaces(self) -> Iterator[dict]:
+         """
+         Returns the spaces meeting the conditions defined by the settings.
+
+         If `space_ids_allowed` is not empty, only matching spaces are returned.
+
+         Otherwise, all spaces are filtered by excluding the following:
+         * The space is in the blocked list
+         * The space is personal (type=personal) and include_personal_spaces is False
+         * The space is archived (status=archived) and include_archived_spaces is False
+         """
          request = partial(
              self._get,
-             endpoint=ConfluenceEndpointFactory.pages(),
+             endpoint=ConfluenceEndpointFactory.spaces(),
          )
-         yield from fetch_all_pages(request, ConfluencePagination)
+         spaces = list(fetch_all_pages(request, ConfluencePagination))
+
+         if self.space_ids_allowed:
+             yield from (
+                 space
+                 for space in spaces
+                 if space["id"] in self.space_ids_allowed
+             )
+             return
+
+         for space in spaces:
+             space_id = space["id"]
+             type_ = space["type"]
+             status = space["status"]
+
+             if space_id in self.space_ids_blocked:
+                 continue
+
+             if status == _STATUS_ARCHIVED and not self.include_archived_spaces:
+                 continue
+
+             if type_ == _TYPE_PERSONAL and not self.include_personal_spaces:
+                 continue
+
+             yield space
 
      def users(self):
          request_body = {"accountIds": [self.account_id]}
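The filtering precedence above (allowlist overrides everything; otherwise blocked, archived and personal spaces are dropped) can be summarised in a small self-contained sketch. Names mirror the diff, but this is not the packaged implementation:

# Illustrative sketch of the space-filtering precedence, runnable on its own.
from typing import Optional


def filter_spaces(
    spaces: list,
    include_archived: bool = False,
    include_personal: bool = False,
    allowed: Optional[set] = None,
    blocked: Optional[set] = None,
) -> list:
    allowed = allowed or set()
    blocked = blocked or set()

    # The allowlist takes precedence over every other rule.
    if allowed:
        return [space for space in spaces if space["id"] in allowed]

    kept = []
    for space in spaces:
        if space["id"] in blocked:
            continue
        if space["status"] == "archived" and not include_archived:
            continue
        if space["type"] == "personal" and not include_personal:
            continue
        kept.append(space)
    return kept


spaces = [
    {"id": "42", "type": "global", "status": "current"},
    {"id": "666", "type": "personal", "status": "current"},
    {"id": "934", "type": "global", "status": "archived"},
]
# "42" is blocked, "934" is archived, "666" is kept because personal spaces are allowed here.
print([s["id"] for s in filter_spaces(spaces, include_personal=True, blocked={"42"})])  # ['666']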
@@ -0,0 +1,91 @@
+ from unittest.mock import MagicMock, patch
+
+ from .client import ConfluenceClient
+
+
+ def test_ConfluenceClient_filtered_spaces_with_allowlist():
+     both_blocked_and_allowed_space_id = "789"
+     archived_space_id = "934"
+     random_space = "1000"
+
+     spaces = [
+         # Both blocked and allowed space. "Allowed" setting takes precedence.
+         {
+             "id": both_blocked_and_allowed_space_id,
+             "type": "global",
+             "status": "current",
+         },
+         {"id": archived_space_id, "type": "global", "status": "archived"},
+         {"id": random_space, "type": "global", "status": "current"},
+     ]
+     # the "allowed" list overrides everything else
+     client = ConfluenceClient(
+         credentials=MagicMock(),
+         include_archived_spaces=True,
+         space_ids_allowed={
+             both_blocked_and_allowed_space_id,
+         },
+         space_ids_blocked={
+             both_blocked_and_allowed_space_id,
+         },
+     )
+
+     with (
+         patch(
+             "source.packages.extractor.castor_extractor.knowledge.confluence.client.client.ConfluenceClient._get"
+         ),
+         patch(
+             "source.packages.extractor.castor_extractor.knowledge.confluence.client.client.fetch_all_pages"
+         ) as mock_fetch_all_pages,
+     ):
+         mock_fetch_all_pages.return_value = spaces
+
+         filtered_spaces = list(client.spaces())
+
+     assert len(filtered_spaces) == 1
+     filtered_space_ids = {space["id"] for space in filtered_spaces}
+     assert set(filtered_space_ids) == {both_blocked_and_allowed_space_id}
+
+
+ def test_ConfluenceClient_filtered_spaces():
+     blocked_id = "42"
+     personal_id = "666"
+     archived_id = "934"
+     random_id = "1000"
+
+     # test the other settings : allow personal spaces & block space "42"
+     client = ConfluenceClient(
+         credentials=MagicMock(),
+         include_archived_spaces=False,
+         include_personal_spaces=True,
+         space_ids_blocked={blocked_id},
+     )
+
+     spaces = [
+         # Blocked space, to be skipped
+         {"id": blocked_id, "type": "global", "status": "current"},
+         # Archived space, to be skipped
+         {"id": archived_id, "type": "collaboration", "status": "archived"},
+         # Personal space, to be included
+         {"id": personal_id, "type": "personal", "status": "current"},
+         # Valid space
+         {"id": random_id, "type": "knowledge_base", "status": "current"},
+     ]
+
+     with (
+         patch(
+             "source.packages.extractor.castor_extractor.knowledge.confluence.client.client.ConfluenceClient._get"
+         ),
+         patch(
+             "source.packages.extractor.castor_extractor.knowledge.confluence.client.client.fetch_all_pages"
+         ) as mock_fetch_all_pages,
+     ):
+         mock_fetch_all_pages.return_value = spaces
+
+         filtered_spaces = list(client.spaces())
+
+     filtered_space_ids = [space["id"] for space in filtered_spaces]
+
+     # no duplicates
+     assert len(filtered_space_ids) == len(set(filtered_space_ids))
+     assert set(filtered_space_ids) == {personal_id, random_id}
@@ -6,15 +6,24 @@ class ConfluenceEndpointFactory:
 
      API = "wiki/api/v2/"
      PAGES = "pages"
+     SPACES = "spaces"
      USERS = "users-bulk"
 
      @classmethod
-     def pages(cls) -> str:
+     def pages(cls, space_id: str) -> str:
          """
-         Endpoint to fetch all pages.
-         More: https://developer.atlassian.com/cloud/confluence/rest/v2/api-group-page/#api-pages-get
+         Endpoint to fetch all pages in the given space.
+         More: https://developer.atlassian.com/cloud/confluence/rest/v2/api-group-page/#api-spaces-id-pages-get
          """
-         return f"{cls.API}{cls.PAGES}?body-format=atlas_doc_format"
+         return f"{cls.API}{cls.SPACES}/{space_id}/{cls.PAGES}?body-format=atlas_doc_format"
+
+     @classmethod
+     def spaces(cls) -> str:
+         """
+         Endpoint to fetch all spaces.
+         https://developer.atlassian.com/cloud/confluence/rest/v2/api-group-space/#api-spaces-get
+         """
+         return f"{cls.API}{cls.SPACES}"
 
      @classmethod
      def users(cls) -> str:
@@ -42,7 +42,13 @@ def extract_all(**kwargs) -> None:
      output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
 
      credentials = ConfluenceCredentials(**kwargs)
-     client = ConfluenceClient(credentials=credentials)
+     client = ConfluenceClient(
+         credentials=credentials,
+         include_archived_spaces=kwargs.get("include_archived_spaces") or False,
+         include_personal_spaces=kwargs.get("include_personal_spaces") or False,
+         space_ids_allowed=kwargs.get("space_ids_allowed"),
+         space_ids_blocked=kwargs.get("space_ids_blocked"),
+     )
 
      ts = current_timestamp()
 
@@ -1,7 +1,15 @@
  from http import HTTPStatus
  from typing import Iterator, Optional
 
- from ....utils import APIClient, BearerAuth, RequestSafeMode, SerializedAsset
+ import requests
+
+ from ....utils import (
+     APIClient,
+     BearerAuth,
+     RequestSafeMode,
+     SerializedAsset,
+     retry,
+ )
  from ..assets import CoalesceAsset, CoalesceQualityAsset
  from .credentials import CoalesceCredentials
  from .endpoint import (
@@ -12,6 +20,9 @@ from .utils import column_names_per_node, is_test, test_names_per_node
 
  _LIMIT_MAX = 1_000
  _MAX_ERRORS = 50
+ _RETRY_BASE_MS = 10 * 60 * 1000  # 10 minutes
+ _RETRY_COUNT = 2
+ _RETRY_EXCEPTIONS = [requests.exceptions.ConnectTimeout]
 
 
  def _run_result_payload(result: dict, query_result: dict) -> dict:
@@ -64,6 +75,11 @@ class CoalesceClient(APIClient):
          result = self._get(endpoint=endpoint)
          return result["data"]
 
+     @retry(
+         exceptions=_RETRY_EXCEPTIONS,
+         max_retries=_RETRY_COUNT,
+         base_ms=_RETRY_BASE_MS,
+     )
      def _node_details(self, environment_id: int, node_id: str) -> dict:
          endpoint = CoalesceEndpointFactory.nodes(
              environment_id=environment_id, node_id=node_id
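The `retry` helper is imported from the package's internal `utils` module and its implementation is not part of this diff; a minimal decorator with the same keyword interface might look like the sketch below. The fixed wait of `base_ms` between attempts is an assumption, not the packaged backoff strategy:

# Illustrative retry decorator sketch, matching only the keyword interface
# used above (exceptions, max_retries, base_ms). Not the packaged helper.
import time
from functools import wraps


def retry(exceptions, max_retries: int, base_ms: int):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(max_retries + 1):
                try:
                    return func(*args, **kwargs)
                except tuple(exceptions):
                    if attempt == max_retries:
                        raise  # out of retries: propagate the last error
                    time.sleep(base_ms / 1000)  # assumed fixed wait between attempts
        return wrapper
    return decorator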
@@ -121,11 +121,13 @@ class TableauClient:
          self,
          credentials: TableauCredentials,
          timeout_sec: int = DEFAULT_TIMEOUT_SECONDS,
+         with_columns: bool = True,
          with_pulse: bool = False,
          override_page_size: Optional[int] = None,
      ):
          self._credentials = credentials
          self._server = _server(credentials.server_url, timeout_sec)
+         self._with_columns = with_columns
          self._with_pulse = with_pulse
 
          self._client_metadata = TableauClientMetadataApi(
@@ -215,6 +217,10 @@ class TableauClient:
              logger.info(f"Skipping asset {asset} - Tableau Pulse de-activated")
              return []
 
+         if asset == TableauAsset.COLUMN and not self._with_columns:
+             logger.info(f"Skipping asset {asset} - deactivated columns")
+             return []
+
          logger.info(f"Extracting {asset.name}...")
 
          if asset == TableauAsset.DATASOURCE:
@@ -32,6 +32,7 @@ def extract_all(**kwargs) -> None:
          output_directory
      """
      output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
+     with_columns = not kwargs.get("skip_columns")
      with_pulse = kwargs.get("with_pulse") or False
      page_size = kwargs.get("page_size")
      timestamp = current_timestamp()
@@ -39,6 +40,7 @@ def extract_all(**kwargs) -> None:
      credentials = TableauCredentials(**kwargs)
      client = TableauClient(
          credentials,
+         with_columns=with_columns,
          with_pulse=with_pulse,
          override_page_size=page_size,
      )
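Taken together, `--skip-columns` is inverted into `with_columns` before reaching `TableauClient`, which then short-circuits column extraction. A minimal sketch of that mapping (mirrors the extract_all change above; not the packaged code):

# Illustrative only: how the skip_columns kwarg maps onto with_columns.
def resolve_with_columns(kwargs: dict) -> bool:
    # Flag absent or False -> columns are extracted (default behaviour).
    return not kwargs.get("skip_columns")


print(resolve_with_columns({}))                      # True
print(resolve_with_columns({"skip_columns": True}))  # False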
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: castor-extractor
- Version: 0.24.13
+ Version: 0.24.15
  Summary: Extract your metadata assets.
  Home-page: https://www.castordoc.com/
  License: EULA
@@ -215,6 +215,14 @@ For any questions or bug report, contact us at [support@coalesce.io](mailto:supp
 
  # Changelog
 
+ ## 0.24.15 - 2025-05-12
+
+ * Tableau: Add argument to skip columns extraction
+
+ ## 0.24.14 - 2025-05-06
+
+ * Confluence: extract pages per space to allow additional filtering. By default, pages from archived or personal spaces are not extracted.
+
  ## 0.24.13 - 2025-05-05
 
  * Rollback cloud-storage version as it's not compatible with Keboola
@@ -1,4 +1,4 @@
- CHANGELOG.md,sha256=u3BUqDPLtQ7K1RytlxEmtcSy4kJG1qhPtmrimODTYZU,17156
+ CHANGELOG.md,sha256=6XUz09FfUZSXFfROVG7BliBupfyr5eeBy0J3cQZVvys,17398
  Dockerfile,sha256=xQ05-CFfGShT3oUqaiumaldwA288dj9Yb_pxofQpufg,301
  DockerfileUsage.md,sha256=2hkJQF-5JuuzfPZ7IOxgM6QgIQW7l-9oRMFVwyXC4gE,998
  LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
@@ -6,7 +6,7 @@ README.md,sha256=C6hTyZO60T7z7xwHbspHlii384Jn02k0Rycxu3bCX0o,3866
  castor_extractor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  castor_extractor/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  castor_extractor/commands/extract_bigquery.py,sha256=dU4OiYO1V0n32orvZnMh1_xtFKF_VxHNXcVsH3otY-g,1269
- castor_extractor/commands/extract_confluence.py,sha256=xQjC0VZdz8jFHnugqQ0fQGjzGq34v49cqcP4ONeVYNs,748
+ castor_extractor/commands/extract_confluence.py,sha256=blYcnDqywXNKRQ1aZAD9FclhLlO7x8Y_tb0lgl85v0w,1641
  castor_extractor/commands/extract_databricks.py,sha256=SVKyoa-BBUQAM6HRHf1Wdg9-tpICic2yyvXQwHcNBhA,1264
  castor_extractor/commands/extract_domo.py,sha256=jvAawUsUTHrwCn_koK6StmQr4n_b5GyvJi6uu6WS0SM,1061
  castor_extractor/commands/extract_looker.py,sha256=cySLiolLCgrREJ9d0kMrJ7P8K3efHTBTzShalWVfI3A,1214
@@ -26,7 +26,7 @@ castor_extractor/commands/extract_sigma.py,sha256=sxewHcZ1Doq35V2qnpX_zCKKXkrb1_
  castor_extractor/commands/extract_snowflake.py,sha256=GwlrRxwEBjHqGs_3bs5vM9fzmv61_iwvBr1KcIgFgWM,2161
  castor_extractor/commands/extract_sqlserver.py,sha256=lwhbcNChaXHZgMgSOch3faVr7WJw-sDU6GHl3lzBt_0,1141
  castor_extractor/commands/extract_strategy.py,sha256=Q-pUymatPrBFGXobhyUPzFph0-t774-XOpjdCFF1dYo,821
- castor_extractor/commands/extract_tableau.py,sha256=xXlLKLN8Eu_a8Kt2F4E-C5D-gq8SUmvoxJcdR_thKKY,1365
+ castor_extractor/commands/extract_tableau.py,sha256=ngujGYohWOqOK1qjIP1Hh951jr0KNKNSeOyoaOnO450,1558
  castor_extractor/commands/extract_thoughtspot.py,sha256=caAYJlH-vK7u5IUB6OKXxcaWfLgc7d_XqnFDWK6YNS4,639
  castor_extractor/commands/file_check.py,sha256=TJx76Ymd0QCECmq35zRJMkPE8DJtSInB28MuSXWk8Ao,2644
  castor_extractor/commands/upload.py,sha256=rLXp7gQ8zb1kLbho4FT87q8eJd8Gvo_TkyIynAaQ-4s,1342
@@ -45,11 +45,12 @@ castor_extractor/knowledge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJ
  castor_extractor/knowledge/confluence/__init__.py,sha256=pRT615pMDlB7Ifs09erVn2EdpZHgkvX5selemWU3VPE,129
  castor_extractor/knowledge/confluence/assets.py,sha256=zv2G2LB8H0fKDbVJ4kHrAjbqehXI_K-wgd_ghSXGFvs,144
  castor_extractor/knowledge/confluence/client/__init__.py,sha256=ALAzo0JEhxFzH2FnIO6HmtkAGS2_bGY8KXXMcTGV3aE,84
- castor_extractor/knowledge/confluence/client/client.py,sha256=F8A_ckZ4ojJC8BnAXeAIHUC2oOQMBWnTfqQwJbAyTns,1689
+ castor_extractor/knowledge/confluence/client/client.py,sha256=ihdagtAEgIcO5MmX5-coGEJkUg7_Tw1_7Vl50NDorhE,3731
+ castor_extractor/knowledge/confluence/client/client_test.py,sha256=LTT49ORl0DPTdDpKdREUErnwIA40xPy2C3uwdkVS1I0,3071
  castor_extractor/knowledge/confluence/client/credentials.py,sha256=tqUMw-SVoAi4o6I6OeGk4MeDiIPU3-ihhaomXv4CQ64,419
- castor_extractor/knowledge/confluence/client/endpoints.py,sha256=eWMKtjDUPGoKR8Nqq18JTIoEq913GNo1Klm9RduIOHM,765
+ castor_extractor/knowledge/confluence/client/endpoints.py,sha256=ClBzE8a5zqA4ngAecc8vMv9QJCdbtcv3GKuorZ8kOdA,1100
  castor_extractor/knowledge/confluence/client/pagination.py,sha256=ty4meiMEujDVSiQyOJTibd-ReYyDyGezdFuk7EAGtMA,862
- castor_extractor/knowledge/confluence/extract.py,sha256=6pA68CmYNC50qCJ7NgZMW0jD4ev0a_ltI5kSyBqSC0U,1565
+ castor_extractor/knowledge/confluence/extract.py,sha256=mOAs5uvjM0LZJzrD36uFAt_nsiqQ48kKTDFIKru3LSo,1858
  castor_extractor/knowledge/notion/__init__.py,sha256=ZDmh0eNSxHf1zVPm0aYlKPci-vzOXhAgdsWjS2hdjh4,117
  castor_extractor/knowledge/notion/assets.py,sha256=QHv1-pomt5UeN_prP2L6t_zJ-tDSqB8LgopkGAODYPQ,164
  castor_extractor/knowledge/notion/client/__init__.py,sha256=CDPorBCethuNTEtpjvHGcWnWeVfqkEq-IbakWjDKATw,76
@@ -73,7 +74,7 @@ castor_extractor/transformation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
  castor_extractor/transformation/coalesce/__init__.py,sha256=CW_qdtEfwgJRsCyBlk5hNlxwEO-VV6mBXZvkRbND_J8,112
  castor_extractor/transformation/coalesce/assets.py,sha256=pzccYPP66c9PAnVroemx7-6MeRHw7Ft1OlTC6jIamAA,363
  castor_extractor/transformation/coalesce/client/__init__.py,sha256=VRmVpH29rOghtDQnCN7dAdA0dI0Lxseu4BC8rnwM9dU,80
- castor_extractor/transformation/coalesce/client/client.py,sha256=yrPzIk-6VN4MDHwti3Yxy3PCfHmxE6znjuehl_-dYTg,6151
+ castor_extractor/transformation/coalesce/client/client.py,sha256=-fFxWtDPPruNmDPc6FXft_6RwRKEee4JM-13d90fms0,6442
  castor_extractor/transformation/coalesce/client/credentials.py,sha256=jbJxjbdPspf-dzYKfeb7oqL_8TXd1nvkJrjAcdAnLPc,548
  castor_extractor/transformation/coalesce/client/endpoint.py,sha256=0uLh7dpA1vsR9qr_50SEYV_-heQE4BwED9oNMgYsL-w,1272
  castor_extractor/transformation/coalesce/client/type.py,sha256=oiiVP9NL0ijTXyQmaB8aJVYckc7m-m8ZgMyNIAduUKE,43
@@ -282,7 +283,7 @@ castor_extractor/visualization/strategy/extract.py,sha256=2fBuvS2xiOGXRpxXnZsE_C
  castor_extractor/visualization/tableau/__init__.py,sha256=eFI_1hjdkxyUiAYiy3szwyuwn3yJ5C_KbpBU0ySJDcQ,138
  castor_extractor/visualization/tableau/assets.py,sha256=HbCRd8VCj1WBEeqg9jwnygnT7xOFJ6PQD7Lq7sV-XR0,635
  castor_extractor/visualization/tableau/client/__init__.py,sha256=P8RKFKOC63WkH5hdEytJOwHS9vzQ8GXreLfXZetmMP8,78
- castor_extractor/visualization/tableau/client/client.py,sha256=zzqhzIqKyJygo4ZNGk6cZh0e6Z9R1W5T0P9un52KC1M,7626
+ castor_extractor/visualization/tableau/client/client.py,sha256=iJ3Y-vwPvmPyAUTs1PqFJEZelPGiLvsiwXpTI3b5THc,7867
  castor_extractor/visualization/tableau/client/client_metadata_api.py,sha256=ryRq4_qUok8vvWGhj5CNWXtwR2JlUsu1qjsov2KhQTE,6286
  castor_extractor/visualization/tableau/client/client_metadata_api_test.py,sha256=rikyQKDLFYHLJhHJTF3LwWhKJ80svtTsYp5n7n9oTU8,2665
  castor_extractor/visualization/tableau/client/client_rest_api.py,sha256=x4dNw4PPJdalTlGowwkANwqiS2ZhGxzpQytkHq3KbpY,3988
@@ -292,7 +293,7 @@ castor_extractor/visualization/tableau/client/errors.py,sha256=ecT8Tit5VtzrOBB9y
  castor_extractor/visualization/tableau/client/gql_queries.py,sha256=XJAfhpMZ5S7-AhfpOaoHMHCAdil-l5e5xB-CH4NC38M,2177
  castor_extractor/visualization/tableau/client/rest_fields.py,sha256=ZKYYuMxg9PXhczVXaD4rXNk7dYyWJ1_bVM8FLEXju7s,888
  castor_extractor/visualization/tableau/constants.py,sha256=lHGB50FgVNO2nXeIhkvQKivD8ZFBIjDrflgD5cTXKJw,104
- castor_extractor/visualization/tableau/extract.py,sha256=FnjmmUdNA9MEf3S5Tw37x6ZXxVsK8R3YnVk1UVYbaZk,1423
+ castor_extractor/visualization/tableau/extract.py,sha256=hGVr1BZVsHlIgNXOFusRN2YwUUhXvF3reOeN8g1CTEo,1508
  castor_extractor/visualization/thoughtspot/__init__.py,sha256=NhTGUk5Kdt54oCjHYoAt0cLBmVLys5lFYiRANL6wCmI,150
  castor_extractor/visualization/thoughtspot/assets.py,sha256=SAQWPKaD2NTSDg7-GSkcRSSEkKSws0MJfOVcHkdeTSg,276
  castor_extractor/visualization/thoughtspot/client/__init__.py,sha256=svrE2rMxR-OXctjPeAHMEPePlfcra-9KDevTMcHunAA,86
@@ -423,8 +424,8 @@ castor_extractor/warehouse/sqlserver/queries/table.sql,sha256=kbBQP-TdG5px1IVgyx
  castor_extractor/warehouse/sqlserver/queries/user.sql,sha256=gOrZsMVypusR2dc4vwVs4E1a-CliRsr_UjnD2EbXs-A,94
  castor_extractor/warehouse/sqlserver/query.py,sha256=g0hPT-RmeGi2DyenAi3o72cTlQsLToXIFYojqc8E5fQ,533
  castor_extractor/warehouse/synapse/queries/column.sql,sha256=lNcFoIW3Y0PFOqoOzJEXmPvZvfAsY0AP63Mu2LuPzPo,1351
- castor_extractor-0.24.13.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
- castor_extractor-0.24.13.dist-info/METADATA,sha256=TBjkOLzvfC6wrL6Myv0N7IkcNXzkQDTqdd8QdbNpbxU,24609
- castor_extractor-0.24.13.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- castor_extractor-0.24.13.dist-info/entry_points.txt,sha256=_F-qeZCybjoMkNb9ErEhnyqXuG6afHIFQhakdBHZsr4,1803
- castor_extractor-0.24.13.dist-info/RECORD,,
+ castor_extractor-0.24.15.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
+ castor_extractor-0.24.15.dist-info/METADATA,sha256=ktAgO-d5jJmInoD_VCLwIT522Qy31paP3Smh_TGa6MI,24851
+ castor_extractor-0.24.15.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ castor_extractor-0.24.15.dist-info/entry_points.txt,sha256=_F-qeZCybjoMkNb9ErEhnyqXuG6afHIFQhakdBHZsr4,1803
+ castor_extractor-0.24.15.dist-info/RECORD,,