castor-extractor 0.24.57__py3-none-any.whl → 0.25.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (24)
  1. CHANGELOG.md +12 -0
  2. castor_extractor/commands/extract_count.py +22 -0
  3. castor_extractor/commands/extract_powerbi.py +12 -1
  4. castor_extractor/visualization/count/__init__.py +3 -0
  5. castor_extractor/visualization/count/assets.py +11 -0
  6. castor_extractor/visualization/count/client/__init__.py +2 -0
  7. castor_extractor/visualization/count/client/client.py +50 -0
  8. castor_extractor/visualization/count/client/credentials.py +10 -0
  9. castor_extractor/visualization/count/client/queries/canvas_permissions.sql +6 -0
  10. castor_extractor/visualization/count/client/queries/canvases.sql +6 -0
  11. castor_extractor/visualization/count/client/queries/cells.sql +8 -0
  12. castor_extractor/visualization/count/client/queries/projects.sql +5 -0
  13. castor_extractor/visualization/count/client/queries/users.sql +8 -0
  14. castor_extractor/visualization/count/extract.py +54 -0
  15. castor_extractor/visualization/powerbi/client/__init__.py +1 -0
  16. castor_extractor/visualization/powerbi/client/authentication.py +20 -2
  17. castor_extractor/visualization/powerbi/client/credentials.py +9 -2
  18. castor_extractor/visualization/powerbi/extract.py +23 -3
  19. castor_extractor/visualization/sigma/client/client.py +28 -8
  20. {castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/METADATA +15 -2
  21. {castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/RECORD +24 -12
  22. {castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/entry_points.txt +1 -0
  23. {castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/LICENCE +0 -0
  24. {castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/WHEEL +0 -0
CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # Changelog

+ ## 0.25.2 - 2025-09-30
+
+ * PowerBi: Support auth with private_key
+
+ ## 0.25.1 - 2025-09-29
+
+ * Sigma: catch ReadTimeouts during elements extraction
+
+ ## 0.25.0 - 2025-09-15
+
+ * Count: adding connector
+
  ## 0.24.57 - 2025-09-24

  * Sigma:
castor_extractor/commands/extract_count.py ADDED
@@ -0,0 +1,22 @@
+ from argparse import ArgumentParser
+
+ from castor_extractor.utils import parse_filled_arguments  # type: ignore
+ from castor_extractor.visualization import count  # type: ignore
+
+
+ def main():
+     parser = ArgumentParser()
+
+     parser.add_argument(
+         "-c",
+         "--credentials",
+         help="GCP credentials as string",
+     )
+     parser.add_argument("-o", "--output", help="Directory to write to")
+     parser.add_argument(
+         "-d",
+         "--dataset_id",
+         help="dataset id, where count info is stored for the current customer",
+     )
+
+     count.extract_all(**parse_filled_arguments(parser))
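The console script registered for this file (castor-extract-count, see the entry_points.txt diff at the end) simply forwards these flags to count.extract_all. A minimal programmatic sketch of the same call, assuming the keyword names match the CLI flags above; all values are placeholders:

    from castor_extractor.visualization import count  # type: ignore

    # Placeholder values: credentials is the GCP service-account JSON as a string,
    # dataset_id is the BigQuery dataset holding the Count metadata tables.
    count.extract_all(
        credentials="<gcp-service-account-json>",
        dataset_id="<count-metadata-dataset>",
        output="/tmp/count",
    )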
castor_extractor/commands/extract_powerbi.py CHANGED
@@ -9,10 +9,21 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")

  def main():
      parser = ArgumentParser()
+     auth_group = parser.add_mutually_exclusive_group(required=True)

      parser.add_argument("-t", "--tenant_id", help="PowerBi tenant ID")
      parser.add_argument("-c", "--client_id", help="PowerBi client ID")
-     parser.add_argument("-s", "--secret", help="PowerBi password")
+     auth_group.add_argument(
+         "-s",
+         "--secret",
+         help="PowerBi password as a string",
+     )
+     auth_group.add_argument(
+         "-cert",
+         "--certificate",
+         help="file path to json certificate file with "
+         "keys: private_key, thumbprint, public_certificate",
+     )
      parser.add_argument(
          "-sc",
          "--scopes",
castor_extractor/visualization/count/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .assets import CountAsset
+ from .client import CountClient, CountCredentials
+ from .extract import extract_all
castor_extractor/visualization/count/assets.py ADDED
@@ -0,0 +1,11 @@
+ from ...types import ExternalAsset
+
+
+ class CountAsset(ExternalAsset):
+     """Count assets"""
+
+     CANVASES = "canvases"
+     CANVAS_PERMISSIONS = "canvas_permissions"
+     CELLS = "cells"
+     PROJECTS = "projects"
+     USERS = "users"
castor_extractor/visualization/count/client/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from .client import CountClient
+ from .credentials import CountCredentials
castor_extractor/visualization/count/client/client.py ADDED
@@ -0,0 +1,50 @@
+ import logging
+ from dataclasses import asdict
+ from typing import Any, Iterator
+
+ from ....utils import load_file
+ from ....warehouse.bigquery import BigQueryClient
+ from ..assets import (
+     CountAsset,
+ )
+ from .credentials import CountCredentials
+
+ logger = logging.getLogger(__name__)
+
+ _QUERIES_FOLDER = "queries"
+
+
+ class CountClient(BigQueryClient):
+     """
+     Count.co does not currently provide an official API.
+     Instead, metadata such as dashboards, users, and queries is made available through
+     special metadata tables stored in BigQuery.
+
+     This client extends `BigQueryClient` to access and interact with those metadata tables.
+     """
+
+     def __init__(self, credentials: CountCredentials):
+         super().__init__(asdict(credentials))
+         self.project_id = credentials.project_id
+         self.dataset_id = credentials.dataset_id
+
+     def _load_query(self, asset: CountAsset) -> str:
+         query = load_file(
+             f"{_QUERIES_FOLDER}/{asset.name.lower()}.sql", __file__
+         )
+         return query.format(
+             project_id=self.project_id, dataset_id=self.dataset_id
+         )
+
+     def fetch(self, asset: CountAsset) -> Iterator[dict[str, Any]]:
+         """
+         Fetch the asset given as param, by running a BigQuery query.
+         """
+         logger.info(f"Running BigQuery query to fetch: {asset.name}")
+
+         query_str = self._load_query(asset)
+         job = self.client.query(query_str)
+         results = job.result()
+
+         for row in results:
+             yield dict(row)
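Each CountAsset value maps to one SQL file under queries/ (added below); fetch() loads that file, substitutes {project_id} and {dataset_id}, runs it in BigQuery and yields one dict per row. A minimal usage sketch, assuming credentials is an already-built CountCredentials instance (BigQuery fields omitted here):

    from castor_extractor.visualization.count import CountAsset, CountClient  # type: ignore

    # Sketch only: construction of the CountCredentials instance is omitted.
    client = CountClient(credentials=credentials)

    # Runs queries/users.sql against `{project_id}.{dataset_id}.users`.
    for row in client.fetch(CountAsset.USERS):
        print(row["email"], row["role"])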
castor_extractor/visualization/count/client/credentials.py ADDED
@@ -0,0 +1,10 @@
+ from pydantic.dataclasses import dataclass
+
+ from ....warehouse.bigquery import BigQueryCredentials
+
+
+ @dataclass
+ class CountCredentials(BigQueryCredentials):
+     """Count credentials extending BigQuery credentials with additional dataset information"""
+
+     dataset_id: str
castor_extractor/visualization/count/client/queries/canvas_permissions.sql ADDED
@@ -0,0 +1,6 @@
+ SELECT
+     canvas_key,
+     type,
+     role,
+     user_key
+ FROM `{project_id}.{dataset_id}.canvas_permissions`
castor_extractor/visualization/count/client/queries/canvases.sql ADDED
@@ -0,0 +1,6 @@
+ SELECT
+     key,
+     project_key,
+     title
+ FROM `{project_id}.{dataset_id}.canvases`
+
castor_extractor/visualization/count/client/queries/cells.sql ADDED
@@ -0,0 +1,8 @@
+ SELECT
+     key,
+     canvas_key,
+     name,
+     type,
+     connection_key
+ FROM `{project_id}.{dataset_id}.cells`
+
castor_extractor/visualization/count/client/queries/projects.sql ADDED
@@ -0,0 +1,5 @@
+ SELECT
+     key,
+     name
+ FROM `{project_id}.{dataset_id}.projects`
+
castor_extractor/visualization/count/client/queries/users.sql ADDED
@@ -0,0 +1,8 @@
+ SELECT
+     key,
+     created_at,
+     name,
+     email,
+     role
+ FROM `{project_id}.{dataset_id}.users`
+
castor_extractor/visualization/count/extract.py ADDED
@@ -0,0 +1,54 @@
+ import logging
+ from typing import Iterable, Iterator, Union
+
+ from ...utils import (
+     OUTPUT_DIR,
+     current_timestamp,
+     deep_serialize,
+     from_env,
+     get_output_filename,
+     write_json,
+     write_summary,
+ )
+ from .assets import (
+     CountAsset,
+ )
+ from .client import (
+     CountClient,
+     CountCredentials,
+ )
+
+ logger = logging.getLogger(__name__)
+
+
+ def iterate_all_data(
+     client: CountClient,
+ ) -> Iterable[tuple[CountAsset, Union[list, Iterator, dict]]]:
+     """Iterate over the extracted data from count"""
+
+     for asset in CountAsset:
+         logger.info(f"Extracting {asset.value} from API")
+         data = client.fetch(asset)
+         yield asset, deep_serialize(data)
+
+
+ def extract_all(**kwargs) -> None:
+     """
+     Extract data from count BigQuery project
+     Store the output files locally under the given output_directory
+     """
+     _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
+     dataset_id = kwargs.get("dataset_id")
+     if not dataset_id:
+         raise ValueError("dataset_id is required")
+
+     credentials = CountCredentials(**kwargs)
+     client = CountClient(credentials=credentials)
+
+     ts = current_timestamp()
+
+     for key, data in iterate_all_data(client):
+         filename = get_output_filename(key.name.lower(), _output_directory, ts)
+         write_json(filename, list(data))
+
+     write_summary(_output_directory, ts)
castor_extractor/visualization/powerbi/client/__init__.py CHANGED
@@ -3,5 +3,6 @@ from .credentials import (
      CLIENT_APP_BASE,
      DEFAULT_SCOPE,
      REST_API_BASE_PATH,
+     PowerbiCertificate,
      PowerbiCredentials,
  )
castor_extractor/visualization/powerbi/client/authentication.py CHANGED
@@ -1,11 +1,24 @@
+ from typing import Optional, Union
+
  import msal  # type: ignore

  from ....utils import BearerAuth
  from .constants import Keys
- from .credentials import PowerbiCredentials
+ from .credentials import PowerbiCertificate, PowerbiCredentials
  from .endpoints import PowerBiEndpointFactory


+ def _get_client_credential(
+     secret: Optional[str], certificate: Optional[PowerbiCertificate]
+ ) -> Union[str, dict]:
+     if secret:
+         return secret
+     if certificate:
+         return certificate.model_dump()
+
+     raise ValueError("Either certificate or secret must be provided.")
+
+
  class PowerBiBearerAuth(BearerAuth):
      def __init__(self, credentials: PowerbiCredentials):
          self.credentials = credentials
@@ -14,10 +27,15 @@ class PowerBiBearerAuth:
              api_base=self.credentials.api_base,
          )
          authority = endpoint_factory.authority(self.credentials.tenant_id)
+
+         client_credential = _get_client_credential(
+             self.credentials.secret, self.credentials.certificate
+         )
+
          self.app = msal.ConfidentialClientApplication(
              client_id=self.credentials.client_id,
              authority=authority,
-             client_credential=self.credentials.secret,
+             client_credential=client_credential,
          )

      def fetch_token(self):
castor_extractor/visualization/powerbi/client/credentials.py CHANGED
@@ -1,6 +1,6 @@
  from typing import Optional

- from pydantic import Field, field_validator
+ from pydantic import BaseModel, field_validator
  from pydantic_settings import BaseSettings, SettingsConfigDict

  DEFAULT_SCOPE = "https://analysis.windows.net/powerbi/api/.default"
@@ -10,6 +10,12 @@ CLIENT_APP_BASE = "https://login.microsoftonline.com"
  REST_API_BASE_PATH = "https://api.powerbi.com/v1.0/myorg"


+ class PowerbiCertificate(BaseModel):
+     public_certificate: Optional[str] = None
+     private_key: str
+     thumbprint: str
+
+
  class PowerbiCredentials(BaseSettings):
      """Class to handle PowerBI rest API permissions"""

@@ -21,7 +27,8 @@ class PowerbiCredentials(BaseSettings):

      client_id: str
      tenant_id: str
-     secret: str = Field(repr=False)
+     secret: Optional[str] = None
+     certificate: Optional[PowerbiCertificate] = None
      api_base: str = REST_API_BASE_PATH
      login_url: str = CLIENT_APP_BASE
      scopes: list[str] = [DEFAULT_SCOPE]
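With this change either a client secret or a certificate can be supplied; _get_client_credential (in the authentication.py diff above) passes certificate.model_dump() to msal, which accepts a dict with private_key, thumbprint and public_certificate keys as client_credential. A minimal construction sketch with placeholder values only:

    from castor_extractor.visualization.powerbi.client import (  # type: ignore
        PowerbiCertificate,
        PowerbiCredentials,
    )

    # Placeholder values throughout.
    cert = PowerbiCertificate(
        private_key="-----BEGIN PRIVATE KEY-----\n...",
        thumbprint="<certificate thumbprint>",
        public_certificate=None,  # optional
    )
    creds = PowerbiCredentials(
        client_id="<app-client-id>",
        tenant_id="<tenant-id>",
        certificate=cert,  # mutually exclusive with secret="<client-secret>"
    )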
castor_extractor/visualization/powerbi/extract.py CHANGED
@@ -1,6 +1,7 @@
+ import json
  import logging
  from collections.abc import Iterable
- from typing import Union
+ from typing import Optional, Union

  from ...utils import (
      OUTPUT_DIR,
@@ -12,11 +13,22 @@ from ...utils import (
      write_summary,
  )
  from .assets import PowerBiAsset
- from .client import PowerbiClient, PowerbiCredentials
+ from .client import PowerbiCertificate, PowerbiClient, PowerbiCredentials

  logger = logging.getLogger(__name__)


+ def _load_certificate(
+     certificate: Optional[str],
+ ) -> Optional[PowerbiCertificate]:
+     if not certificate:
+         return None
+
+     with open(certificate) as file:
+         cert = json.load(file)
+     return PowerbiCertificate(**cert)
+
+
  def iterate_all_data(
      client: PowerbiClient,
  ) -> Iterable[tuple[PowerBiAsset, Union[list, dict]]]:
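_load_certificate expects the --certificate flag to point to a JSON file whose keys mirror PowerbiCertificate. A sketch of how such a file could be produced; the file name and all values are placeholders:

    import json

    # Shape of the file passed via --certificate (placeholders only).
    cert_payload = {
        "private_key": "-----BEGIN PRIVATE KEY-----\n...",
        "thumbprint": "<certificate thumbprint>",
        "public_certificate": "-----BEGIN CERTIFICATE-----\n...",  # optional
    }
    with open("powerbi_certificate.json", "w") as f:
        json.dump(cert_payload, f)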
@@ -36,7 +48,15 @@ def extract_all(**kwargs) -> None:
      Store the output files locally under the given output_directory
      """
      _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
-     creds = PowerbiCredentials(**kwargs)
+     creds = PowerbiCredentials(
+         client_id=kwargs.get("client_id"),
+         tenant_id=kwargs.get("tenant_id"),
+         secret=kwargs.get("secret"),
+         certificate=_load_certificate(kwargs.get("certificate")),
+         api_base=kwargs.get("api_base"),
+         login_url=kwargs.get("login_url"),
+         scopes=kwargs.get("scopes"),
+     )
      client = PowerbiClient(creds)
      ts = current_timestamp()

castor_extractor/visualization/sigma/client/client.py CHANGED
@@ -4,6 +4,8 @@ from functools import partial
  from http import HTTPStatus
  from typing import Callable, Iterable, Optional

+ from requests import ReadTimeout
+
  from ....utils import (
      APIClient,
      RequestSafeMode,
@@ -114,6 +116,31 @@ class SigmaClient(APIClient):
          request = self._get_paginated(endpoint=SigmaEndpointFactory.workbooks())
          yield from fetch_all_pages(request, SigmaPagination)

+     @staticmethod
+     def _safe_fetch_elements(
+         elements: Iterator[dict],
+         workbook_id: str,
+         page_id: str,
+     ) -> Iterator[dict]:
+         """
+         Safely iterates over elements with ReadTimeout handling. In case of
+         said error, it skips the entire rest of the page.
+         """
+         try:
+             for element in elements:
+                 if element.get("type") not in _DATA_ELEMENTS:
+                     continue
+                 yield {
+                     **element,
+                     "workbook_id": workbook_id,
+                     "page_id": page_id,
+                 }
+         except ReadTimeout:
+             logger.warning(
+                 f"ReadTimeout for page {page_id} in workbook {workbook_id}"
+             )
+             return
+
      def _get_elements_per_page(
          self, page: dict, workbook_id: str
      ) -> Iterator[dict]:
@@ -122,14 +149,7 @@
              SigmaEndpointFactory.elements(workbook_id, page_id)
          )
          elements = fetch_all_pages(request, SigmaPagination)
-         for element in elements:
-             if element.get("type") not in _DATA_ELEMENTS:
-                 continue
-             yield {
-                 **element,
-                 "workbook_id": workbook_id,
-                 "page_id": page_id,
-             }
+         yield from self._safe_fetch_elements(elements, workbook_id, page_id)

      def _get_all_elements(self, workbooks: list[dict]) -> Iterator[dict]:
          for workbook in workbooks:
{castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: castor-extractor
- Version: 0.24.57
+ Version: 0.25.2
  Summary: Extract your metadata assets.
  Home-page: https://www.castordoc.com/
  License: EULA
@@ -16,6 +16,7 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Provides-Extra: all
  Provides-Extra: bigquery
+ Provides-Extra: count
  Provides-Extra: databricks
  Provides-Extra: dbt
  Provides-Extra: looker
@@ -57,7 +58,7 @@ Requires-Dist: setuptools (>=78.1)
  Requires-Dist: snowflake-connector-python (>=3.4.0,<4.0.0) ; extra == "snowflake" or extra == "all"
  Requires-Dist: snowflake-sqlalchemy (!=1.2.5,<2.0.0) ; extra == "snowflake" or extra == "all"
  Requires-Dist: sqlalchemy (>=1.4,<1.5)
- Requires-Dist: sqlalchemy-bigquery[bqstorage] (>=1.0.0,<=2.0.0) ; extra == "bigquery" or extra == "all"
+ Requires-Dist: sqlalchemy-bigquery[bqstorage] (>=1.0.0,<=2.0.0) ; extra == "bigquery" or extra == "count" or extra == "all"
  Requires-Dist: sqlalchemy-redshift (>=0.8.14,<0.9.0) ; extra == "redshift" or extra == "all"
  Requires-Dist: tableauserverclient (>=0.25.0,<0.26.0) ; extra == "tableau" or extra == "all"
  Requires-Dist: tqdm (>=4.0.0,<5.0.0)
@@ -215,6 +216,18 @@ For any questions or bug report, contact us at [support@coalesce.io](mailto:supp

  # Changelog

+ ## 0.25.2 - 2025-09-30
+
+ * PowerBi: Support auth with private_key
+
+ ## 0.25.1 - 2025-09-29
+
+ * Sigma: catch ReadTimeouts during elements extraction
+
+ ## 0.25.0 - 2025-09-15
+
+ * Count: adding connector
+
  ## 0.24.57 - 2025-09-24

  * Sigma:
{castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/RECORD CHANGED
@@ -1,4 +1,4 @@
- CHANGELOG.md,sha256=-WezbaTjM4tDXii_RVXSYDz39xuZYqWUsabdyqoh2Kc,20889
+ CHANGELOG.md,sha256=nBloUrrG3Tt7TDnWCZqsNS0x6uIBYG7TFQHoTP8Q8a8,21086
  Dockerfile,sha256=xQ05-CFfGShT3oUqaiumaldwA288dj9Yb_pxofQpufg,301
  DockerfileUsage.md,sha256=2hkJQF-5JuuzfPZ7IOxgM6QgIQW7l-9oRMFVwyXC4gE,998
  LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
@@ -7,6 +7,7 @@ castor_extractor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
  castor_extractor/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  castor_extractor/commands/extract_bigquery.py,sha256=dU4OiYO1V0n32orvZnMh1_xtFKF_VxHNXcVsH3otY-g,1269
  castor_extractor/commands/extract_confluence.py,sha256=blYcnDqywXNKRQ1aZAD9FclhLlO7x8Y_tb0lgl85v0w,1641
+ castor_extractor/commands/extract_count.py,sha256=cITp-2UmPYjbcICvYZzxE9oWieI8NbTH1DcWxLAZxJ4,611
  castor_extractor/commands/extract_databricks.py,sha256=SVKyoa-BBUQAM6HRHf1Wdg9-tpICic2yyvXQwHcNBhA,1264
  castor_extractor/commands/extract_domo.py,sha256=jvAawUsUTHrwCn_koK6StmQr4n_b5GyvJi6uu6WS0SM,1061
  castor_extractor/commands/extract_looker.py,sha256=cySLiolLCgrREJ9d0kMrJ7P8K3efHTBTzShalWVfI3A,1214
@@ -17,7 +18,7 @@ castor_extractor/commands/extract_mode.py,sha256=Q4iO-VAKMg4zFPejhAO-foZibL5Ht3j
  castor_extractor/commands/extract_mysql.py,sha256=7AH5qMzeLTsENCOeJwtesrWg8Vo8MCEq8fx2YT74Mcw,1034
  castor_extractor/commands/extract_notion.py,sha256=uaxcF3_bT7D_-JxnIW0F7VVDphI_ZgOfQQxZzoLXo_M,504
  castor_extractor/commands/extract_postgres.py,sha256=pX0RnCPi4nw6QQ6wiAuZ_Xt3ZbDuMUG9aQKuqFgJtAU,1154
- castor_extractor/commands/extract_powerbi.py,sha256=RKkw9H2ZsbJ4xLE84bmNFUgYUjlrLmSXahQSVrQr_Bc,934
+ castor_extractor/commands/extract_powerbi.py,sha256=tM9fnQaU69zJ7E_uS1S432jprRi9WnpDJdm2NtyLjUg,1242
  castor_extractor/commands/extract_qlik.py,sha256=VBe_xFKh_nR0QSFFIncAaC8yDqBeMa6VunBAga7AeGg,891
  castor_extractor/commands/extract_redshift.py,sha256=zRBg2D_ft4GLdPSdmetRcgQVAA80DXtdRSYsQhAWIik,1334
  castor_extractor/commands/extract_salesforce.py,sha256=3j3YTmMkPAwocR-B1ozJQai0UIZPtpmAyWj-hHvdWn4,1226
@@ -160,6 +161,17 @@ castor_extractor/utils/validation.py,sha256=dRvC9SoFVecVZuLQNN3URq37yX2sBSW3-NxI
  castor_extractor/utils/validation_test.py,sha256=A7P6VmI0kYX2aGIeEN12y7LsY7Kpm8pE4bdVFhbBAMw,1184
  castor_extractor/utils/write.py,sha256=KQVWF29N766avzmSb129IUWrId5c_8BtnYhVLmU6YIs,2133
  castor_extractor/visualization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ castor_extractor/visualization/count/__init__.py,sha256=lvxGtSe3erjTYK0aPnkOyJibcsC6Q1AFchnK-hZt558,114
+ castor_extractor/visualization/count/assets.py,sha256=VZCRVDKWSu6l2lVGJS4JKOOmfCUkbS8MnJiLcAY9vqw,232
+ castor_extractor/visualization/count/client/__init__.py,sha256=YawYDutDI0sprp72jN9tKi8bbXCoc0Ij0Ev582tKjqk,74
+ castor_extractor/visualization/count/client/client.py,sha256=WgljCj8G7D0Brxa0llaeOQ2Ipd7FvtDWFoLWoPyqT9A,1523
+ castor_extractor/visualization/count/client/credentials.py,sha256=LZWvcz7p5lrgdgoIQLcxFyv4gqUBW4Jj4qDKN-VW31I,273
+ castor_extractor/visualization/count/client/queries/canvas_permissions.sql,sha256=iFmMfR0zusjxTxmYUS6p0kibZCsnHOQMbAlxaNjx-H4,108
+ castor_extractor/visualization/count/client/queries/canvases.sql,sha256=Ur5HBD9JJH0r14xIj_rwoctnds082_F931vlfcnwi_I,86
+ castor_extractor/visualization/count/client/queries/cells.sql,sha256=Kkk0jyU337PD6RPshSo_ucLl5PS7kIvJZlUnVnmJUkM,111
+ castor_extractor/visualization/count/client/queries/projects.sql,sha256=3Jem3QCVwk4wHiWRJL7cN6Vl2Yc5RZ8yC8ndvPAkaFM,68
+ castor_extractor/visualization/count/client/queries/users.sql,sha256=H0n7S7P5cCAWbgPxU32psIc1epXySzsAaQ7MQ9JrkfM,102
+ castor_extractor/visualization/count/extract.py,sha256=ZBsJ9tMxxaq1jG8qJp_OGVK3yPDNkVUsP1_3rcUMtYg,1378
  castor_extractor/visualization/domo/__init__.py,sha256=1axOCPm4RpdIyUt9LQEvlMvbOPllW8rk63h6EjVgJ0Y,111
  castor_extractor/visualization/domo/assets.py,sha256=bK1urFR2tnlWkVkkhR32mAKMoKbESNlop-CNGx-65PY,206
  castor_extractor/visualization/domo/client/__init__.py,sha256=Do0fU4B8Hhlhahcv734gnJl_ryCztfTBDea7XNCKfB8,72
@@ -236,16 +248,16 @@ castor_extractor/visualization/mode/errors.py,sha256=SKpFT2AiLOuWx2VRLyO7jbAiKcG
  castor_extractor/visualization/mode/extract.py,sha256=PmLWWjUwplQh3TNMemiGwyFdxMcKVMvumZPxSMLJAwk,1625
  castor_extractor/visualization/powerbi/__init__.py,sha256=hoZ73ngLhMc9edqxO9PUIE3FABQlvcfY2W8fuc6DEjY,197
  castor_extractor/visualization/powerbi/assets.py,sha256=IB_XKwgdN1pZYGZ4RfeHrLjflianTzWf_6tg-4CIwu0,742
- castor_extractor/visualization/powerbi/client/__init__.py,sha256=UPIhMaCCdNxhiLdkItC0IPFE_AMi-SgqI_ahwjB9utI,151
- castor_extractor/visualization/powerbi/client/authentication.py,sha256=cTohunKr1nUDfvxB0sejJSyfE2BdCtwT1WMPecWlbyU,1045
+ castor_extractor/visualization/powerbi/client/__init__.py,sha256=rxWeAtmGsy1XYn2oIrGz5rIlxcTrzh2rl1V-MGxFOY4,175
+ castor_extractor/visualization/powerbi/client/authentication.py,sha256=1pST-w7ceqrcKSccQSJBxT4lAsLU8keceSVJro1dg8k,1516
  castor_extractor/visualization/powerbi/client/client.py,sha256=Q_WHYGFpHT4wJ6nZvJa96nBVcpUGv7E2WnyZHBftsJM,8340
  castor_extractor/visualization/powerbi/client/client_test.py,sha256=zWgfc8fOHSRn3hxiX8ujJysmNHeypIoKin9h8_h178k,6668
  castor_extractor/visualization/powerbi/client/constants.py,sha256=88R_aGachNNUZh6OSH2fkDwZtY4KTStzKm_g7HNCqqo,387
- castor_extractor/visualization/powerbi/client/credentials.py,sha256=OVWdhZSNODzTdLysY-sbpBZ3uUkLokeayQZnbJAqt2I,1386
+ castor_extractor/visualization/powerbi/client/credentials.py,sha256=Mqb9e9jbJrawE00xvLyej1i4tFM8VNiRnA0LpfqORd0,1565
  castor_extractor/visualization/powerbi/client/credentials_test.py,sha256=TzFqxsWVQ3sXR_n0bJsexK9Uz7ceXCEPVqDGWTJzW60,993
  castor_extractor/visualization/powerbi/client/endpoints.py,sha256=38ZETzSSnNq3vA9O6nLZQ8T1BVE01R9CjMC03-PRXsM,1911
  castor_extractor/visualization/powerbi/client/pagination.py,sha256=OZMjoDQPRGMoWd9QcKKrPh3aErJR20SHlrTqY_siLkk,755
- castor_extractor/visualization/powerbi/extract.py,sha256=Z5KbqMhMnqjWcnzged2G1-Gf6GYWJobTL9_TpAdgb8o,1309
+ castor_extractor/visualization/powerbi/extract.py,sha256=bZOUbciWGPNRRrtcMezSdoeClHB2yiBATBC8UqoXz5M,1904
  castor_extractor/visualization/qlik/__init__.py,sha256=u6lIfm_WOykBwt6SlaB7C0Dtx37XBliUbM5oWv26gC8,177
  castor_extractor/visualization/qlik/assets.py,sha256=Ab_kG61mHcK8GoGZbfQW7RSWyd7D9bVga9DOqnm0iSE,1625
  castor_extractor/visualization/qlik/client/__init__.py,sha256=5O5N9Jrt3d99agFEJ28lKWs2KkDaXK-lZ07IUtLj56M,130
@@ -273,7 +285,7 @@ castor_extractor/visualization/sigma/__init__.py,sha256=GINql4yJLtjfOJgjHaWNpE13
  castor_extractor/visualization/sigma/assets.py,sha256=iVZqi7XtNgSOVXy0jgeHZonVOeXi7jyikor8ztbECBc,398
  castor_extractor/visualization/sigma/client/__init__.py,sha256=YQv06FBBQHvBMFg_tN0nUcmUp2NCL2s-eFTXG8rXaBg,74
  castor_extractor/visualization/sigma/client/authentication.py,sha256=gHukrpfboIjZc_O9CcuDtrl6U-StH0J73VY2J74Bm9o,2279
- castor_extractor/visualization/sigma/client/client.py,sha256=uUEZoTa1WU5bJEjOrgzWqSiJMKgbru5HPBEPazyu1Hc,8272
+ castor_extractor/visualization/sigma/client/client.py,sha256=SxSf5OjdDr8x-WZDezm8YNOw01R6CCoYIgW0od0ZgN8,8907
  castor_extractor/visualization/sigma/client/client_test.py,sha256=ae0ZOvKutCm44jnrJ-0_A5Y6ZGyDkMf9Ml3eEP8dNkY,581
  castor_extractor/visualization/sigma/client/credentials.py,sha256=XddAuQSmCKpxJ70TQgRnOj0vMPYVtiStk_lMMQ1AiNM,693
  castor_extractor/visualization/sigma/client/endpoints.py,sha256=by9VIFml2whlzQT66f2m56RYBsqPrWdAmIP4JkTaBV4,1799
@@ -434,8 +446,8 @@ castor_extractor/warehouse/sqlserver/queries/user.sql,sha256=MAlnTis43E3Amu1e1Oz
  castor_extractor/warehouse/sqlserver/queries/view_ddl.sql,sha256=9rynvx6MWg3iZzrWPB7haZfVKEPkxulzryE2g19x804,315
  castor_extractor/warehouse/sqlserver/query.py,sha256=c8f7_SEMR17DhbtzuYphWqWDQ0sCRy-nR442RRBZVYw,1773
  castor_extractor/warehouse/synapse/queries/column.sql,sha256=lNcFoIW3Y0PFOqoOzJEXmPvZvfAsY0AP63Mu2LuPzPo,1351
- castor_extractor-0.24.57.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
- castor_extractor-0.24.57.dist-info/METADATA,sha256=uSN01JxGlu1gIF4bpBnZtHM3tLQKfU9qT0uimCqtrjI,28350
- castor_extractor-0.24.57.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- castor_extractor-0.24.57.dist-info/entry_points.txt,sha256=_F-qeZCybjoMkNb9ErEhnyqXuG6afHIFQhakdBHZsr4,1803
- castor_extractor-0.24.57.dist-info/RECORD,,
+ castor_extractor-0.25.2.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
+ castor_extractor-0.25.2.dist-info/METADATA,sha256=Lh6TLvQYvBJ0wL4ST5GXkpGX4DUaZzNsThF9ZiBCOzk,28588
+ castor_extractor-0.25.2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ castor_extractor-0.25.2.dist-info/entry_points.txt,sha256=qyTrKNByoq2HYi1xbA79OU7qxg-OWPvle8VwDqt-KnE,1869
+ castor_extractor-0.25.2.dist-info/RECORD,,
{castor_extractor-0.24.57.dist-info → castor_extractor-0.25.2.dist-info}/entry_points.txt CHANGED
@@ -1,6 +1,7 @@
  [console_scripts]
  castor-extract-bigquery=castor_extractor.commands.extract_bigquery:main
  castor-extract-confluence=castor_extractor.commands.extract_confluence:main
+ castor-extract-count=castor_extractor.commands.extract_count:main
  castor-extract-databricks=castor_extractor.commands.extract_databricks:main
  castor-extract-domo=castor_extractor.commands.extract_domo:main
  castor-extract-looker=castor_extractor.commands.extract_looker:main