castor-extractor 0.15.2__py3-none-any.whl → 0.16.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of castor-extractor might be problematic; consult the registry's advisory page for more details.

CHANGELOG.md CHANGED
@@ -1,5 +1,20 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.16.1 - 2024-04-02
4
+
5
+ * Systematically escape nul bytes on CSV write
6
+
7
+ ## 0.16.0 - 2024-03-26
8
+
9
+ * Use pydantic v2
10
+
11
+ ## 0.15.4 - 2024-03-25
12
+
13
+ * Pagination: Fix behavior when next page token is missing
14
+
15
+ ## 0.15.3 - 2024-03-08
16
+
17
+ * Sigma: Regenerate token when expired
3
18
 
4
19
  ## 0.15.2 - 2024-03-01
5
20
 
Dockerfile CHANGED
@@ -1,6 +1,6 @@
1
1
  # syntax=docker/dockerfile:1.5
2
2
 
3
- FROM --platform=linux/amd64 python:3.10-slim
3
+ FROM --platform=linux/amd64 python:3.11-slim
4
4
 
5
5
  ARG EXTRA
6
6
  ENV EXTRA=${EXTRA}
@@ -89,7 +89,7 @@ def test_DbtClient_list_job_identifiers():
89
89
 
90
90
  with patch(infer_path, return_value=40), patch(call_path) as mocked_call:
91
91
  mocked_call.return_value = jobs
92
- credentials = DbtCredentials(token="some-token", job_id=1)
92
+ credentials = DbtCredentials(token="some-token", job_id="1")
93
93
  dbt_client = DbtClient(credentials=credentials)
94
94
 
95
95
  jobs_ids = dbt_client.list_job_identifiers()
@@ -103,7 +103,7 @@ def test_DbtClient_fetch_artifacts():
103
103
  url = "https://cloud.getdbt.com/api/v2/accounts/40/runs/{}/artifacts/{}"
104
104
 
105
105
  with patch(infer_path, return_value=40), patch(call_path) as mocked_call:
106
- credentials = DbtCredentials(token="some-token", job_id=1)
106
+ credentials = DbtCredentials(token="some-token", job_id="1")
107
107
  dbt_client = DbtClient(credentials=credentials)
108
108
 
109
109
  dbt_client.fetch_run_results(run_id)
@@ -1,16 +1,15 @@
1
- from pydantic import BaseSettings, Extra, Field
1
+ from pydantic import Field
2
+ from pydantic_settings import BaseSettings, SettingsConfigDict
2
3
 
3
- _DEFAULT_CLOUD_URL = "https://cloud.getdbt.com"
4
+ DEFAULT_DBT_CLOUD_URL = "https://cloud.getdbt.com"
4
5
 
5
6
 
6
7
  class DbtCredentials(BaseSettings):
7
8
  """dbt credentials: host has default value"""
8
9
 
9
- host: str = Field(default=_DEFAULT_CLOUD_URL, env="CASTOR_DBT_HOST")
10
- job_id: str = Field(..., env="CASTOR_DBT_JOB_ID")
11
- token: str = Field(..., env="CASTOR_DBT_TOKEN")
12
-
13
- class Config:
14
- """constructor settings: ignore extra kwargs provided"""
15
-
16
- extra = Extra.ignore
10
+ host: str = Field(
11
+ default=DEFAULT_DBT_CLOUD_URL, validation_alias="CASTOR_DBT_HOST"
12
+ )
13
+ job_id: str = Field(..., validation_alias="CASTOR_DBT_JOB_ID")
14
+ token: str = Field(..., validation_alias="CASTOR_DBT_TOKEN")
15
+ model_config = SettingsConfigDict(extra="ignore", populate_by_name=True)
@@ -30,7 +30,14 @@ def _header(row: dict) -> Sequence[str]:
30
30
 
31
31
 
32
32
  def _scalar(value: Any) -> ScalarValue:
33
- if isinstance(value, (int, float, str)):
33
+ if isinstance(value, str):
34
+ if "\x00" in value: # infrequent error caused by bad encoding
35
+ value = remove_unsupported_byte(value)
36
+ logger.warning("Removed unsupported byte to write to csv")
37
+ return value
38
+
39
+ return value
40
+ if isinstance(value, (int, float)):
34
41
  return value
35
42
  if isinstance(value, (date, datetime)):
36
43
  return value.isoformat()
@@ -46,11 +53,11 @@ def _row(header: Sequence[str], row: dict) -> List[ScalarValue]:
46
53
  return [_scalar(row.get(h)) for h in header]
47
54
 
48
55
 
49
- def remove_unsupported_byte(row: List[ScalarValue]) -> List[ScalarValue]:
50
- return [
51
- re.sub("\x00", "", element) if isinstance(element, str) else element
52
- for element in row
53
- ]
56
+ def remove_unsupported_byte(element: ScalarValue) -> ScalarValue:
57
+ if not isinstance(element, str):
58
+ return element
59
+
60
+ return re.sub("\x00", "", element)
54
61
 
55
62
 
56
63
  def to_string_array(arr_json: str) -> List[str]:
@@ -85,11 +92,7 @@ def to_csv(buffer: IO[str], data: Iterable[dict]) -> bool:
85
92
  header = _header(row)
86
93
  writer.writerow(header)
87
94
  converted = _row(header, row)
88
- try:
89
- writer.writerow(converted)
90
- except csv.Error: # infrequent error caused by bad encoding
91
- writer.writerow(remove_unsupported_byte(converted))
92
- logger.warning("Removed unsupported byte to write to csv")
95
+ writer.writerow(converted)
93
96
  return True
94
97
 
95
98
 
@@ -68,7 +68,10 @@ def test__json_formatter():
68
68
  _test(formatter)
69
69
 
70
70
 
71
- def test__remove_unsupported_byte():
72
- row = [1, "foo", "bar\x00bie"]
73
- cleaned = remove_unsupported_byte(row)
74
- assert cleaned == [1, "foo", "barbie"]
71
+ @pytest.mark.parametrize(
72
+ "element, expected_output",
73
+ [(1, 1), ("foo", "foo"), ("bar\x00bie", "barbie")],
74
+ )
75
+ def test__remove_unsupported_byte(element, expected_output):
76
+ cleaned = remove_unsupported_byte(element)
77
+ assert cleaned == expected_output
@@ -44,10 +44,9 @@ class PagerOnToken(AbstractPager):
44
44
  yield results["res"]
45
45
 
46
46
  stop = self.should_stop(nb_results, per_page, stop_on_empty_page)
47
-
48
- if stop:
47
+ page_token = results.get("next_page_token")
48
+ if stop or not page_token:
49
49
  break
50
- page_token = results["next_page_token"]
51
50
  page += 1
52
51
 
53
52
  self._logger.on_success(page, total_results)
@@ -22,6 +22,24 @@ def _make_callback_with_token(
22
22
  return _callback
23
23
 
24
24
 
25
+ def _make_callback_with_missing_token(
26
+ elements: List[Dict[str, str]],
27
+ ) -> Callable[[OStr, int], Dict[str, Any]]:
28
+ def _callback(page_token: OStr, per_page: int) -> Dict[str, Any]:
29
+ """callable with a token that indicates how to retrieve the next page
30
+ except for the last page"""
31
+ if page_token:
32
+ _start = int(page_token)
33
+ else:
34
+ _start = 0
35
+ _end = _start + per_page
36
+ if _end == len(ITEMS):
37
+ return {"res": elements[_start:_end]}
38
+ return {"res": elements[_start:_end], "next_page_token": str(_end)}
39
+
40
+ return _callback
41
+
42
+
25
43
  def test_pagerontoken__all():
26
44
  """unit test for PagerOnToken#all()"""
27
45
  pager = PagerOnToken(_make_callback_with_token(ITEMS))
@@ -32,6 +50,15 @@ def test_pagerontoken__all():
32
50
  # When per page is more than the number of ITEMS
33
51
  assert pager.all(per_page=len(ITEMS) + 20) == ITEMS
34
52
 
53
+ # Same test suite, but no token is provided at the last call
54
+ pager = PagerOnToken(_make_callback_with_missing_token(ITEMS))
55
+ # When no argument provided
56
+ assert pager.all() == ITEMS
57
+ # When per page is less than the number of ITEMS
58
+ assert pager.all(per_page=1) == ITEMS
59
+ # When per page is more than the number of ITEMS
60
+ assert pager.all(per_page=len(ITEMS) + 20) == ITEMS
61
+
35
62
 
36
63
  def test_pagerontoken__iterator__pagination():
37
64
  """unit test for PagerOnToken#iterator() (pagination)"""
@@ -1,22 +1,20 @@
1
+ from dataclasses import field
1
2
  from typing import List, Optional
2
3
 
4
+ from pydantic.dataclasses import dataclass
5
+
3
6
  from .constants import Urls
4
7
 
5
8
 
9
+ @dataclass
6
10
  class Credentials:
7
11
  """Class to handle PowerBI rest API permissions"""
8
12
 
9
- def __init__(
10
- self,
11
- *,
12
- tenant_id: str,
13
- client_id: str,
14
- secret: str,
15
- scopes: Optional[List[str]] = None,
16
- ):
17
- if scopes is None:
18
- scopes = [Urls.DEFAULT_SCOPE]
19
- self.tenant_id = tenant_id
20
- self.client_id = client_id
21
- self.secret = secret
22
- self.scopes = scopes
13
+ client_id: str
14
+ tenant_id: str
15
+ secret: str = field(metadata={"sensitive": True})
16
+ scopes: Optional[List[str]] = None
17
+
18
+ def __post_init__(self):
19
+ if self.scopes is None:
20
+ self.scopes = [Urls.DEFAULT_SCOPE]
@@ -273,6 +273,10 @@ class Client:
273
273
  self._wait_for_scan_result(scan_id)
274
274
  yield self._get_scan(scan_id)
275
275
 
276
+ def test_connection(self) -> None:
277
+ """Use credentials & verify requesting the API doesn't raise an error"""
278
+ self._header()
279
+
276
280
  def fetch(
277
281
  self,
278
282
  asset: PowerBiAsset,
@@ -13,6 +13,7 @@ logger = logging.getLogger()
13
13
 
14
14
 
15
15
  DATA_ELEMENTS: Tuple[str, str] = ("table", "visualization")
16
+ _CONTENT_TYPE = "application/x-www-form-urlencoded"
16
17
 
17
18
 
18
19
  class SigmaClient:
@@ -24,7 +25,9 @@ class SigmaClient:
24
25
  self.api_token = credentials.api_token
25
26
  self.headers: Optional[Dict[str, str]] = None
26
27
 
27
- def _get_token(self, token_api_path: str) -> Dict[str, str]:
28
+ def _get_token(self) -> Dict[str, str]:
29
+ auth_endpoint = EndpointFactory.authentication()
30
+ token_api_path = urljoin(self.host, auth_endpoint)
28
31
  token_response = requests.post( # noqa: S113
29
32
  token_api_path,
30
33
  data={
@@ -33,30 +36,42 @@ class SigmaClient:
33
36
  CredentialsKey.CLIENT_SECRET.value: self.api_token,
34
37
  },
35
38
  )
36
- if token_response.status_code != 200:
39
+ if token_response.status_code != requests.codes.OK:
37
40
  raise ValueError("Couldn't fetch the token in the API")
38
41
  return token_response.json()
39
42
 
40
- def _get_headers(self) -> Dict[str, str]:
41
- if not self.headers:
42
- headers = {"Content-Type": "application/x-www-form-urlencoded"}
43
+ def _get_headers(self, reset=False) -> Dict[str, str]:
44
+ """
45
+ If reset is True, will re-create the headers with a new authentication token
43
46
 
44
- auth_endpoint = EndpointFactory.authentication()
45
- token_api_path = urljoin(self.host, auth_endpoint)
46
-
47
- token = self._get_token(token_api_path)
47
+ Note : From this [documentation](https://help.sigmacomputing.com/docs/api-authentication-with-curl),
48
+ instead of re-creating a token we could refresh it, but I don't see any benefit.
49
+ """
50
+ if reset or not self.headers:
51
+ headers = {"Content-Type": _CONTENT_TYPE}
52
+ token = self._get_token()
48
53
  headers["Authorization"] = f"Bearer {token['access_token']}"
49
54
  self.headers = headers
50
-
51
55
  return self.headers
52
56
 
53
57
  def _get(self, endpoint_url: str) -> dict:
54
58
  url = urljoin(self.host, endpoint_url)
55
59
  result = requests.get(url, headers=self._get_headers()) # noqa: S113
60
+
61
+ if result.status_code == requests.codes.UNAUTHORIZED:
62
+ logger.info("Regenerating access token")
63
+ result = requests.get( # noqa: S113
64
+ url, headers=self._get_headers(reset=True)
65
+ )
66
+
56
67
  try:
57
68
  return result.json()
58
- except:
59
- logger.debug(result)
69
+ except Exception as e:
70
+ logger.warning(
71
+ f"Couldn't deserialize result from url {url}."
72
+ f" with status code {result.status_code} and"
73
+ f" exception {type(e)}"
74
+ )
60
75
  return dict()
61
76
 
62
77
  def _get_with_pagination(self, endpoint_url: str) -> Iterator[dict]:
@@ -23,7 +23,7 @@ def test__get(mocked_headers, mocked_request):
23
23
  "Every female within the pride is usually related.",
24
24
  "length": 109,
25
25
  }
26
- mocked_request.return_value = Mock(json=lambda: fact)
26
+ mocked_request.return_value = Mock(json=lambda: fact, status_code=200)
27
27
 
28
28
  result = client._get("https://catfact.ninja/fact")
29
29
  assert result == fact
@@ -31,7 +31,7 @@ def test__get(mocked_headers, mocked_request):
31
31
  result = client._get("https://catfact.ninja/fact")["length"]
32
32
  assert result == 109
33
33
 
34
- mocked_request.return_value = "not a json"
34
+ mocked_request.return_value = Mock("not a json", status_code=200)
35
35
 
36
36
  result = client._get("https/whatev.er")
37
37
  assert result == {}
@@ -34,6 +34,7 @@ class BigQueryClient(SqlalchemyClient):
34
34
  self._db_allowed = db_allowed
35
35
  self._db_blocked = db_blocked
36
36
  self._dataset_blocked = dataset_blocked
37
+ self.credentials = self._credentials()
37
38
  self.client = self._client()
38
39
  self._projects: List[str] | None = None
39
40
  self._datasets: List[Dataset] | None = None
@@ -63,14 +64,17 @@ class BigQueryClient(SqlalchemyClient):
63
64
  def _build_uri(self, credentials: dict) -> str:
64
65
  return BIGQUERY_URI
65
66
 
66
- def _client(self) -> GoogleCloudClient:
67
+ def _credentials(self) -> Credentials:
67
68
  assert (
68
69
  CREDENTIALS_INFO_KEY in self._options
69
70
  ), "Missing BigQuery credentials in engine's options"
70
71
  credentials = self._options[CREDENTIALS_INFO_KEY]
72
+ return Credentials.from_service_account_info(credentials)
73
+
74
+ def _client(self) -> GoogleCloudClient:
71
75
  return GoogleCloudClient(
72
- project=credentials.get(PROJECT_ID_KEY),
73
- credentials=Credentials.from_service_account_info(credentials),
76
+ project=self._options[CREDENTIALS_INFO_KEY].get(PROJECT_ID_KEY),
77
+ credentials=self.credentials,
74
78
  )
75
79
 
76
80
  def _list_datasets(self) -> List[Dataset]:
@@ -4,6 +4,7 @@ from typing import Dict, Optional
4
4
  from ...utils import AbstractStorage, LocalStorage, write_summary
5
5
  from ..abstract import (
6
6
  CATALOG_ASSETS,
7
+ EXTERNAL_LINEAGE_ASSETS,
7
8
  QUERIES_ASSETS,
8
9
  VIEWS_ASSETS,
9
10
  SupportedAssets,
@@ -20,6 +21,7 @@ DATABRICKS_ASSETS: SupportedAssets = {
20
21
  WarehouseAssetGroup.QUERY: QUERIES_ASSETS,
21
22
  WarehouseAssetGroup.ROLE: (WarehouseAsset.USER,),
22
23
  WarehouseAssetGroup.VIEW_DDL: VIEWS_ASSETS,
24
+ WarehouseAssetGroup.EXTERNAL_LINEAGE: EXTERNAL_LINEAGE_ASSETS,
23
25
  }
24
26
 
25
27
  logger = logging.getLogger(__name__)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: castor-extractor
3
- Version: 0.15.2
3
+ Version: 0.16.1
4
4
  Summary: Extract your metadata assets.
5
5
  Home-page: https://www.castordoc.com/
6
6
  License: EULA
@@ -16,6 +16,7 @@ Classifier: Programming Language :: Python :: 3.10
16
16
  Classifier: Programming Language :: Python :: 3.11
17
17
  Provides-Extra: all
18
18
  Provides-Extra: bigquery
19
+ Provides-Extra: dbt
19
20
  Provides-Extra: looker
20
21
  Provides-Extra: metabase
21
22
  Provides-Extra: mysql
@@ -38,7 +39,8 @@ Requires-Dist: looker-sdk (>=22.4.0,<=23.0.0) ; extra == "looker" or extra == "a
38
39
  Requires-Dist: msal (>=1.20.0,<2.0.0) ; extra == "powerbi" or extra == "all"
39
40
  Requires-Dist: psycopg2-binary (>=2.0.0,<3.0.0) ; extra == "metabase" or extra == "postgres" or extra == "redshift" or extra == "all"
40
41
  Requires-Dist: pycryptodome (>=3.0.0,<4.0.0) ; extra == "metabase" or extra == "all"
41
- Requires-Dist: pydantic (>=1.10,<2.0)
42
+ Requires-Dist: pydantic (>=2.6,<3.0)
43
+ Requires-Dist: pydantic-settings (>=2.2,<3.0)
42
44
  Requires-Dist: pymssql (>=2.2.11,<3.0.0) ; extra == "sqlserver" or extra == "all"
43
45
  Requires-Dist: pymysql[rsa] (>=1.1.0,<2.0.0) ; extra == "mysql" or extra == "all"
44
46
  Requires-Dist: python-dateutil (>=2.0.0,<=3.0.0)
@@ -1,5 +1,5 @@
1
- CHANGELOG.md,sha256=xSp8XAs6yUIwuv-Y8TDZd6zq0-KbzkYJjO7kUl4BFYQ,9575
2
- Dockerfile,sha256=TC6hFjG3mvnt1nkw2EpaS42hRYaGA2YIPKgWhVSKTWc,303
1
+ CHANGELOG.md,sha256=t1xfX_GaaTJcrNGAJtvhbOZ-4fAeRdFKRH8eKil6xWM,9837
2
+ Dockerfile,sha256=HcX5z8OpeSvkScQsN-Y7CNMUig_UB6vTMDl7uqzuLGE,303
3
3
  LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
4
4
  README.md,sha256=uF6PXm9ocPITlKVSh9afTakHmpLx3TvawLf-CbMP3wM,3578
5
5
  castor_extractor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -57,16 +57,16 @@ castor_extractor/utils/constants.py,sha256=qBQprS9U66mS-RIBXiLujdTSV3WvGv40Bc0kh
57
57
  castor_extractor/utils/dbt/__init__.py,sha256=LHQROlMqYWCc7tcmhdjXtROFpJqUvCg9jPC8avHgD4I,107
58
58
  castor_extractor/utils/dbt/assets.py,sha256=JY1nKEGySZ84wNoe7dnizwAYw2q0t8NVaIfqhB2rSw0,148
59
59
  castor_extractor/utils/dbt/client.py,sha256=xBjbT-p99TXY850ooEAgjNp33yfGDwjWJRbXzeJoVaI,5538
60
- castor_extractor/utils/dbt/client_test.py,sha256=h-IIrE2Fy0oD0uemjvJzof9xMO3AmkHciOVopKCNjJs,4529
61
- castor_extractor/utils/dbt/credentials.py,sha256=qkfs43_8pNqPGmKc7xv7sb1IhbheXzgwt6yYdjyomAo,479
60
+ castor_extractor/utils/dbt/client_test.py,sha256=FO_vpnECE-hoK0rZHbqDv17oaJj3-uPhFEqTrMPzUf4,4533
61
+ castor_extractor/utils/dbt/credentials.py,sha256=pGq7GqFQTw9TwN1DXSHC-0yJ2H6B_wMAbHyQTLqJVh0,543
62
62
  castor_extractor/utils/deprecate.py,sha256=_uzQiwHiz2yEqQeNMmzvVmBY46IgBhhEbGPhTrVjZU4,817
63
63
  castor_extractor/utils/env.py,sha256=TqdtB50U8LE0993WhhEhpy89TJrHbjtIKjvg6KQ-5q0,596
64
64
  castor_extractor/utils/files.py,sha256=3C_u7P-kSZoOABVaKsuaf8lEhldRRxyxD27-K18_dEU,1545
65
65
  castor_extractor/utils/files_test.py,sha256=omRT3XSjaSAywYUoLh1SGWqYzl4UwBYKSYA9_7mXd_E,1542
66
- castor_extractor/utils/formatter.py,sha256=v50OyVWPDn-QdBm-XxSNGQibPPePeJIrcDcK3TATLRM,4934
66
+ castor_extractor/utils/formatter.py,sha256=EJpwl5ff9zahbWpCcoHcrDTVzBSqwyZQffuvh1JmPbk,4948
67
67
  castor_extractor/utils/formatter_test.csv,sha256=UCNqPs8-xrY1AdMSpuctVFXInQe3Z_EABP4rF-Jw5ks,3802
68
68
  castor_extractor/utils/formatter_test.json,sha256=yPP_z1ZEavaUskC-Hx33uGlwKoInHYOFKqsJ9NgwIFo,12527
69
- castor_extractor/utils/formatter_test.py,sha256=sFuiKmU5WYQN4ocg1tEKLN2bMpuuPWVbNqCn0x-jn8I,1752
69
+ castor_extractor/utils/formatter_test.py,sha256=VPlRTPQOaAeCySNs1wU1jd3bMppqxkVpD1dyCLt6p94,1856
70
70
  castor_extractor/utils/json_stream_write.py,sha256=OUdg4-47I47pgbpN9_a6y-lmxuod7aY6PObxjvB-wXI,2082
71
71
  castor_extractor/utils/load.py,sha256=MXwGVB_Dp_VitGwo71sNB_xDmGzQ4oQ13MhaXXyYkS0,265
72
72
  castor_extractor/utils/object.py,sha256=xCcQtoj9313TCcoyRXkLpDcMxmDeQMFMseDNx95oGc0,1959
@@ -75,8 +75,8 @@ castor_extractor/utils/pager/__init__.py,sha256=IoZnJVkALgEAg5P_fiecrPbukLFsKT8A
75
75
  castor_extractor/utils/pager/pager.py,sha256=sfVEPMMhptLVJOhhi8TgRlAlVVI0GnokjkM4EeNMqqY,2386
76
76
  castor_extractor/utils/pager/pager_on_id.py,sha256=K-YTBO4MoqcmaI_GHWU_8W-FRJivDbAtKt7spY0mBD4,1768
77
77
  castor_extractor/utils/pager/pager_on_id_test.py,sha256=CfAXhXaAmCXnm0oflj8_82An6znOS2mBk4nqvtrCqs4,1613
78
- castor_extractor/utils/pager/pager_on_token.py,sha256=wfdSczFFqrD74711k2KB24kMMOTR27up33fh-o2HURE,1558
79
- castor_extractor/utils/pager/pager_on_token_test.py,sha256=SMtBI2q-l_aOT-KJsjFmmUHr5YnFlIlCoyBAELJfOPU,1533
78
+ castor_extractor/utils/pager/pager_on_token.py,sha256=G442SKl4BXJFMPbYIIgCk5M8wl7V3jMg3K1WUUkl0I0,1579
79
+ castor_extractor/utils/pager/pager_on_token_test.py,sha256=w2GCUGKR3cD5lfmtFAsNvExtzxkYdBR0pusBrGKFQ08,2548
80
80
  castor_extractor/utils/pager/pager_test.py,sha256=QPBVShSXhkiYZUfnAMs43xnys6CD8pAhL3Jhj-Ov2Xc,1705
81
81
  castor_extractor/utils/retry.py,sha256=vYdJMiM-Nr82H1MuD7_KZdqbFz98ffQGqJ4Owbr6mpY,3252
82
82
  castor_extractor/utils/retry_test.py,sha256=nsMttlmyKygVcffX3Hay8U2S1BspkGPiCmzIXPpLKyk,2230
@@ -158,9 +158,9 @@ castor_extractor/visualization/powerbi/__init__.py,sha256=XSr_fNSsR-EPuGOFo7Ai1r
158
158
  castor_extractor/visualization/powerbi/assets.py,sha256=SASUjxtoOMag3NAlZfhpCy0sLap7WfENEMaEZuBrw6o,801
159
159
  castor_extractor/visualization/powerbi/client/__init__.py,sha256=hU8LE1gV9RttTGJiwVpEa9xDLR4IMkUdshQGthg4zzE,62
160
160
  castor_extractor/visualization/powerbi/client/constants.py,sha256=Cx4pbgyAFc7t_aRQyWj7q-qfkltJl-JgKdMzeKmC9AI,2356
161
- castor_extractor/visualization/powerbi/client/credentials.py,sha256=_YYuHWzL6QbXeGVVmEF8vSZKYnWaQk5Mx-7-13StUMo,500
161
+ castor_extractor/visualization/powerbi/client/credentials.py,sha256=iiYaCa2FM1PBHv4YA0Z1LgdX9gnaQhvHGD0LQb7Tcxw,465
162
162
  castor_extractor/visualization/powerbi/client/credentials_test.py,sha256=23ZlLCvsPB_fmqntnzULkv0mMRE8NCzBXtWS6wupJn4,787
163
- castor_extractor/visualization/powerbi/client/rest.py,sha256=qhmA49tayNUzZsX53VsdqL7cOhWfbryVLwN5bH1zGfA,9500
163
+ castor_extractor/visualization/powerbi/client/rest.py,sha256=0gwqqmmzX76MzNRGfmcNkXw_jxVRAdMTViQExTBQy2Y,9644
164
164
  castor_extractor/visualization/powerbi/client/rest_test.py,sha256=r5rS_1FMwHCDWbYdco11-zvDJ5jYk9l8-VVJcpCtbwM,7343
165
165
  castor_extractor/visualization/powerbi/client/utils.py,sha256=0RcoWcKOdvIGH4f3lYDvufmiMo4tr_ABFlITSrvXjTs,541
166
166
  castor_extractor/visualization/powerbi/client/utils_test.py,sha256=ULHL2JLrcv0xjW2r7QF_ce2OaGeeSzajkMDywJ8ZdVA,719
@@ -194,8 +194,8 @@ castor_extractor/visualization/salesforce_reporting/extract.py,sha256=6cUMNrCz46
194
194
  castor_extractor/visualization/sigma/__init__.py,sha256=m98AEysUsVHQAWT6m5nvrtLMs22SDQH9G78-IcUwBoY,130
195
195
  castor_extractor/visualization/sigma/assets.py,sha256=JZ1Cpxnml8P3mIJoTUM57hvylB18ErECQXaP5FF63O4,268
196
196
  castor_extractor/visualization/sigma/client/__init__.py,sha256=sFqsbcwilIxu75njtSbnAIsNlPdRgB39SAInNUf-nbQ,90
197
- castor_extractor/visualization/sigma/client/client.py,sha256=F-Ubz5fN3DHOWEyDwhEdj4yYXrFPe2w8rEAI4kiaAr8,6356
198
- castor_extractor/visualization/sigma/client/client_test.py,sha256=qZWlq_QSyVNHXX96ZbBfWp5GduTllQHDDowFXwQYrcU,1552
197
+ castor_extractor/visualization/sigma/client/client.py,sha256=894tBAtKlyXHqRB4-mpyM4BgaE80T58VHbqauAmime8,7112
198
+ castor_extractor/visualization/sigma/client/client_test.py,sha256=BdxJsmTyhslup4vePbkiaoTTaJ9RDY7UfK4OhBpu1Z4,1592
199
199
  castor_extractor/visualization/sigma/client/credentials.py,sha256=k3mtaXrBHQdBRk5J2rzFU0YUuO89uX-FjYU1WLqfmjU,797
200
200
  castor_extractor/visualization/sigma/client/endpoints.py,sha256=7yNDL0p9wZDE7CcVOj9Z1eP2tdJE1CUT9vbfSEV2kOY,1157
201
201
  castor_extractor/visualization/sigma/client/pagination.py,sha256=EZGMaONTzZ15VINUAtH0sOmJae0JlDjOQPKBXK_V8sg,667
@@ -245,7 +245,7 @@ castor_extractor/warehouse/abstract/query.py,sha256=GAgeISCmAdrkTKzFGO79hQDf6SA6
245
245
  castor_extractor/warehouse/abstract/time_filter.py,sha256=bggIONfMmUxffkA6TwM3BsjfS2l9WFxPq8krfsau5pw,935
246
246
  castor_extractor/warehouse/abstract/time_filter_test.py,sha256=PIkegB7KOKBdpc6zIvmyl_CeQyADeFDplyQ8HTNU5LA,448
247
247
  castor_extractor/warehouse/bigquery/__init__.py,sha256=PCGNYdi7dHv-SyanUWzRuBp-ypuQ01PkDaQjVnaNhbM,170
248
- castor_extractor/warehouse/bigquery/client.py,sha256=8a94Lmj2LIC-gwLHCIkxROQMZnpH9EdZ-PFmFFybvEQ,4348
248
+ castor_extractor/warehouse/bigquery/client.py,sha256=ypLKXvvfR0RtKex4T2mNvoef4T-jRF1T_RZGCZ6qbOM,4495
249
249
  castor_extractor/warehouse/bigquery/client_test.py,sha256=Ym8e4d--0YQwiVcNUnXLx0X-X6ZznwNMBMbMaDS5oEA,1514
250
250
  castor_extractor/warehouse/bigquery/credentials.py,sha256=oCZ8H7qpudKzwM7PRMpVAmWXt7bjIRa8Harmp-ysQJ4,425
251
251
  castor_extractor/warehouse/bigquery/extract.py,sha256=vZFxJC1LtUMph5UhfhYdJLnsEto18IOERKzrt71jqJg,2883
@@ -265,7 +265,7 @@ castor_extractor/warehouse/databricks/__init__.py,sha256=bTvDxjGQGM2J3hOnVhfNmFP
265
265
  castor_extractor/warehouse/databricks/client.py,sha256=iojSVTARx5JmGy2Tm8D2H5wHO5hqGigVG9Ql2vHNdz8,7375
266
266
  castor_extractor/warehouse/databricks/client_test.py,sha256=rsqHWmVOgvqQ3VmYKJrpWpcGATD_C9FD1sG4CJsin2E,2201
267
267
  castor_extractor/warehouse/databricks/credentials.py,sha256=sMpOAKhBklcmTpcr3mi3o8qLud__8PTZbQUT3K_TRY8,678
268
- castor_extractor/warehouse/databricks/extract.py,sha256=7Tyr20nVlbDex-IjDSdX5VHj4NWvc3sWyfyiKBqPDwI,5612
268
+ castor_extractor/warehouse/databricks/extract.py,sha256=eyt9LihZ9GfHEh8Z2c9PXAHqK6hibPsEIUOKGYfMwg8,5708
269
269
  castor_extractor/warehouse/databricks/format.py,sha256=tCBCApW5iZMBx04p-oCUs36d4JqNqJsBDHe6f-A7eiU,4925
270
270
  castor_extractor/warehouse/databricks/format_test.py,sha256=iPmdJof43fBYL1Sa_fBrCWDQHCHgm7IWCZag1kWkj9E,1970
271
271
  castor_extractor/warehouse/databricks/types.py,sha256=T2SyLy9pY_olLtstdC77moPxIiikVsuQLMxh92YMJQo,78
@@ -346,7 +346,7 @@ castor_extractor/warehouse/synapse/queries/schema.sql,sha256=aX9xNrBD_ydwl-znGSF
346
346
  castor_extractor/warehouse/synapse/queries/table.sql,sha256=mCE8bR1Vb7j7SwZW2gafcXidQ2fo1HwxcybA8wP2Kfs,1049
347
347
  castor_extractor/warehouse/synapse/queries/user.sql,sha256=sTb_SS7Zj3AXW1SggKPLNMCd0qoTpL7XI_BJRMaEpBg,67
348
348
  castor_extractor/warehouse/synapse/queries/view_ddl.sql,sha256=3EVbp5_yTgdByHFIPLHmnoOnqqLE77SrjAwFDvu4e54,249
349
- castor_extractor-0.15.2.dist-info/METADATA,sha256=yemVOpSDWVQQvBi8M_x0HUjiCzoeYX7d_Dfliv8XoYs,6347
350
- castor_extractor-0.15.2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
351
- castor_extractor-0.15.2.dist-info/entry_points.txt,sha256=EQUCoNjSHevxmY5ZathX_fLZPcuBHng23rj0SSUrLtI,1345
352
- castor_extractor-0.15.2.dist-info/RECORD,,
349
+ castor_extractor-0.16.1.dist-info/METADATA,sha256=tjFndsdmxa0NO7qy3Ddcz1pFZGnDsfPmTQ3x3o-UEeA,6412
350
+ castor_extractor-0.16.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
351
+ castor_extractor-0.16.1.dist-info/entry_points.txt,sha256=EQUCoNjSHevxmY5ZathX_fLZPcuBHng23rj0SSUrLtI,1345
352
+ castor_extractor-0.16.1.dist-info/RECORD,,