castor-extractor 0.9.2__py3-none-any.whl → 0.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.10.1 - 2023-12-04
+
+* Domo: fix pagination
+
+## 0.10.0 - 2023-11-28
+
+* Looker : extract all Looker Explores, even if unused in Dashboards
+
 ## 0.9.2 - 2023-11-23
 
 * Looker : remove deprecated all_looks parameter

castor_extractor/visualization/domo/client/client.py CHANGED
@@ -128,7 +128,10 @@ class DomoClient:
         while pagination.needs_increment:
             results = self._get_many(
                 endpoint=endpoint,
-                params={"offset": pagination.offset},
+                params={
+                    "offset": pagination.offset,
+                    "limit": pagination.per_page,
+                },
             )
             all_results.extend(results)
             number_of_items = len(results)

castor_extractor/visualization/domo/client/pagination.py CHANGED
@@ -1,23 +1,28 @@
 from dataclasses import dataclass
+from typing import Optional
 
-PER_PAGE = 50
+PER_PAGE = 50  # maximum value accepted by DOMO is 50
 
 
 @dataclass
 class Pagination:
     """Handles pagination within DOMO Api"""
 
-    number_results: int = PER_PAGE  # max init
+    number_results: Optional[int] = None
     offset: int = 0
     per_page: int = PER_PAGE
     should_stop: bool = False
 
     @property
     def needs_increment(self) -> bool:
+        if self.number_results is None:
+            return True  # first iteration
+
         if (self.number_results < self.per_page) or self.should_stop:
             return False
+
         return True
 
     def increment_offset(self, number_results: int) -> None:
-        self.offset += self.per_page
+        self.offset += number_results
         self.number_results = number_results

castor_extractor/visualization/domo/client/pagination_test.py ADDED
@@ -0,0 +1,22 @@
+from .pagination import Pagination
+
+
+def test_pagination():
+    per_page = 20
+
+    pagination = Pagination(per_page=per_page)
+
+    assert pagination.number_results is None
+    assert pagination.offset == 0
+
+    pagination.increment_offset(per_page)
+    assert pagination.offset == per_page
+    assert pagination.needs_increment
+
+    pagination.increment_offset(per_page)
+    assert pagination.offset == per_page * 2
+    assert pagination.needs_increment
+
+    pagination.increment_offset(5)
+    assert pagination.offset == per_page * 2 + 5
+    assert not pagination.needs_increment
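
Read together, the Domo hunks above change the paging contract in two ways: the client now sends an explicit `limit` alongside `offset`, and `Pagination` advances the offset by the number of results actually returned, using `number_results is None` to detect the first iteration. A minimal sketch of the resulting fetch loop, assuming a hypothetical `fetch_all` wrapper around the real `DomoClient._get_many`:

```python
from typing import Any, List

# Pagination is the dataclass from the hunk above
# (castor_extractor/visualization/domo/client/pagination.py).
from castor_extractor.visualization.domo.client.pagination import Pagination


def fetch_all(client: Any, endpoint: str) -> List[dict]:
    """Hypothetical wrapper; only _get_many and Pagination come from the diff."""
    pagination = Pagination()  # number_results starts as None
    all_results: List[dict] = []
    while pagination.needs_increment:  # True on the first pass
        results = client._get_many(
            endpoint=endpoint,
            params={
                "offset": pagination.offset,   # resume point
                "limit": pagination.per_page,  # DOMO caps this at 50
            },
        )
        all_results.extend(results)
        pagination.increment_offset(len(results))  # advance by what came back
    return all_results
```

The loop stops as soon as a page comes back shorter than `per_page`, which is why the offset has to track the real result count rather than a fixed page size.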

castor_extractor/visualization/looker/__init__.py CHANGED
@@ -1,9 +1,3 @@
-from .api import (
-    ApiClient,
-    Credentials,
-    dashboard_explore_names,
-    explore_names_associated_to_dashboards,
-    lookml_explore_names,
-)
+from .api import ApiClient, Credentials, lookml_explore_names
 from .assets import LookerAsset
 from .extract import extract_all, iterate_all_data

castor_extractor/visualization/looker/api/__init__.py CHANGED
@@ -1,7 +1,3 @@
 from .client import ApiClient
 from .sdk import Credentials
-from .utils import (
-    dashboard_explore_names,
-    explore_names_associated_to_dashboards,
-    lookml_explore_names,
-)
+from .utils import lookml_explore_names

castor_extractor/visualization/looker/api/client.py CHANGED
@@ -191,7 +191,7 @@ class ApiClient:
     def explores(
         self,
         explore_names=Iterator[Tuple[str, str]],
-    ) -> List[LookmlModelExplore]:
+    ) -> Iterator[LookmlModelExplore]:
         """Iterates explores of the given Looker account for the provided model/explore names"""
 
         @safe_mode(self._safe_mode)
@@ -202,11 +202,10 @@
             self._on_api_call()
             return explore
 
-        explores = [
-            _call(model_name, explore_name)
-            for model_name, explore_name in explore_names
-        ]
-        return list(filter(None, explores))
+        for lookml_model_name, lookml_explore_name_ in explore_names:
+            explore_ = _call(lookml_model_name, lookml_explore_name_)
+            if explore_ is not None:
+                yield explore_
 
     def connections(self) -> List[DBConnection]:
         """Lists databases connections of the given Looker account"""

castor_extractor/visualization/looker/api/utils.py CHANGED
@@ -1,6 +1,6 @@
 from typing import Iterable, Set, Tuple
 
-from .sdk import Dashboard, LookmlModel
+from .sdk import LookmlModel
 
 
 def lookml_explore_names(
@@ -22,28 +22,3 @@ def lookml_explore_names(
         # accept hidden resources
         if model.name and explore.name
     }
-
-
-def dashboard_explore_names(
-    dashboards: Iterable[Dashboard],
-) -> Set[Tuple[str, str]]:
-    """Explores that appear in dashboards"""
-    elements = (
-        element
-        for dashboard in dashboards
-        for element in dashboard.dashboard_elements or []
-    )
-
-    return {
-        (element.query.model, element.query.view)
-        for element in elements
-        if element.query and element.query.model and element.query.view
-    }
-
-
-def explore_names_associated_to_dashboards(
-    lookmls: Iterable[LookmlModel],
-    dashboard_explore_names_: Set[Tuple[str, str]],
-):
-    """Retrieve only explores that are associated to a looker dashboard"""
-    return lookml_explore_names(lookmls).intersection(dashboard_explore_names_)

castor_extractor/visualization/looker/extract.py CHANGED
@@ -11,12 +11,8 @@ from ...utils import (
     write_json,
     write_summary,
 )
-from .api import (
-    ApiClient,
-    Credentials,
-    dashboard_explore_names,
-    explore_names_associated_to_dashboards,
-)
+from .api import ApiClient, Credentials, lookml_explore_names
+from .api.sdk import LookmlModel
 from .assets import LookerAsset
 from .multithreading import MultithreadingFetcher
 from .parameters import get_parameters
@@ -24,6 +20,15 @@ from .parameters import get_parameters
 logger = logging.getLogger(__name__)
 
 
+def _extract_explores_by_name(
+    lookmls: Iterable[LookmlModel], client: ApiClient
+) -> Iterable[dict]:
+    explore_names = lookml_explore_names(lookmls)
+    explores = client.explores(explore_names)
+    for explore in explores:
+        yield deep_serialize(explore)  # type: ignore
+
+
 def _safe_mode(directory: str) -> SafeMode:
     add_logging_file_handler(directory)
     return SafeMode((Exception,), float("inf"))
@@ -79,10 +84,8 @@ def iterate_all_data(
     if search_per_folder:
         dashboards_stream = fetcher.fetch_assets(LookerAsset.DASHBOARDS)
         yield LookerAsset.DASHBOARDS, StreamableList(dashboards_stream)
-        dashboard_explore_names_ = fetcher.explores
     else:
         dashboards = client.dashboards()
-        dashboard_explore_names_ = dashboard_explore_names(dashboards)
         yield LookerAsset.DASHBOARDS, deep_serialize(dashboards)
 
     logger.info("Extracting lookml models from Looker API")
@@ -90,10 +93,8 @@ def iterate_all_data(
     yield LookerAsset.LOOKML_MODELS, deep_serialize(lookmls)
 
     logger.info("Extracting explores from Looker API")
-    explore_names = explore_names_associated_to_dashboards(
-        lookmls, dashboard_explore_names_
-    )
-    yield LookerAsset.EXPLORES, deep_serialize(client.explores(explore_names))
+    explores = _extract_explores_by_name(lookmls, client)
+    yield LookerAsset.EXPLORES, StreamableList(explores)
     del lookmls
 
     logger.info("Extracting connections from Looker API")

castor_extractor/visualization/looker/multithreading.py CHANGED
@@ -2,13 +2,13 @@ import logging
 import sys
 from concurrent.futures import ThreadPoolExecutor
 from functools import partial
-from typing import Iterable, List, Set, Tuple
+from typing import Iterable, List, Set
 
 from tqdm import tqdm  # type: ignore
 
 from ...utils import RetryStrategy, deep_serialize, retry
-from . import ApiClient, dashboard_explore_names
-from .api.sdk import Dashboard, SDKError
+from . import ApiClient
+from .api.sdk import SDKError
 from .assets import LookerAsset
 
 logger = logging.getLogger(__name__)
@@ -54,16 +54,6 @@ class MultithreadingFetcher:
         self._thread_pool_size = thread_pool_size
         self._log_to_stdout = log_to_stdout
 
-        self.explores: Set[Tuple[str, str]] = set()
-
-    def _save_explore_names(self, dashboards_per_folder: Iterable[Dashboard]):
-        """
-        Since dashboards are streamed right to the file, we need to keep
-        the relevant information to extract Explores later.
-        """
-        explores = dashboard_explore_names(dashboards_per_folder)
-        self.explores.update(explores)
-
     def _progress_bar(self, fetch_results: Iterable, total: int) -> tqdm:
         """Create a tqdm progress bar with the appropriate logs destination"""
         file = sys.stderr
@@ -73,7 +63,7 @@ class MultithreadingFetcher:
 
         return tqdm(fetch_results, total=total, file=file)
 
-    def fetch_assets(self, asset: LookerAsset):
+    def fetch_assets(self, asset: LookerAsset) -> Iterable[dict]:
         """
         Yields serialized Looks or Dashboards with a request per folder ID.
         Requests are parallelised.
@@ -88,12 +78,10 @@ class MultithreadingFetcher:
             fetch_results = executor.map(_fetch, self._folder_ids)
 
             for results in self._progress_bar(fetch_results, total_folders):
-                if asset == LookerAsset.DASHBOARDS:
-                    self._save_explore_names(results)
-
                 for result in results:
                     if not result:
                         continue
+
                     total_assets_count += len(result)
                     yield deep_serialize(result)
 

castor_extractor/warehouse/bigquery/client.py CHANGED
@@ -1,8 +1,12 @@
 from typing import List, Optional, Set, Tuple
 
+from google.api_core.exceptions import Forbidden
+from google.api_core.page_iterator import Iterator as PageIterator
 from google.cloud.bigquery import Client as GoogleCloudClient  # type: ignore
+from google.cloud.bigquery.dataset import Dataset  # type: ignore
 from google.oauth2.service_account import Credentials  # type: ignore
 
+from ...utils import retry
 from ..abstract import SqlalchemyClient
 
 BIGQUERY_URI = "bigquery://"
@@ -10,6 +14,9 @@ BIGQUERY_URI = "bigquery://"
 CREDENTIALS_INFO_KEY = "credentials_info"
 PROJECT_ID_KEY = "project_id"
 
+_RETRY_NUMBER = 1
+_RETRY_BASE_MS = 60_000
+
 
 class BigQueryClient(SqlalchemyClient):
     """Connect to BigQuery and run SQL queries"""
@@ -25,6 +32,9 @@ class BigQueryClient(SqlalchemyClient):
         self._db_allowed = db_allowed
         self._db_blocked = db_blocked
         self._dataset_blocked = dataset_blocked
+        self.client = self._client()
+        self._projects: List[str] | None = None
+        self._datasets: List[Dataset] | None = None
 
     @staticmethod
     def name() -> str:
@@ -51,7 +61,7 @@ class BigQueryClient(SqlalchemyClient):
     def _build_uri(self, credentials: dict) -> str:
         return BIGQUERY_URI
 
-    def _google_cloud_client(self) -> GoogleCloudClient:
+    def _client(self) -> GoogleCloudClient:
         assert (
             CREDENTIALS_INFO_KEY in self._options
         ), "Missing BigQuery credentials in engine's options"
@@ -61,25 +71,47 @@ class BigQueryClient(SqlalchemyClient):
             credentials=Credentials.from_service_account_info(credentials),
         )
 
-    def _list_datasets(self) -> List:
-        client = self._google_cloud_client()
-        return [
-            dataset
-            for project_id in self.get_projects()
-            for dataset in client.list_datasets(project_id)
-            if self._keep_dataset(dataset.dataset_id)
-        ]
+    def _list_datasets(self) -> List[Dataset]:
+        """
+        Returns datasets available for the given GCP client
+        Cache the result in self._datasets to reduce number of API calls
+        """
+        if self._datasets is None:
+            self._datasets = [
+                dataset
+                for project_id in self.get_projects()
+                for dataset in self.client.list_datasets(project_id)
+                if self._keep_dataset(dataset.dataset_id)
+            ]
+        return self._datasets
+
+    @retry((Forbidden,), count=_RETRY_NUMBER, base_ms=_RETRY_BASE_MS)
+    def _list_projects(self) -> PageIterator:
+        """
+        Note: Calling list_projects from GoogleCloudClient causes some
+        ```
+        google.api_core.exceptions.Forbidden: 403 GET https://bigquery.googleapis.com/bigquery/v2/projects?prettyPrint=false
+        Quota exceeded: Your user exceeded quota for concurrent project.lists requests.
+        ```
+
+        This function aims to isolate the call with a custom retry strategy.
+        Note that google allows a retry parameter on client.list_projects but
+        that looks way too complex to customize.
+        """
+        return self.client.list_projects()
 
     def get_projects(self) -> List[str]:
         """
         Returns distinct project_id available for the given GCP client
+        Cache the result in self._projects to reduce number of API calls
         """
-        client = self._google_cloud_client()
-        return [
-            p.project_id
-            for p in client.list_projects()
-            if self._keep_project(p.project_id)
-        ]
+        if self._projects is None:
+            self._projects = [
+                p.project_id
+                for p in self._list_projects()
+                if self._keep_project(p.project_id)
+            ]
+        return self._projects
 
     def get_regions(self) -> Set[Tuple[str, str]]:
         """

castor_extractor/warehouse/bigquery/client_test.py CHANGED
@@ -26,8 +26,11 @@ class MockBigQueryClient(BigQueryClient):
         self._db_allowed = ["project_2", "project_1"]
         self._dataset_blocked = ["hidden_dataset"]
         self._db_blocked = ["hidden_project"]
+        self._projects = None
+        self._datasets = None
+        self.client = self._client()
 
-    def _google_cloud_client(self) -> Mock:
+    def _client(self) -> Mock:
         fake_client = Mock()
         fake_client.list_projects = Mock(
             return_value=[

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: castor-extractor
-Version: 0.9.2
+Version: 0.10.1
 Summary: Extract your metadata assets.
 Home-page: https://www.castordoc.com/
 License: EULA

@@ -1,4 +1,4 @@
-CHANGELOG.md,sha256=t0iqGADIz1dmWKroLXNSYADYg84v9eM08zBKEWItKvk,7255
+CHANGELOG.md,sha256=YHDD-wLJh1LKqODJfg07WPSIj6pPmkI8F0za9PzKFiw,7397
 Dockerfile,sha256=TC6hFjG3mvnt1nkw2EpaS42hRYaGA2YIPKgWhVSKTWc,303
 LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
 README.md,sha256=EL6JpZxvaQFOYv5WFuSjZvSk9Hcpsf7alMlUC5IPFjA,3423
@@ -82,28 +82,29 @@ castor_extractor/visualization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5
 castor_extractor/visualization/domo/__init__.py,sha256=_mAYVfoVLizfLGF_f6ZiwBhdPpvoJY_diySf33dt3Jo,127
 castor_extractor/visualization/domo/assets.py,sha256=JI45W7n5z_opbuRrCwgzQJuN-VikuRrilUj5g5lX7Hk,184
 castor_extractor/visualization/domo/client/__init__.py,sha256=UDszV3IXNC9Wp_j55NZ-6ey2INo0TYtAg2QNIJOjglE,88
-castor_extractor/visualization/domo/client/client.py,sha256=QbbvkCgCzNg1SH0W5oE2NWaIBfQypAn07dV2BaLQX-o,8994
+castor_extractor/visualization/domo/client/client.py,sha256=MN-qleOpSVawZRlVL8EiglEOMj0cdvMqEbgYYBfz86w,9083
 castor_extractor/visualization/domo/client/client_test.py,sha256=5Z_C2B0fs60aGnMF78llBnRkzehH4tZqRPXPkccLvBM,583
 castor_extractor/visualization/domo/client/credentials.py,sha256=CksQ9W9X6IGjTlYN0okwGAmURMRJKAjctxODAvAJUAo,1148
 castor_extractor/visualization/domo/client/endpoints.py,sha256=-B7mRKJ44Bg0hb3E5dQXvCVK6qHzizdeSQXsSwEJEIY,1812
-castor_extractor/visualization/domo/client/pagination.py,sha256=BT9ZIb-GYGWEQZpK_80aAYKQ-xrr7VlfsFbZv9rcejQ,565
+castor_extractor/visualization/domo/client/pagination.py,sha256=E3WMK9Uw-u5qt9LCUzwKdKh9oSzyFEC0GgnRMFgxgrs,713
+castor_extractor/visualization/domo/client/pagination_test.py,sha256=nV4yZWfus13QFCr-tlBUgwva21VqfpF6P-0ks_Awwis,581
 castor_extractor/visualization/domo/constants.py,sha256=AriJZPrCY5Z3HRUANrMu-4U0b7hQK_jRDcxiB-hbrQ4,233
 castor_extractor/visualization/domo/extract.py,sha256=GWWRfPEMt4SgzBGFaTcoOabsoOqLRFIEFAtgXwb8LDI,2567
-castor_extractor/visualization/looker/__init__.py,sha256=nUPT7ojaYE56Xk9LCCquRfuCepBIdbe-GwGbZNO7oXo,235
-castor_extractor/visualization/looker/api/__init__.py,sha256=eMtzoMAE7ZnvfwJJAOh9Cooztj4Juzc40wSEHhIYXw4,181
-castor_extractor/visualization/looker/api/client.py,sha256=KcdvbWWE-y1_vYixspxSkBHNoI_oX8Aeie5Tpz4EXj4,9547
+castor_extractor/visualization/looker/__init__.py,sha256=Xu5bJ3743kaP8szMMp2NXCgvM1EdOQgtic4utUlO9Cc,145
+castor_extractor/visualization/looker/api/__init__.py,sha256=rN03VMucxIqc0yfd17dIe3ZNFpcg5CA09epn1fKJg90,99
+castor_extractor/visualization/looker/api/client.py,sha256=R-hnVK1TYaCPYaJ1wvpVUwa_AqQPu2RAcZz0kOK4l58,9582
 castor_extractor/visualization/looker/api/client_test.py,sha256=wsi20-neBXHaahDqf4nwCp8Ew5fRFCmVHG3OqrePKFs,1868
 castor_extractor/visualization/looker/api/constants.py,sha256=pZpq09tqcGi2Vh8orXxn9eil8ewfPUOLKfVuqgV2W-A,4126
 castor_extractor/visualization/looker/api/sdk.py,sha256=hSNcRsCoFae3zmjWFGsMrhQCIP57TcMJ2SorMPYJwn4,3553
 castor_extractor/visualization/looker/api/sdk_test.py,sha256=NHtKZTflPhqzBFHs1TyAQaubgxfzLLwYKFT8rEqR55I,1742
-castor_extractor/visualization/looker/api/utils.py,sha256=9xDKekJU6FNl_X5clO2NkDxPm7Q9t4ogJC7-dbh7HXo,1315
+castor_extractor/visualization/looker/api/utils.py,sha256=NpP90CA-SwdUjHhaWFBsKpJz0Z9BXgDOahIqfc3R9tk,565
 castor_extractor/visualization/looker/assets.py,sha256=4EwCI9VwIYRPrIjHU-ZnPY3Mf3cRykWACiYXanr5jFI,442
 castor_extractor/visualization/looker/constant.py,sha256=0tX6KOGdc9O6FiHcoj08j-QIZyBI2Mhh8UQc_XahDaM,694
 castor_extractor/visualization/looker/env.py,sha256=vPqirdeGKm3as2T-tBTjbpulQe8W7-3UE2j-Z57wFXk,1174
-castor_extractor/visualization/looker/extract.py,sha256=a0FahT3zi6izSQTKAE8xSX7vovsy1XUKGS7yQ2sBWUo,5051
+castor_extractor/visualization/looker/extract.py,sha256=vOIP8Hoxv05MiRa-l79YKOCHahuNiSW9uSKjwQQQQKs,5112
 castor_extractor/visualization/looker/fields.py,sha256=WmiSehmczWTufCLg4r2Ozq2grUpzxDNvIAHyGuOoGs4,636
 castor_extractor/visualization/looker/fields_test.py,sha256=7Cwq8Qky6aTZg8nCHp1gmPJtd9pGNB4QeMIRRWdHo5w,782
-castor_extractor/visualization/looker/multithreading.py,sha256=SvyUiXXXbYeo0xtiqjVUdyM-btLeS-qnBjVEQvUjF5w,3122
+castor_extractor/visualization/looker/multithreading.py,sha256=BB_oUkPo3LH96f1pPaywQ5Y8r2T7vo4hdAQl7y0bpPw,2596
 castor_extractor/visualization/looker/parameters.py,sha256=Nk2hfrg3L9twU-51Q7Wdp9uaxy8M2_juEebWoLfIMPc,2427
 castor_extractor/visualization/metabase/__init__.py,sha256=hSIoVgPzhQh-9H8XRUzga4EZSOYejGdH-qY_hBNGbyw,125
 castor_extractor/visualization/metabase/assets.py,sha256=XeP8UzbqsrleBtdv2nJ2LtWC_p3TAL7g0zlrcjt82TM,2814
@@ -221,8 +222,8 @@ castor_extractor/warehouse/abstract/query.py,sha256=GAgeISCmAdrkTKzFGO79hQDf6SA6
 castor_extractor/warehouse/abstract/time_filter.py,sha256=bggIONfMmUxffkA6TwM3BsjfS2l9WFxPq8krfsau5pw,935
 castor_extractor/warehouse/abstract/time_filter_test.py,sha256=PIkegB7KOKBdpc6zIvmyl_CeQyADeFDplyQ8HTNU5LA,448
 castor_extractor/warehouse/bigquery/__init__.py,sha256=cQTw-nkKg3CQemgufknJHPssvUWCPURTMVb6q5hiIcs,125
-castor_extractor/warehouse/bigquery/client.py,sha256=EHwTIoKR6P-W-_rGT5mYGXHdia8d7XjlxwstTVNQYmY,3062
-castor_extractor/warehouse/bigquery/client_test.py,sha256=ws7JMYW_-7Ewo7XLHLibuOkemfTN87goZwYc-QufCMY,1430
+castor_extractor/warehouse/bigquery/client.py,sha256=hxz8nYx8O60Ve48hDdQ0vEJEEpRgXdMXos7kJj4fI3w,4483
+castor_extractor/warehouse/bigquery/client_test.py,sha256=Ym8e4d--0YQwiVcNUnXLx0X-X6ZznwNMBMbMaDS5oEA,1514
 castor_extractor/warehouse/bigquery/extract.py,sha256=jo_9sxsCFl0ZaL1VdQ9JZ5iEEGJQVm_ogJhfpmVgT3k,2810
 castor_extractor/warehouse/bigquery/queries/.sqlfluff,sha256=ce8UDW2k39v6RBVxgKqjOHHYMoGN9S9f7BCZNHHhox8,30
 castor_extractor/warehouse/bigquery/queries/column.sql,sha256=NxdTnHwomHTEGSc-UoXFKUwg59I9XAOwrSau7JUqGQE,1815
@@ -289,7 +290,7 @@ castor_extractor/warehouse/synapse/queries/schema.sql,sha256=aX9xNrBD_ydwl-znGSF
 castor_extractor/warehouse/synapse/queries/table.sql,sha256=mCE8bR1Vb7j7SwZW2gafcXidQ2fo1HwxcybA8wP2Kfs,1049
 castor_extractor/warehouse/synapse/queries/user.sql,sha256=sTb_SS7Zj3AXW1SggKPLNMCd0qoTpL7XI_BJRMaEpBg,67
 castor_extractor/warehouse/synapse/queries/view_ddl.sql,sha256=3EVbp5_yTgdByHFIPLHmnoOnqqLE77SrjAwFDvu4e54,249
-castor_extractor-0.9.2.dist-info/METADATA,sha256=X4jmbMqEyEiFECwvadiwxz0M-9ifkn1TQZG2chp6A6w,5879
-castor_extractor-0.9.2.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
-castor_extractor-0.9.2.dist-info/entry_points.txt,sha256=cvLvgE8Yi10sIiafUVL86XZPMUUyu9x11CF5PshAyiw,1045
-castor_extractor-0.9.2.dist-info/RECORD,,
+castor_extractor-0.10.1.dist-info/METADATA,sha256=5lkBM0b76v9IbuKEBQ9lnq5vCeuP0Aqa0JyizNnbMc8,5880
+castor_extractor-0.10.1.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
+castor_extractor-0.10.1.dist-info/entry_points.txt,sha256=cvLvgE8Yi10sIiafUVL86XZPMUUyu9x11CF5PshAyiw,1045
+castor_extractor-0.10.1.dist-info/RECORD,,