castor-extractor 0.18.2__py3-none-any.whl → 0.18.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of castor-extractor might be problematic. Click here for more details.

Files changed (37) hide show
  1. CHANGELOG.md +20 -0
  2. DockerfileUsage.md +21 -0
  3. castor_extractor/commands/extract_domo.py +2 -10
  4. castor_extractor/commands/extract_looker.py +2 -13
  5. castor_extractor/commands/extract_metabase_api.py +4 -10
  6. castor_extractor/commands/extract_metabase_db.py +4 -16
  7. castor_extractor/commands/extract_mode.py +2 -12
  8. castor_extractor/commands/extract_powerbi.py +2 -8
  9. castor_extractor/commands/extract_qlik.py +2 -7
  10. castor_extractor/commands/extract_salesforce.py +3 -12
  11. castor_extractor/commands/extract_salesforce_reporting.py +2 -10
  12. castor_extractor/commands/extract_sigma.py +2 -7
  13. castor_extractor/utils/__init__.py +1 -0
  14. castor_extractor/utils/argument_parser.py +7 -0
  15. castor_extractor/utils/argument_parser_test.py +25 -0
  16. castor_extractor/visualization/domo/extract.py +4 -19
  17. castor_extractor/visualization/looker/extract.py +2 -3
  18. castor_extractor/visualization/metabase/extract.py +2 -2
  19. castor_extractor/visualization/mode/extract.py +4 -3
  20. castor_extractor/visualization/powerbi/extract.py +4 -15
  21. castor_extractor/visualization/qlik/extract.py +3 -6
  22. castor_extractor/visualization/salesforce_reporting/extract.py +6 -20
  23. castor_extractor/visualization/sigma/extract.py +4 -13
  24. castor_extractor/warehouse/bigquery/client.py +41 -6
  25. castor_extractor/warehouse/bigquery/extract.py +1 -0
  26. castor_extractor/warehouse/bigquery/query.py +23 -9
  27. castor_extractor/warehouse/bigquery/types.py +1 -2
  28. castor_extractor/warehouse/databricks/client.py +3 -0
  29. castor_extractor/warehouse/salesforce/client.py +28 -3
  30. castor_extractor/warehouse/salesforce/format.py +6 -4
  31. castor_extractor/warehouse/salesforce/format_test.py +1 -1
  32. castor_extractor/warehouse/salesforce/soql.py +6 -1
  33. {castor_extractor-0.18.2.dist-info → castor_extractor-0.18.7.dist-info}/METADATA +1 -1
  34. {castor_extractor-0.18.2.dist-info → castor_extractor-0.18.7.dist-info}/RECORD +37 -34
  35. {castor_extractor-0.18.2.dist-info → castor_extractor-0.18.7.dist-info}/LICENCE +0 -0
  36. {castor_extractor-0.18.2.dist-info → castor_extractor-0.18.7.dist-info}/WHEEL +0 -0
  37. {castor_extractor-0.18.2.dist-info → castor_extractor-0.18.7.dist-info}/entry_points.txt +0 -0
CHANGELOG.md CHANGED
@@ -1,5 +1,25 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.18.7 - 2024-08-01
4
+
5
+ * Salesforce: extract table descriptions
6
+
7
+ ## 0.18.6 - 2024-07-30
8
+
9
+ * BigQuery: introduce extended regions to extract missing queries
10
+
11
+ ## 0.18.5 - 2024-07-17
12
+
13
+ * Salesforce: extract DeveloperName and tooling url
14
+
15
+ ## 0.18.4 - 2024-07-16
16
+
17
+ * Fix environment variables assignments for credentials
18
+
19
+ ## 0.18.3 - 2024-07-16
20
+
21
+ * bump dependencies (minor and patches)
22
+
3
23
  ## 0.18.2 - 2024-07-08
4
24
 
5
25
  * Added StatusCode handling to SafeMode
DockerfileUsage.md ADDED
@@ -0,0 +1,21 @@
1
+ # Dockerfile usage for CastorDoc package
2
+
3
+ ## How To
4
+
5
+ - The Dockerfile is present on the pypi package
6
+ - To build it, use the command `docker build -t castor-extractor-looker --build-arg EXTRA=looker .`, replacing `looker` with one of: [bigquery,looker,metabase,powerbi,qlik,redshift,snowflake,tableau]
7
+ - To run it, use `docker run -v <local-path>:/data --env-file <castor-extract-looker.env> castor-extractor-looker`, where `<local-path>` has to be replaced and `<castor-extract-looker.env>` has to be set.
8
+ - Extracted data will be available in `<local-path>`. The path must exist.
9
+ - `<castor-extract-looker.env>` should contain the environment variables for credentials, URL, etc.
10
+
11
+ #### example
12
+
13
+ ```bash
14
+ docker run -v /logs:/data --env-file /config/castor-extract-looker.env castor-extractor-looker
15
+ ```
16
+
17
+ ## Limitation
18
+
19
+ - This Docker image targets a single, specific technology
20
+ - This Docker image is based on Python 3.11
21
+ - This Docker image uses the latest castor-extractor package version
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import domo # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -38,13 +39,4 @@ def main():
38
39
 
39
40
  parser.add_argument("-o", "--output", help="Directory to write to")
40
41
 
41
- args = parser.parse_args()
42
-
43
- domo.extract_all(
44
- api_token=args.api_token,
45
- base_url=args.base_url,
46
- client_id=args.client_id,
47
- cloud_id=args.cloud_id,
48
- developer_token=args.developer_token,
49
- output_directory=args.output,
50
- )
42
+ domo.extract_all(**parse_filled_arguments(parser))
@@ -1,5 +1,6 @@
1
1
  from argparse import ArgumentParser
2
2
 
3
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
3
4
  from castor_extractor.visualization import looker # type: ignore
4
5
 
5
6
 
@@ -33,16 +34,4 @@ def main():
33
34
  action="store_true",
34
35
  )
35
36
 
36
- args = parser.parse_args()
37
-
38
- looker.extract_all(
39
- base_url=args.base_url,
40
- client_id=args.username,
41
- client_secret=args.password,
42
- log_to_stdout=args.log_to_stdout,
43
- output_directory=args.output,
44
- safe_mode=args.safe_mode,
45
- search_per_folder=args.search_per_folder,
46
- thread_pool_size=args.thread_pool_size,
47
- timeout=args.timeout,
48
- )
37
+ looker.extract_all(**parse_filled_arguments(parser))
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import metabase # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -15,16 +16,9 @@ def main():
15
16
 
16
17
  parser.add_argument("-o", "--output", help="Directory to write to")
17
18
 
18
- args = parser.parse_args()
19
+ args = parse_filled_arguments(parser)
19
20
 
20
- credentials = metabase.MetabaseApiCredentials(
21
- base_url=args.base_url,
22
- user=args.username,
23
- password=args.password,
24
- )
21
+ credentials = metabase.MetabaseApiCredentials(**args)
25
22
  client = metabase.ApiClient(credentials)
26
23
 
27
- metabase.extract_all(
28
- client,
29
- output_directory=args.output,
30
- )
24
+ metabase.extract_all(client, **args)
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import metabase # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -33,22 +34,9 @@ def main():
33
34
 
34
35
  parser.add_argument("-o", "--output", help="Directory to write to")
35
36
 
36
- args = parser.parse_args()
37
-
38
- credentials = metabase.MetabaseDbCredentials(
39
- host=args.host,
40
- port=args.port,
41
- database=args.database,
42
- schema=args.schema,
43
- user=args.username,
44
- password=args.password,
45
- encryption_secret_key=args.encryption_secret_key,
46
- require_ssl=args.require_ssl,
47
- )
37
+ args = parse_filled_arguments(parser)
38
+ credentials = metabase.MetabaseDbCredentials(**args)
48
39
 
49
40
  client = metabase.DbClient(credentials)
50
41
 
51
- metabase.extract_all(
52
- client,
53
- output_directory=args.output,
54
- )
42
+ metabase.extract_all(client, **args)
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import mode # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -24,15 +25,4 @@ def main():
24
25
 
25
26
  parser.add_argument("-o", "--output", help="Directory to write to")
26
27
 
27
- args = parser.parse_args()
28
- credentials = mode.Credentials(
29
- host=args.host,
30
- workspace=args.workspace,
31
- token=args.token,
32
- secret=args.secret,
33
- )
34
-
35
- mode.extract_all(
36
- credentials,
37
- output_directory=args.output,
38
- )
28
+ mode.extract_all(**parse_filled_arguments(parser))
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import powerbi # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -20,11 +21,4 @@ def main():
20
21
  )
21
22
  parser.add_argument("-o", "--output", help="Directory to write to")
22
23
 
23
- args = parser.parse_args()
24
- powerbi.extract_all(
25
- tenant_id=args.tenant_id,
26
- client_id=args.client_id,
27
- secret=args.secret,
28
- scopes=args.scopes,
29
- output_directory=args.output,
30
- )
24
+ powerbi.extract_all(**parse_filled_arguments(parser))
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import qlik # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -22,10 +23,4 @@ def main():
22
23
  "missing rights on some assets.",
23
24
  )
24
25
 
25
- args = parser.parse_args()
26
- qlik.extract_all(
27
- base_url=args.base_url,
28
- api_key=args.api_key,
29
- output_directory=args.output,
30
- except_http_error_statuses=args.except_http_error_statuses,
31
- )
26
+ qlik.extract_all(**parse_filled_arguments(parser))
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.warehouse import salesforce # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -29,15 +30,5 @@ def main():
29
30
  )
30
31
  parser.set_defaults(skip_existing=False)
31
32
 
32
- args = parser.parse_args()
33
-
34
- salesforce.extract_all(
35
- username=args.username,
36
- password=args.password,
37
- client_id=args.client_id,
38
- client_secret=args.client_secret,
39
- security_token=args.security_token,
40
- base_url=args.base_url,
41
- output_directory=args.output,
42
- skip_existing=args.skip_existing,
43
- )
33
+ args = parse_filled_arguments(parser)
34
+ salesforce.extract_all(output_directory=args.get("output"), **args)
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import salesforce_reporting # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -21,13 +22,4 @@ def main():
21
22
  parser.add_argument("-b", "--base-url", help="Salesforce instance URL")
22
23
  parser.add_argument("-o", "--output", help="Directory to write to")
23
24
 
24
- args = parser.parse_args()
25
- salesforce_reporting.extract_all(
26
- username=args.username,
27
- password=args.password,
28
- client_id=args.client_id,
29
- client_secret=args.client_secret,
30
- security_token=args.security_token,
31
- base_url=args.base_url,
32
- output_directory=args.output,
33
- )
25
+ salesforce_reporting.extract_all(**parse_filled_arguments(parser))
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  from argparse import ArgumentParser
3
3
 
4
+ from castor_extractor.utils import parse_filled_arguments # type: ignore
4
5
  from castor_extractor.visualization import sigma # type: ignore
5
6
 
6
7
  logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
@@ -14,10 +15,4 @@ def main():
14
15
  parser.add_argument("-a", "--api-token", help="Generated API key")
15
16
  parser.add_argument("-o", "--output", help="Directory to write to")
16
17
 
17
- args = parser.parse_args()
18
- sigma.extract_all(
19
- host=args.host,
20
- client_id=args.client_id,
21
- api_token=args.api_token,
22
- output_directory=args.output,
23
- )
18
+ sigma.extract_all(**parse_filled_arguments(parser))
@@ -1,3 +1,4 @@
1
+ from .argument_parser import parse_filled_arguments
1
2
  from .client import (
2
3
  AbstractSourceClient,
3
4
  ExtractionQuery,
@@ -0,0 +1,7 @@
1
+ from argparse import ArgumentParser
2
+
3
+
4
+ def parse_filled_arguments(parser: ArgumentParser) -> dict:
5
+ """Parse arguments and remove all those with None values"""
6
+ parsed_arguments = vars(parser.parse_args())
7
+ return {k: v for k, v in parsed_arguments.items() if v is not None}
@@ -0,0 +1,25 @@
1
+ from argparse import Namespace
2
+
3
+ from .argument_parser import parse_filled_arguments
4
+
5
+
6
+ class MockArgumentParser:
7
+
8
+ def __init__(self):
9
+ self.attributes = {}
10
+
11
+ def add_argument(self, name, value):
12
+ self.attributes[name] = value
13
+
14
+ def parse_args(self) -> Namespace:
15
+ return Namespace(**self.attributes)
16
+
17
+
18
+ def test_parse_filled_arguments():
19
+ parser = MockArgumentParser()
20
+ parser.add_argument("filled", "value")
21
+ parser.add_argument("unfilled", None)
22
+ parser.add_argument("empty_str", "")
23
+
24
+ expected = {"filled": "value", "empty_str": ""}
25
+ assert parse_filled_arguments(parser) == expected
@@ -1,5 +1,5 @@
1
1
  import logging
2
- from typing import Iterable, Iterator, Optional, Tuple, Union
2
+ from typing import Iterable, Iterator, Tuple, Union
3
3
 
4
4
  from ...utils import (
5
5
  OUTPUT_DIR,
@@ -42,28 +42,13 @@ def iterate_all_data(
42
42
  yield DomoAsset.DATAFLOWS, list(deep_serialize(dataflows))
43
43
 
44
44
 
45
- def extract_all(
46
- api_token: Optional[str] = None,
47
- base_url: Optional[str] = None,
48
- client_id: Optional[str] = None,
49
- cloud_id: Optional[str] = None,
50
- developer_token: Optional[str] = None,
51
- output_directory: Optional[str] = None,
52
- ) -> None:
45
+ def extract_all(**kwargs) -> None:
53
46
  """
54
47
  Extract data from Domo API
55
48
  Store the output files locally under the given output_directory
56
49
  """
57
-
58
- _output_directory = output_directory or from_env(OUTPUT_DIR)
59
-
60
- credentials = DomoCredentials(
61
- base_url=base_url,
62
- client_id=client_id,
63
- api_token=api_token,
64
- developer_token=developer_token,
65
- cloud_id=cloud_id,
66
- )
50
+ _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
51
+ credentials = DomoCredentials(**kwargs)
67
52
  client = DomoClient(credentials=credentials)
68
53
 
69
54
  ts = current_timestamp()
@@ -126,11 +126,10 @@ def extract_all(**kwargs) -> None:
126
126
  Extract Data From looker and store it locally in files under the
127
127
  output_directory
128
128
  """
129
- args = {arg: value for arg, value in kwargs.items() if value is not None}
130
- extraction_parameters = ExtractionParameters(**args)
129
+ extraction_parameters = ExtractionParameters(**kwargs)
131
130
  output_directory = extraction_parameters.output_directory
132
131
 
133
- credentials = LookerCredentials(**args)
132
+ credentials = LookerCredentials(**kwargs)
134
133
 
135
134
  if extraction_parameters.log_to_stdout:
136
135
  set_stream_handler_to_stdout()
@@ -40,12 +40,12 @@ def iterate_all_data(
40
40
  )
41
41
 
42
42
 
43
- def extract_all(client: ClientMetabase, **kwargs: str) -> None:
43
+ def extract_all(client: ClientMetabase, **kwargs) -> None:
44
44
  """
45
45
  Extract Data From metabase
46
46
  Store the output files locally under the given output_directory
47
47
  """
48
- output_directory = kwargs.get("output_directory") or from_env(OUTPUT_DIR)
48
+ output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
49
49
  ts = current_timestamp()
50
50
 
51
51
  for key, data in iterate_all_data(client):
@@ -37,11 +37,12 @@ def iterate_all_data(
37
37
  yield Asset.MEMBER, deep_serialize(members)
38
38
 
39
39
 
40
- def extract_all(credentials: ModeCredentials, output_directory: str) -> None:
40
+ def extract_all(**kwargs) -> None:
41
41
  """Extract Data From Mode Analytics and store it locally in files under the output_directory"""
42
- output_directory = output_directory or from_env(OUTPUT_DIR)
43
-
42
+ output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
43
+ credentials = ModeCredentials(**kwargs)
44
44
  client = Client(credentials)
45
+
45
46
  ts = current_timestamp()
46
47
 
47
48
  for key, data in iterate_all_data(client):
@@ -1,4 +1,4 @@
1
- from typing import Iterable, List, Optional, Tuple, Union
1
+ from typing import Iterable, List, Tuple, Union
2
2
 
3
3
  from ...utils import (
4
4
  OUTPUT_DIR,
@@ -24,24 +24,13 @@ def iterate_all_data(
24
24
  yield asset, deep_serialize(data)
25
25
 
26
26
 
27
- def extract_all(
28
- tenant_id: str,
29
- client_id: str,
30
- secret: str,
31
- scopes: Optional[List[str]] = None,
32
- output_directory: Optional[str] = None,
33
- ) -> None:
27
+ def extract_all(**kwargs) -> None:
34
28
  """
35
29
  Extract data from PowerBI REST API
36
30
  Store the output files locally under the given output_directory
37
31
  """
38
- _output_directory = output_directory or from_env(OUTPUT_DIR)
39
- creds = PowerbiCredentials(
40
- tenant_id=tenant_id,
41
- client_id=client_id,
42
- secret=secret,
43
- scopes=scopes,
44
- )
32
+ _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
33
+ creds = PowerbiCredentials(**kwargs)
45
34
  client = Client(creds)
46
35
  ts = current_timestamp()
47
36
 
@@ -47,18 +47,15 @@ def iterate_all_data(
47
47
 
48
48
 
49
49
  def extract_all(
50
- base_url: Optional[str] = None,
51
- api_key: Optional[str] = None,
52
- output_directory: Optional[str] = None,
53
- except_http_error_statuses: Optional[List[int]] = None,
50
+ except_http_error_statuses: Optional[List[int]] = None, **kwargs
54
51
  ) -> None:
55
52
  """
56
53
  Extract data from Qlik REST API
57
54
  Store the output files locally under the given output_directory
58
55
  """
59
56
 
60
- credentials = QlikCredentials(base_url=base_url, api_key=api_key)
61
- _output_directory = output_directory or from_env(OUTPUT_DIR)
57
+ credentials = QlikCredentials(**kwargs)
58
+ _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
62
59
 
63
60
  client = QlikClient(
64
61
  credentials=credentials,
@@ -1,5 +1,5 @@
1
1
  import logging
2
- from typing import Iterable, Optional, Tuple, Union
2
+ from typing import Iterable, Tuple, Union
3
3
 
4
4
  from ...utils import (
5
5
  OUTPUT_DIR,
@@ -28,29 +28,15 @@ def iterate_all_data(
28
28
  yield asset.name.lower(), deep_serialize(data)
29
29
 
30
30
 
31
- def extract_all(
32
- username: str,
33
- password: str,
34
- client_id: str,
35
- client_secret: str,
36
- security_token: str,
37
- base_url: str,
38
- output_directory: Optional[str] = None,
39
- ) -> None:
31
+ def extract_all(**kwargs) -> None:
40
32
  """
41
33
  Extract data from Salesforce REST API
42
34
  Store the output files locally under the given output_directory
43
35
  """
44
- _output_directory = output_directory or from_env(OUTPUT_DIR)
45
- creds = SalesforceCredentials(
46
- username=username,
47
- password=password,
48
- client_id=client_id,
49
- client_secret=client_secret,
50
- security_token=security_token,
51
- base_url=base_url,
52
- )
53
- client = SalesforceReportingClient(credentials=creds)
36
+ _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
37
+ credentials = SalesforceCredentials(**kwargs)
38
+
39
+ client = SalesforceReportingClient(credentials=credentials)
54
40
  ts = current_timestamp()
55
41
 
56
42
  for key, data in iterate_all_data(client):
@@ -1,5 +1,5 @@
1
1
  import logging
2
- from typing import Iterable, Iterator, Optional, Tuple, Union
2
+ from typing import Iterable, Iterator, Tuple, Union
3
3
 
4
4
  from ...utils import (
5
5
  OUTPUT_DIR,
@@ -50,24 +50,15 @@ def iterate_all_data(
50
50
  yield SigmaAsset.LINEAGES, list(deep_serialize(lineages))
51
51
 
52
52
 
53
- def extract_all(
54
- host: Optional[str] = None,
55
- client_id: Optional[str] = None,
56
- api_token: Optional[str] = None,
57
- output_directory: Optional[str] = None,
58
- ) -> None:
53
+ def extract_all(**kwargs) -> None:
59
54
  """
60
55
  Extract data from Sigma API
61
56
  Store the output files locally under the given output_directory
62
57
  """
63
58
 
64
- _output_directory = output_directory or from_env(OUTPUT_DIR)
59
+ _output_directory = kwargs.get("output") or from_env(OUTPUT_DIR)
65
60
 
66
- credentials = SigmaCredentials(
67
- host=host,
68
- client_id=client_id,
69
- api_token=api_token,
70
- )
61
+ credentials = SigmaCredentials(**kwargs)
71
62
  client = SigmaClient(credentials=credentials)
72
63
 
73
64
  ts = current_timestamp()
@@ -1,13 +1,14 @@
1
+ import itertools
1
2
  import logging
2
- from typing import List, Optional, Set, Tuple
3
+ from typing import List, Optional, Set
3
4
 
4
- from google.api_core.exceptions import Forbidden
5
- from google.api_core.page_iterator import Iterator as PageIterator
5
+ from google.api_core.exceptions import Forbidden # type: ignore
6
6
  from google.cloud.bigquery import Client as GoogleCloudClient # type: ignore
7
7
  from google.cloud.bigquery.dataset import Dataset # type: ignore
8
8
  from google.oauth2.service_account import Credentials # type: ignore
9
9
 
10
10
  from ...utils import SqlalchemyClient, retry
11
+ from .types import SetTwoString
11
12
 
12
13
  logger = logging.getLogger(__name__)
13
14
 
@@ -117,16 +118,50 @@ class BigQueryClient(SqlalchemyClient):
117
118
  ]
118
119
  return self._projects
119
120
 
120
- def get_regions(self) -> Set[Tuple[str, str]]:
121
+ def get_regions(self) -> SetTwoString:
121
122
  """
122
- Returns distinct (project_id, region) available for the given GCP client
123
+ Returns (project_id, region) available for the given GCP client
124
+ - Loops through projects -> datasets -> region
125
+ - Returns distinct values
126
+ Example:
127
+ project_A
128
+ -> dataset_1:US
129
+ project_B
130
+ -> empty
131
+ project_C
132
+ -> dataset_2:EU
133
+ -> dataset_3:EU
134
+ Will return:
135
+ { (p_A, US), (p_C, EU) }
123
136
  """
124
137
  return {
125
138
  (ds.project, ds._properties["location"])
126
139
  for ds in self._list_datasets()
127
140
  }
128
141
 
129
- def get_datasets(self) -> Set[Tuple[str, str]]:
142
+ def get_extended_regions(self) -> SetTwoString:
143
+ """
144
+ Returns all combinations of (project_id, region) for the given client
145
+ - Fetch all projects
146
+ - Fetch all regions (cross projects)
147
+ - Returns a combination of the two lists
148
+ Example:
149
+ project_A
150
+ -> dataset_1:US
151
+ project_B
152
+ -> empty
153
+ project_C
154
+ -> dataset_2:EU
155
+ -> dataset_3:EU
156
+ Will return:
157
+ { (p_A, EU), (p_A, US), (p_B, EU), (p_B, US), (p_C, EU), (p_C, US) }
158
+ """
159
+ projects = self.get_projects()
160
+ regions = {ds._properties["location"] for ds in self._list_datasets()}
161
+ combinations = itertools.product(projects, regions)
162
+ return set(combinations)
163
+
164
+ def get_datasets(self) -> SetTwoString:
130
165
  """
131
166
  Returns distinct (project_id, dataset_id) available for the given GCP client
132
167
  """
@@ -68,6 +68,7 @@ def extract_all(**kwargs) -> None:
68
68
  query_builder = BigQueryQueryBuilder(
69
69
  regions=client.get_regions(),
70
70
  datasets=client.get_datasets(),
71
+ extended_regions=client.get_extended_regions(),
71
72
  )
72
73
 
73
74
  storage = LocalStorage(directory=output_directory)
@@ -2,18 +2,16 @@ import logging
2
2
  from typing import List, Optional
3
3
 
4
4
  from ..abstract import (
5
- QUERIES_DIR,
6
5
  AbstractQueryBuilder,
7
6
  ExtractionQuery,
8
7
  TimeFilter,
9
8
  WarehouseAsset,
10
9
  )
11
-
12
- # Those queries must be formatted with {region}
13
- from .types import IterTwoString
10
+ from .types import SetTwoString
14
11
 
15
12
  logger = logging.getLogger(__name__)
16
13
 
14
+ # Those queries must be formatted with {region}
17
15
  REGION_REQUIRED = (
18
16
  WarehouseAsset.COLUMN,
19
17
  WarehouseAsset.DATABASE,
@@ -23,6 +21,11 @@ REGION_REQUIRED = (
23
21
  WarehouseAsset.USER,
24
22
  )
25
23
 
24
+ # Some clients use empty projects (no datasets) to run their SQL queries
25
+ # The extended regions is a combination of all regions with all projects
26
+ # It allows to extract those queries which were left apart before
27
+ EXTENDED_REGION_REQUIRED = (WarehouseAsset.QUERY,)
28
+
26
29
  # Those queries must be formatted with {dataset}
27
30
  DATASET_REQUIRED = (WarehouseAsset.VIEW_DDL,)
28
31
 
@@ -38,7 +41,7 @@ SHARDED_ASSETS = (WarehouseAsset.TABLE, WarehouseAsset.COLUMN)
38
41
  SHARDED_FILE_PATH = "cte/sharded.sql"
39
42
 
40
43
 
41
- def _database_formatted(datasets: IterTwoString) -> str:
44
+ def _database_formatted(datasets: SetTwoString) -> str:
42
45
  databases = {db for _, db in datasets}
43
46
  if not databases:
44
47
  # when no datasets are provided condition should pass
@@ -55,10 +58,11 @@ class BigQueryQueryBuilder(AbstractQueryBuilder):
55
58
 
56
59
  def __init__(
57
60
  self,
58
- regions: IterTwoString,
59
- datasets: IterTwoString,
61
+ regions: SetTwoString,
62
+ datasets: SetTwoString,
60
63
  time_filter: Optional[TimeFilter] = None,
61
64
  sync_tags: Optional[bool] = False,
65
+ extended_regions: Optional[SetTwoString] = None,
62
66
  ):
63
67
  super().__init__(
64
68
  time_filter=time_filter,
@@ -67,6 +71,7 @@ class BigQueryQueryBuilder(AbstractQueryBuilder):
67
71
  self._regions = regions
68
72
  self._datasets = datasets
69
73
  self._sync_tags = sync_tags
74
+ self._extended_regions = extended_regions or regions
70
75
 
71
76
  @staticmethod
72
77
  def _format(query: ExtractionQuery, values: dict) -> ExtractionQuery:
@@ -97,6 +102,13 @@ class BigQueryQueryBuilder(AbstractQueryBuilder):
97
102
  sharded_statement = self._load_from_file(SHARDED_FILE_PATH)
98
103
  return statement.format(sharded_statement=sharded_statement)
99
104
 
105
+ def _get_regions(self, asset: WarehouseAsset) -> SetTwoString:
106
+ return (
107
+ self._extended_regions
108
+ if asset in EXTENDED_REGION_REQUIRED
109
+ else self._regions
110
+ )
111
+
100
112
  def build(self, asset: WarehouseAsset) -> List[ExtractionQuery]:
101
113
  """
102
114
  It would be easier to stitch data directly in the query statement (UNION ALL).
@@ -110,12 +122,14 @@ class BigQueryQueryBuilder(AbstractQueryBuilder):
110
122
  query = super().build_default(asset)
111
123
 
112
124
  if asset in REGION_REQUIRED:
125
+ regions = self._get_regions(asset)
126
+
113
127
  logger.info(
114
- f"\tWill run queries with following region params: {self._regions}",
128
+ f"\tWill run queries with following region params: {regions}",
115
129
  )
116
130
  return [
117
131
  self._format(query, {"project": project, "region": region})
118
- for project, region in self._regions
132
+ for project, region in regions
119
133
  ]
120
134
 
121
135
  if asset in DATASET_REQUIRED:
@@ -1,5 +1,4 @@
1
- from typing import Iterable, Set, Tuple
1
+ from typing import Set, Tuple
2
2
 
3
3
  SetString = Set[str]
4
4
  SetTwoString = Set[Tuple[str, str]]
5
- IterTwoString = Iterable[Tuple[str, str]]
@@ -28,6 +28,7 @@ from .utils import build_path, tag_label
28
28
 
29
29
  logger = logging.getLogger(__name__)
30
30
 
31
+ _DATABRICKS_CLIENT_TIMEOUT = 60
31
32
  _MAX_NUMBER_OF_LINEAGE_ERRORS = 1000
32
33
  _MAX_THREADS = 10
33
34
  _RETRY_ATTEMPTS = 3
@@ -98,6 +99,8 @@ class DatabricksClient(APIClient):
98
99
  self._db_blocked = db_blocked
99
100
  self._has_table_tags = has_table_tags
100
101
  self._has_column_tags = has_column_tags
102
+
103
+ self._timeout = _DATABRICKS_CLIENT_TIMEOUT
101
104
  self.formatter = DatabricksFormatter()
102
105
 
103
106
  def execute_sql(
@@ -1,11 +1,15 @@
1
1
  import logging
2
- from typing import Dict, Iterator, List, Tuple
2
+ from typing import Dict, Iterator, List, Optional, Tuple
3
3
 
4
4
  from tqdm import tqdm # type: ignore
5
5
 
6
6
  from ...utils.salesforce import SalesforceBaseClient, SalesforceCredentials
7
7
  from .format import SalesforceFormatter
8
- from .soql import SOBJECT_FIELDS_QUERY_TPL, SOBJECTS_QUERY_TPL
8
+ from .soql import (
9
+ DESCRIPTION_QUERY_TPL,
10
+ SOBJECT_FIELDS_QUERY_TPL,
11
+ SOBJECTS_QUERY_TPL,
12
+ )
9
13
 
10
14
  logger = logging.getLogger(__name__)
11
15
 
@@ -90,13 +94,34 @@ class SalesforceClient(SalesforceBaseClient):
90
94
  response = self._call(self.tooling_url, params={"q": query})
91
95
  return response["records"]
92
96
 
97
+ def fetch_description(self, table_name: str) -> Optional[str]:
98
+ """Retrieve description of a table"""
99
+ query = DESCRIPTION_QUERY_TPL.format(table_name=table_name)
100
+ response = self._call(self.tooling_url, params={"q": query})
101
+ if not response["records"]:
102
+ return None
103
+ return response["records"][0]["Description"]
104
+
105
+ def add_table_descriptions(self, sobjects: List[dict]) -> List[dict]:
106
+ """
107
+ Add table descriptions.
108
+ We use the tooling API which does not handle well the LIMIT in SOQL
109
+ so we have to retrieve descriptions individually
110
+ """
111
+ described_sobjects = []
112
+ for sobject in sobjects:
113
+ description = self.fetch_description(sobject["QualifiedApiName"])
114
+ described_sobjects.append({**sobject, "Description": description})
115
+ return described_sobjects
116
+
93
117
  def tables(self) -> List[dict]:
94
118
  """
95
119
  Get Salesforce sobjects as tables
96
120
  """
97
121
  sobjects = self.fetch_sobjects()
98
122
  logger.info(f"Extracted {len(sobjects)} sobjects")
99
- return list(self.formatter.tables(sobjects))
123
+ described_sobjects = self.add_table_descriptions(sobjects)
124
+ return list(self.formatter.tables(described_sobjects))
100
125
 
101
126
  def columns(
102
127
  self, sobject_names: List[Tuple[str, str]], show_progress: bool = True
@@ -26,12 +26,14 @@ def _field_description(field: Dict[str, Any]) -> str:
26
26
  def _to_column_payload(field: dict, position: int, table_name: str) -> dict:
27
27
  field_name = field["QualifiedApiName"]
28
28
  return {
29
- "id": f"{table_name}.{field_name}",
30
- "table_id": table_name,
31
29
  "column_name": field_name,
32
- "description": _field_description(field),
33
30
  "data_type": field.get("DataType"),
31
+ "description": _field_description(field),
32
+ "id": f"{table_name}.{field_name}",
34
33
  "ordinal_position": position,
34
+ "salesforce_developer_name": field.get("DeveloperName"),
35
+ "salesforce_tooling_url": field.get("attributes", {}).get("url"),
36
+ "table_id": table_name,
35
37
  }
36
38
 
37
39
 
@@ -42,7 +44,7 @@ def _to_table_payload(sobject: dict, table_name: str) -> dict:
42
44
  "label": sobject["Label"],
43
45
  "schema_id": SCHEMA_NAME,
44
46
  "table_name": table_name,
45
- "description": "",
47
+ "description": sobject.get("Description"),
46
48
  "tags": [],
47
49
  "type": "TABLE",
48
50
  }
@@ -59,7 +59,7 @@ def test__merge_label_and_api_name():
59
59
  "label": "foo",
60
60
  "schema_id": SCHEMA_NAME,
61
61
  "table_name": expected_name,
62
- "description": "",
62
+ "description": None,
63
63
  "tags": [],
64
64
  "type": "TABLE",
65
65
  }
@@ -1,3 +1,9 @@
1
+ DESCRIPTION_QUERY_TPL = """
2
+ SELECT Description
3
+ FROM EntityDefinition
4
+ WHERE QualifiedApiName = '{table_name}'
5
+ """
6
+
1
7
  SOBJECTS_QUERY_TPL = """
2
8
  SELECT
3
9
  DeveloperName,
@@ -13,7 +19,6 @@ SOBJECTS_QUERY_TPL = """
13
19
  LIMIT {limit}
14
20
  """
15
21
 
16
-
17
22
  SOBJECT_FIELDS_QUERY_TPL = """
18
23
  SELECT
19
24
  DataType,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: castor-extractor
3
- Version: 0.18.2
3
+ Version: 0.18.7
4
4
  Summary: Extract your metadata assets.
5
5
  Home-page: https://www.castordoc.com/
6
6
  License: EULA
@@ -1,24 +1,25 @@
1
- CHANGELOG.md,sha256=qBflkUup535wc0FM_zDwfGLnLUKsgCuufANAPF7SA8w,11612
1
+ CHANGELOG.md,sha256=uL2xlPDomxLQhjD55aaCwT6ItdW_ziWGTIjjOAF0IXo,11992
2
2
  Dockerfile,sha256=HcX5z8OpeSvkScQsN-Y7CNMUig_UB6vTMDl7uqzuLGE,303
3
+ DockerfileUsage.md,sha256=2hkJQF-5JuuzfPZ7IOxgM6QgIQW7l-9oRMFVwyXC4gE,998
3
4
  LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
4
5
  README.md,sha256=uF6PXm9ocPITlKVSh9afTakHmpLx3TvawLf-CbMP3wM,3578
5
6
  castor_extractor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
7
  castor_extractor/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
8
  castor_extractor/commands/extract_bigquery.py,sha256=dU4OiYO1V0n32orvZnMh1_xtFKF_VxHNXcVsH3otY-g,1269
8
9
  castor_extractor/commands/extract_databricks.py,sha256=SVKyoa-BBUQAM6HRHf1Wdg9-tpICic2yyvXQwHcNBhA,1264
9
- castor_extractor/commands/extract_domo.py,sha256=lwJ7XeYOeLMF2plf5PK3cL56N9n2yjcDsyRM6UFwKTM,1208
10
- castor_extractor/commands/extract_looker.py,sha256=gwjIQPOHrXevgU_o2l8vDHHQT8Sb-mGdwcceb6wJJbg,1483
11
- castor_extractor/commands/extract_metabase_api.py,sha256=5le3lXz91jMYkQO-Hwv1IJF2zrZ9aNWeDbQ9k1NR-zw,841
12
- castor_extractor/commands/extract_metabase_db.py,sha256=qYHMk5Y6JjNV7KFfX4jjCwwq73vOat8Xmj1zvsRmcZ0,1483
13
- castor_extractor/commands/extract_mode.py,sha256=f8-iS8uCyjGm0Az-h-4w39DQotaT11tnScyk6vX1Jn8,967
10
+ castor_extractor/commands/extract_domo.py,sha256=jvAawUsUTHrwCn_koK6StmQr4n_b5GyvJi6uu6WS0SM,1061
11
+ castor_extractor/commands/extract_looker.py,sha256=pmz2_ZiC9R0_I64WLfNRcfitTSoWTAoeo2rgZ2lRE-Q,1208
12
+ castor_extractor/commands/extract_metabase_api.py,sha256=5Oqf70wb0M_9QiDVElzZThhedXFy7MmzMVcg7gMhYs4,790
13
+ castor_extractor/commands/extract_metabase_db.py,sha256=oexaEkWpkayV3NnXv00tJkwSix-fiufQd4Ln5qSN2EQ,1259
14
+ castor_extractor/commands/extract_mode.py,sha256=Q4iO-VAKMg4zFPejhAO-foZibL5Ht3jsnhWKwJ0oqUU,823
14
15
  castor_extractor/commands/extract_mysql.py,sha256=7AH5qMzeLTsENCOeJwtesrWg8Vo8MCEq8fx2YT74Mcw,1034
15
16
  castor_extractor/commands/extract_postgres.py,sha256=pX0RnCPi4nw6QQ6wiAuZ_Xt3ZbDuMUG9aQKuqFgJtAU,1154
16
- castor_extractor/commands/extract_powerbi.py,sha256=e6MXDNOafdp0w4ZtOnE5z5o_CxvaodUbbQFk__pDiM4,875
17
- castor_extractor/commands/extract_qlik.py,sha256=mSeyGOprTyBExes-lzp___7tgBS1KeyTVpwKAqMpkiw,989
17
+ castor_extractor/commands/extract_powerbi.py,sha256=f0G5w61KXExJ6Sw39_mJIwqQNpLorE5-LKmZXlUqvKI,783
18
+ castor_extractor/commands/extract_qlik.py,sha256=VBe_xFKh_nR0QSFFIncAaC8yDqBeMa6VunBAga7AeGg,891
18
19
  castor_extractor/commands/extract_redshift.py,sha256=bdLp7d7ImZoKCkWc3f3NXF1imIzMVT43_KPI-x4UVac,1155
19
- castor_extractor/commands/extract_salesforce.py,sha256=s2o799ePjQFYsVwZbrGEflzOIwJKtxUMb9pcF4-K90Y,1400
20
- castor_extractor/commands/extract_salesforce_reporting.py,sha256=rmAo--dl_m2x7TtJ29w1PfsKt9tQDZocTdvwTqj-fnI,1146
21
- castor_extractor/commands/extract_sigma.py,sha256=agwfKj55C81-kect3K6xSJVBv3TDuPT6fGWSTytkQ2o,703
20
+ castor_extractor/commands/extract_salesforce.py,sha256=3j3YTmMkPAwocR-B1ozJQai0UIZPtpmAyWj-hHvdWn4,1226
21
+ castor_extractor/commands/extract_salesforce_reporting.py,sha256=FdANTNiLkIPdm80XMYxWReHjdycLsIa61pyeCD-sUDk,962
22
+ castor_extractor/commands/extract_sigma.py,sha256=sxewHcZ1Doq35V2qnpX_zCKKXkrb1_9bYjUMg7BOW-k,643
22
23
  castor_extractor/commands/extract_snowflake.py,sha256=vYiruxRoo--GeMemOGsSE1w9kcKTh_y4E165HtMVzkM,1982
23
24
  castor_extractor/commands/extract_sqlserver.py,sha256=lwhbcNChaXHZgMgSOch3faVr7WJw-sDU6GHl3lzBt_0,1141
24
25
  castor_extractor/commands/extract_tableau.py,sha256=u-6UCd-kfXwyhNWYxZusqtgTTYkf4gAJS1vRIYWsAVU,1415
@@ -44,7 +45,9 @@ castor_extractor/uploader/env_test.py,sha256=ClCWWtwd2N-5ClIDUxVMeKkWfhhOTxpppsX
44
45
  castor_extractor/uploader/upload.py,sha256=W1TGqO8_PtFdR661qNlh6v-LOIRvoJoda65-5OujFXs,3239
45
46
  castor_extractor/uploader/upload_test.py,sha256=7fwstdQe7FjuwGilsCdFpEQr1qLoR2WTRUzyy93fISw,402
46
47
  castor_extractor/uploader/utils.py,sha256=Tx_i875L2vJ8btOLV3-L0UMEFiyhH8E5n0XXRyLjO0Y,793
47
- castor_extractor/utils/__init__.py,sha256=oP5yVp7Vw7PqixkumDydEBAOncaFXmwf1IHqtDwLXGs,1275
48
+ castor_extractor/utils/__init__.py,sha256=H7VtWoAqT1OWxhvTaylVG_4BrAMI_YD09WyI6vv6PYE,1327
49
+ castor_extractor/utils/argument_parser.py,sha256=S4EcIh3wNDjs3fOrQnttCcPsAmG8m_Txl7xvEh0Q37s,283
50
+ castor_extractor/utils/argument_parser_test.py,sha256=CvqXxX71V6nzKrsdGpUoU4EB_2XDtw7NSvz0DjpZt-s,634
48
51
  castor_extractor/utils/client/__init__.py,sha256=CRE-xJKm6fVV9dB8ljzB5YoOxX4I1sCD1KSgqs3Y8_Y,161
49
52
  castor_extractor/utils/client/abstract.py,sha256=aA5Qcb9TwWDSMq8WpXbGkOB20hehwX2VTpqQAwV76wk,2048
50
53
  castor_extractor/utils/client/api.py,sha256=AGDj2JH__Q_x7RQdodoVazGvjGQJ9TzNqs-XEX6Hrms,1840
@@ -110,7 +113,7 @@ castor_extractor/visualization/domo/client/credentials.py,sha256=_dcoJCQa5dKz2EG
110
113
  castor_extractor/visualization/domo/client/endpoints.py,sha256=6UI5psMYaIa1Pq_Gulb4cNna7NZ16xMotScX7yg5TRQ,2367
111
114
  castor_extractor/visualization/domo/client/pagination.py,sha256=ukVkHVzoH4mfZ29H9YcnC2YrdVolP10wv25J6Q3ehRw,821
112
115
  castor_extractor/visualization/domo/client/pagination_test.py,sha256=nV4yZWfus13QFCr-tlBUgwva21VqfpF6P-0ks_Awwis,581
113
- castor_extractor/visualization/domo/extract.py,sha256=RfaXHbjMKzcNsg27Avc-vpIH654H-g9boakWVJrCxfE,2230
116
+ castor_extractor/visualization/domo/extract.py,sha256=W_HRz7GGShlU4JVAR017q96wM8fgR8dhUzA-o8icWkc,1846
114
117
  castor_extractor/visualization/looker/__init__.py,sha256=mem0020YeP4_5zDnqRXOW3gWfiU1_vl_oKj0E8qA88g,194
115
118
  castor_extractor/visualization/looker/api/__init__.py,sha256=HDLsLy3kDWHIplAzLl1_u_bvGlgY6cuplf8myJTdfTg,169
116
119
  castor_extractor/visualization/looker/api/client.py,sha256=1iXFpwSL39K5Nimh6pFFg-PJ45nGrrdWjEUMH7cp8r8,9896
@@ -124,7 +127,7 @@ castor_extractor/visualization/looker/api/utils.py,sha256=TJqq9UBVFtS33VB1zHzT6k
124
127
  castor_extractor/visualization/looker/assets.py,sha256=K08nV6MMIpfF9r91TmCO7_62smHzGRv3gR4aIOootMQ,827
125
128
  castor_extractor/visualization/looker/constant.py,sha256=UgfOdu6Xz3stXTeh3pzWus6I3tiPK51NL8EWi0C3G2c,129
126
129
  castor_extractor/visualization/looker/constants.py,sha256=522UNbOxx-VDJn4iPUVCpSKCQwrvEOjugsuS1yzyecI,361
127
- castor_extractor/visualization/looker/extract.py,sha256=8D0AsgvhS-19I-a8CiZ-iC2m-1AtHEgDLAWh4KwW6sI,5164
130
+ castor_extractor/visualization/looker/extract.py,sha256=Rt_XJIiqfNcBt6hSZMyJc32h4ZeRPRWvrRNrX5Fx13M,5090
128
131
  castor_extractor/visualization/looker/fields.py,sha256=WmiSehmczWTufCLg4r2Ozq2grUpzxDNvIAHyGuOoGs4,636
129
132
  castor_extractor/visualization/looker/fields_test.py,sha256=7Cwq8Qky6aTZg8nCHp1gmPJtd9pGNB4QeMIRRWdHo5w,782
130
133
  castor_extractor/visualization/looker/multithreading.py,sha256=6CrMOy9kMBfhHnZI7XrpNNyBiYYCO3CE4AuIjQVlLH0,2610
@@ -151,7 +154,7 @@ castor_extractor/visualization/metabase/client/decryption.py,sha256=q1LyFlHUx1xU
151
154
  castor_extractor/visualization/metabase/client/decryption_test.py,sha256=qZAIksj1gj5MKjapDPVWz3BrkzZ3Mhd1V0EPzG2vb0w,590
152
155
  castor_extractor/visualization/metabase/client/shared.py,sha256=0uh-jwdELjZDFo5ZDyHPa7EeNCMTCMgz2k-pPQq99VI,389
153
156
  castor_extractor/visualization/metabase/errors.py,sha256=A6uykA5IO-TwcnyoJsqzNFjPHIokmSRawL1Nkiv6750,1074
154
- castor_extractor/visualization/metabase/extract.py,sha256=EEAgveLy_JUnQpaFTDiQnKRuPFALfWIpyHO7txEO9m8,1775
157
+ castor_extractor/visualization/metabase/extract.py,sha256=IK6Ri5AstlGr_2VgvemZcwnWv4G__IrZbrdkaHmbCKA,1760
155
158
  castor_extractor/visualization/metabase/types.py,sha256=2dBZCUs5_x_y2rWbUVo6IGDWbgRZQm-c-tNetbkBzg4,45
156
159
  castor_extractor/visualization/mode/__init__.py,sha256=AoXzz7dGzyjDm0mjUNmkCiT9CpkA-t_z_YEXwpxpOwI,149
157
160
  castor_extractor/visualization/mode/assets.py,sha256=Gc8Z2oi5Jn8yyh9tj6OqYYYi2-gJ4zgos6XWLxirAzM,1574
@@ -162,7 +165,7 @@ castor_extractor/visualization/mode/client/client_test.py,sha256=zljcREpnIm3ZQtj
162
165
  castor_extractor/visualization/mode/client/constants.py,sha256=_Si5AF6VnpoSfnNNgb5afV1mby1E-_1GGE2WLSR_fRI,453
163
166
  castor_extractor/visualization/mode/client/credentials.py,sha256=fE12wiTeDvyKzDJ3XwXLk7bOW00uyESJpvIVmA9Xekk,617
164
167
  castor_extractor/visualization/mode/errors.py,sha256=SKpFT2AiLOuWx2VRLyO7jbAiKcGDFXXrsebpNEKtr0E,1495
165
- castor_extractor/visualization/mode/extract.py,sha256=mR9AloDqxuxcpoYsgrl-mqDts5MvWWY4dh-wwvUHveQ,1618
168
+ castor_extractor/visualization/mode/extract.py,sha256=g_X7k8L8MldFPbuwOrnyNMF3BEH1r-IAAgNmi3KLF-U,1623
166
169
  castor_extractor/visualization/powerbi/__init__.py,sha256=VylJP6kw4yd2zGj31V-U9UXdhnPS9MK2Fz7Sd9KTkKI,119
167
170
  castor_extractor/visualization/powerbi/assets.py,sha256=SASUjxtoOMag3NAlZfhpCy0sLap7WfENEMaEZuBrw6o,801
168
171
  castor_extractor/visualization/powerbi/client/__init__.py,sha256=ewaEKS_shQlBbCpf-12J-bx3aUIpxbFZRJUL4eNOOno,97
@@ -173,7 +176,7 @@ castor_extractor/visualization/powerbi/client/rest.py,sha256=oBPc7n58We2TAcL7IuX
173
176
  castor_extractor/visualization/powerbi/client/rest_test.py,sha256=WMd8042r0nbUZECCVVC9JpJuNica2qlQBBbM8QuYfcQ,8528
174
177
  castor_extractor/visualization/powerbi/client/utils.py,sha256=0RcoWcKOdvIGH4f3lYDvufmiMo4tr_ABFlITSrvXjTs,541
175
178
  castor_extractor/visualization/powerbi/client/utils_test.py,sha256=ULHL2JLrcv0xjW2r7QF_ce2OaGeeSzajkMDywJ8ZdVA,719
176
- castor_extractor/visualization/powerbi/extract.py,sha256=PuWR4Q25k3NYNkMSrDVH5SJ6tTvBlxfdkeeP0FoSCiQ,1342
179
+ castor_extractor/visualization/powerbi/extract.py,sha256=OPF2QxP44iruQWARmpAx1HSDj7NLadPApIWVl1yRVZI,1101
177
180
  castor_extractor/visualization/qlik/__init__.py,sha256=u6lIfm_WOykBwt6SlaB7C0Dtx37XBliUbM5oWv26gC8,177
178
181
  castor_extractor/visualization/qlik/assets.py,sha256=cG3Cqrj2s4inAqfW6dOaxRape2RPiCeGSYjKsRJRLLo,1657
179
182
  castor_extractor/visualization/qlik/client/__init__.py,sha256=5O5N9Jrt3d99agFEJ28lKWs2KkDaXK-lZ07IUtLj56M,130
@@ -190,13 +193,13 @@ castor_extractor/visualization/qlik/client/engine/websocket.py,sha256=KVgymI51s7
190
193
  castor_extractor/visualization/qlik/client/master.py,sha256=MQI62W9zG5wsIE2RZ9p-xuOF2kN1H90JU7H3qXh7CYo,3146
191
194
  castor_extractor/visualization/qlik/client/rest.py,sha256=HCAzWkTNiscu_6i-9EAC65-uPqWNORevx4bMKKI-wPs,6015
192
195
  castor_extractor/visualization/qlik/client/rest_test.py,sha256=yfiUht6BcpBYS2uGdaKCH-tYe0fQ-joM4MbitKwOf24,1799
193
- castor_extractor/visualization/qlik/extract.py,sha256=1S5KPnAg7aynh_xd_gMQxRpMTyYAGyzO3-JT0VNDIs0,2300
196
+ castor_extractor/visualization/qlik/extract.py,sha256=2ACqmWheVdkIXmDtmhdunf8oyNPvTb_2LaMIF9SBZvU,2172
194
197
  castor_extractor/visualization/salesforce_reporting/__init__.py,sha256=MvArD0GKNIpCDvLIYcpKrjMjFLhMyDETK6i3k0Fb6Tk,124
195
198
  castor_extractor/visualization/salesforce_reporting/assets.py,sha256=2J-iAmJGGDufOcJUgE47M3-dEcjYXcVyVUNcmHrj79w,271
196
199
  castor_extractor/visualization/salesforce_reporting/client/__init__.py,sha256=DIA6f_vNJZqT89qVYxg98Le7QeDn2y0Qew03V3J9t9o,44
197
200
  castor_extractor/visualization/salesforce_reporting/client/rest.py,sha256=hzaXWLcYt0aAHXK46DbsLmzocjRY1llwrNj8_3TObKs,1849
198
201
  castor_extractor/visualization/salesforce_reporting/client/soql.py,sha256=DHTi058UEaZKZnxJVmsCouPfA9Lgr3gFY6vY7NiqyMM,1584
199
- castor_extractor/visualization/salesforce_reporting/extract.py,sha256=5QwZwP27uXrFJSf00El7Ku592-9fhmCtTdiUGpNkHZM,1678
202
+ castor_extractor/visualization/salesforce_reporting/extract.py,sha256=RMhlf7NeYiAO_8hkFk4-JxTghNbC0vtX3P2M80qt9U8,1339
200
203
  castor_extractor/visualization/sigma/__init__.py,sha256=GINql4yJLtjfOJgjHaWNpE13cMtnKNytiFRomwav27Q,114
201
204
  castor_extractor/visualization/sigma/assets.py,sha256=JZ1Cpxnml8P3mIJoTUM57hvylB18ErECQXaP5FF63O4,268
202
205
  castor_extractor/visualization/sigma/client/__init__.py,sha256=YQv06FBBQHvBMFg_tN0nUcmUp2NCL2s-eFTXG8rXaBg,74
@@ -205,7 +208,7 @@ castor_extractor/visualization/sigma/client/client_test.py,sha256=BdxJsmTyhslup4
205
208
  castor_extractor/visualization/sigma/client/credentials.py,sha256=vXFBUeU9XpfmCGAdm8b2dC0tLQ6ITQnGOZcYxKFn_2c,486
206
209
  castor_extractor/visualization/sigma/client/endpoints.py,sha256=7yNDL0p9wZDE7CcVOj9Z1eP2tdJE1CUT9vbfSEV2kOY,1157
207
210
  castor_extractor/visualization/sigma/client/pagination.py,sha256=EZGMaONTzZ15VINUAtH0sOmJae0JlDjOQPKBXK_V8sg,667
208
- castor_extractor/visualization/sigma/extract.py,sha256=Qh0Dt1-IXg379Q4UWYhGul_zlj1LEowy25AcuwgYXV0,2501
211
+ castor_extractor/visualization/sigma/extract.py,sha256=pnArK5-F6DZcO0f3wp3_km_Od0f18Qmw9yTmynJ-2TU,2278
209
212
  castor_extractor/visualization/tableau/__init__.py,sha256=hDohrWjkorrX01JMc154aa9vi3ZqBKmA1lkfQtMFfYE,114
210
213
  castor_extractor/visualization/tableau/assets.py,sha256=mfBUzcBCLyiU9gnTB_6rvtiB5yXSDU99nezhGC__HQo,1270
211
214
  castor_extractor/visualization/tableau/client/__init__.py,sha256=FQX1MdxS8Opn3Oyq8eby7suk3ANbLlpzzCPQ3zqvk0I,78
@@ -260,10 +263,10 @@ castor_extractor/warehouse/abstract/query.py,sha256=GAgeISCmAdrkTKzFGO79hQDf6SA6
260
263
  castor_extractor/warehouse/abstract/time_filter.py,sha256=bggIONfMmUxffkA6TwM3BsjfS2l9WFxPq8krfsau5pw,935
261
264
  castor_extractor/warehouse/abstract/time_filter_test.py,sha256=PIkegB7KOKBdpc6zIvmyl_CeQyADeFDplyQ8HTNU5LA,448
262
265
  castor_extractor/warehouse/bigquery/__init__.py,sha256=PCGNYdi7dHv-SyanUWzRuBp-ypuQ01PkDaQjVnaNhbM,170
263
- castor_extractor/warehouse/bigquery/client.py,sha256=ypLKXvvfR0RtKex4T2mNvoef4T-jRF1T_RZGCZ6qbOM,4495
266
+ castor_extractor/warehouse/bigquery/client.py,sha256=UefTefQp0S9kpRQzIzJhlm6VcH5uoCRAHHCgfRB6I58,5606
264
267
  castor_extractor/warehouse/bigquery/client_test.py,sha256=Ym8e4d--0YQwiVcNUnXLx0X-X6ZznwNMBMbMaDS5oEA,1514
265
268
  castor_extractor/warehouse/bigquery/credentials.py,sha256=oCZ8H7qpudKzwM7PRMpVAmWXt7bjIRa8Harmp-ysQJ4,425
266
- castor_extractor/warehouse/bigquery/extract.py,sha256=vZFxJC1LtUMph5UhfhYdJLnsEto18IOERKzrt71jqJg,2883
269
+ castor_extractor/warehouse/bigquery/extract.py,sha256=lwiam_9YsaUumoY4OwgRMMV4OwLhPwAwnQNCkt-JIeo,2939
267
270
  castor_extractor/warehouse/bigquery/queries/.sqlfluff,sha256=ce8UDW2k39v6RBVxgKqjOHHYMoGN9S9f7BCZNHHhox8,30
268
271
  castor_extractor/warehouse/bigquery/queries/column.sql,sha256=NxdTnHwomHTEGSc-UoXFKUwg59I9XAOwrSau7JUqGQE,1815
269
272
  castor_extractor/warehouse/bigquery/queries/cte/sharded.sql,sha256=-G7_4lxV7UPe72mYlp4HDGeM_fJjZWuXJ7Q0vxvj5_U,1454
@@ -274,10 +277,10 @@ castor_extractor/warehouse/bigquery/queries/table.sql,sha256=D15UNem03Bfcy0JYvKT
274
277
  castor_extractor/warehouse/bigquery/queries/table_with_tags.sql,sha256=mhWQHaLgyumtdJX5XyEbdrn_Qtt-RCu4cH1WLM6TN9o,2660
275
278
  castor_extractor/warehouse/bigquery/queries/user.sql,sha256=l-fkNGWJVdZwVhbFZL23B8tve-UKXAI6HRlnQq0gIwM,192
276
279
  castor_extractor/warehouse/bigquery/queries/view_ddl.sql,sha256=obCm-IN9V8_YSZTwcgNSBDD0ZXPgRjlxJjrZDSEH2MU,326
277
- castor_extractor/warehouse/bigquery/query.py,sha256=hrFfjd5jW2oQnZ6ozlkn-gDe6sCIzu5zSX19T9W6fIk,4162
278
- castor_extractor/warehouse/bigquery/types.py,sha256=LZVWSmE57lOemNbB5hBRyYmDk9bFAU4nbRaJWALl6N8,140
280
+ castor_extractor/warehouse/bigquery/query.py,sha256=5Qc8PEa-kQKpTzguj4RNCAwKyvzWt20vAESYNB0lueo,4768
281
+ castor_extractor/warehouse/bigquery/types.py,sha256=DHK3wUaaLyLMp7LP-7QkXTDYpYTZiPtvptAOkpxgp4g,88
279
282
  castor_extractor/warehouse/databricks/__init__.py,sha256=YG3YSIJgCFRjjI8eExy9T7qGnfnjWhMFh8c15KTs_BA,184
280
- castor_extractor/warehouse/databricks/client.py,sha256=pnYb6nl9U33nH6UukYP5piWGYF-m1SP2TYiWiUozM_4,20552
283
+ castor_extractor/warehouse/databricks/client.py,sha256=ohqsCyLdUJCJGTBK-IBHCV2BUujPG2VsOkc8qAapaPk,20636
281
284
  castor_extractor/warehouse/databricks/client_test.py,sha256=KNp4Hi_CC6GwiW2QDJQQwqALfUebuT9D_qL6FuP_8tY,5246
282
285
  castor_extractor/warehouse/databricks/credentials.py,sha256=iphbVynVTQXMEbJy4QaT5fer-GpOi7QtbAlg8R7-Lj4,598
283
286
  castor_extractor/warehouse/databricks/extract.py,sha256=VYygE06f7ngYWVlRa48O6drLIZF-_4IBJdyXTYfxZQU,7395
@@ -326,12 +329,12 @@ castor_extractor/warehouse/redshift/queries/user.sql,sha256=sEXveJAuNvZacvpI6Wfw
326
329
  castor_extractor/warehouse/redshift/queries/view_ddl.sql,sha256=Pkyh_QT6d4rhTeyiVcqw6O8CRl7NEhk2p7eM5YIn5kg,719
327
330
  castor_extractor/warehouse/redshift/query.py,sha256=0C81rkt2cpkWrJIxxwALDyqr-49vlqQM04y_N6wwStc,540
328
331
  castor_extractor/warehouse/salesforce/__init__.py,sha256=NR4aNea5jeE1xYqeZ_29deeN84CkN0_D_Z7CLQdJvFY,137
329
- castor_extractor/warehouse/salesforce/client.py,sha256=ETnZ3n-GFFH0XohDB2ft74wI1HMspvTefR3k7ne-pmI,3891
332
+ castor_extractor/warehouse/salesforce/client.py,sha256=F3UdD5-9umEU-u_c7uVtaksg81VZeXRW83BVsFvYDkE,4902
330
333
  castor_extractor/warehouse/salesforce/constants.py,sha256=GusduVBCPvwpk_Im6F3bDvXeNQ7hRnCMdIAjIg65RnE,52
331
334
  castor_extractor/warehouse/salesforce/extract.py,sha256=GaxkGWhdksDT_rlT24KX8DMpWnhKlhDMAUvBPGalli0,3454
332
- castor_extractor/warehouse/salesforce/format.py,sha256=f5mMJyPsVU1ZSLe5WGCUOpj2SyW7_DFfzNVNu_m2aV0,3126
333
- castor_extractor/warehouse/salesforce/format_test.py,sha256=HBlAYBoCOHaq_QOFudZlpcZb5TyZWV9v-cxK4tklg50,2253
334
- castor_extractor/warehouse/salesforce/soql.py,sha256=pAEaJE8ZUcyN3ptBsZGzNcGRhCcU81X6RMlnF1HRMw4,1063
335
+ castor_extractor/warehouse/salesforce/format.py,sha256=eiPM_4i_m3FEg_2jkMYlhaRBg3gTvV-9xQuk8ghJZiM,3289
336
+ castor_extractor/warehouse/salesforce/format_test.py,sha256=aWUUYDAX-hN1XQJHlv6ZtI2noXWjRobV-9zdjiXR5n4,2255
337
+ castor_extractor/warehouse/salesforce/soql.py,sha256=XB8ohKwHFfC4Xger7Y84DXLW17IJDye_bZ3FL6DCcOI,1188
335
338
  castor_extractor/warehouse/snowflake/__init__.py,sha256=TEGXTyxWp4Tr9gIHb-UFVTRKj6YWmrRtqHruiKSZGiY,174
336
339
  castor_extractor/warehouse/snowflake/client.py,sha256=XT0QLVNff_586SDuMe40iu8FCwPDh2uBV5aKc1Ql914,5555
337
340
  castor_extractor/warehouse/snowflake/client_test.py,sha256=ihWtOOAQfh8pu5JTr_EWfqefKOVIaJXznACURzaU1Qs,1432
@@ -372,8 +375,8 @@ castor_extractor/warehouse/synapse/queries/schema.sql,sha256=aX9xNrBD_ydwl-znGSF
372
375
  castor_extractor/warehouse/synapse/queries/table.sql,sha256=mCE8bR1Vb7j7SwZW2gafcXidQ2fo1HwxcybA8wP2Kfs,1049
373
376
  castor_extractor/warehouse/synapse/queries/user.sql,sha256=sTb_SS7Zj3AXW1SggKPLNMCd0qoTpL7XI_BJRMaEpBg,67
374
377
  castor_extractor/warehouse/synapse/queries/view_ddl.sql,sha256=3EVbp5_yTgdByHFIPLHmnoOnqqLE77SrjAwFDvu4e54,249
375
- castor_extractor-0.18.2.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
376
- castor_extractor-0.18.2.dist-info/METADATA,sha256=5Fjm4WkyShhmoOMNzgj8QB8tpOq_jw91ufKOVMIVeU0,7209
377
- castor_extractor-0.18.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
378
- castor_extractor-0.18.2.dist-info/entry_points.txt,sha256=SbyPk58Gh-FRztfCNnUZQ6w7SatzNJFZ6GIJLNsy7tI,1427
379
- castor_extractor-0.18.2.dist-info/RECORD,,
378
+ castor_extractor-0.18.7.dist-info/LICENCE,sha256=sL-IGa4hweyya1HgzMskrRdybbIa2cktzxb5qmUgDg8,8254
379
+ castor_extractor-0.18.7.dist-info/METADATA,sha256=QCkJjnGnFsOfbZ808-jK6dthnNFKqzC_YLydG1sUSqM,7209
380
+ castor_extractor-0.18.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
381
+ castor_extractor-0.18.7.dist-info/entry_points.txt,sha256=SbyPk58Gh-FRztfCNnUZQ6w7SatzNJFZ6GIJLNsy7tI,1427
382
+ castor_extractor-0.18.7.dist-info/RECORD,,