castor-extractor 0.18.7__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of castor-extractor might be problematic.

Files changed (62)
  1. CHANGELOG.md +40 -1
  2. castor_extractor/commands/extract_looker.py +3 -3
  3. castor_extractor/commands/extract_metabase_api.py +1 -1
  4. castor_extractor/commands/extract_metabase_db.py +1 -1
  5. castor_extractor/commands/extract_notion.py +16 -0
  6. castor_extractor/commands/file_check.py +5 -2
  7. castor_extractor/commands/upload.py +5 -3
  8. castor_extractor/knowledge/__init__.py +0 -0
  9. castor_extractor/knowledge/notion/__init__.py +3 -0
  10. castor_extractor/knowledge/notion/assets.py +9 -0
  11. castor_extractor/knowledge/notion/client/__init__.py +2 -0
  12. castor_extractor/knowledge/notion/client/client.py +145 -0
  13. castor_extractor/knowledge/notion/client/client_test.py +67 -0
  14. castor_extractor/knowledge/notion/client/constants.py +3 -0
  15. castor_extractor/knowledge/notion/client/credentials.py +16 -0
  16. castor_extractor/knowledge/notion/client/endpoints.py +18 -0
  17. castor_extractor/knowledge/notion/client/pagination.py +16 -0
  18. castor_extractor/knowledge/notion/extract.py +59 -0
  19. castor_extractor/quality/__init__.py +0 -0
  20. castor_extractor/quality/soda/__init__.py +2 -0
  21. castor_extractor/quality/soda/assets.py +8 -0
  22. castor_extractor/quality/soda/client/__init__.py +1 -0
  23. castor_extractor/quality/soda/client/client.py +99 -0
  24. castor_extractor/quality/soda/client/credentials.py +28 -0
  25. castor_extractor/quality/soda/client/endpoints.py +13 -0
  26. castor_extractor/types.py +1 -3
  27. castor_extractor/uploader/upload.py +0 -1
  28. castor_extractor/utils/__init__.py +2 -0
  29. castor_extractor/utils/argument_parser_test.py +0 -1
  30. castor_extractor/utils/client/api.py +29 -11
  31. castor_extractor/utils/client/api_test.py +9 -1
  32. castor_extractor/utils/object_test.py +1 -1
  33. castor_extractor/utils/pager/pager.py +1 -1
  34. castor_extractor/utils/pager/pager_on_id.py +11 -6
  35. castor_extractor/utils/safe_request.py +5 -3
  36. castor_extractor/utils/safe_request_test.py +1 -3
  37. castor_extractor/utils/string_test.py +1 -1
  38. castor_extractor/utils/time.py +11 -0
  39. castor_extractor/visualization/domo/client/client.py +2 -3
  40. castor_extractor/visualization/looker/api/client.py +35 -0
  41. castor_extractor/visualization/looker/api/extraction_parameters.py +2 -1
  42. castor_extractor/visualization/looker/extract.py +2 -2
  43. castor_extractor/visualization/metabase/assets.py +3 -1
  44. castor_extractor/visualization/metabase/extract.py +20 -8
  45. castor_extractor/visualization/mode/client/client.py +1 -1
  46. castor_extractor/visualization/powerbi/client/constants.py +1 -1
  47. castor_extractor/visualization/powerbi/client/rest.py +5 -15
  48. castor_extractor/visualization/qlik/client/engine/client.py +36 -5
  49. castor_extractor/visualization/qlik/client/engine/constants.py +1 -0
  50. castor_extractor/visualization/qlik/client/engine/error.py +18 -1
  51. castor_extractor/visualization/salesforce_reporting/client/soql.py +3 -1
  52. castor_extractor/visualization/tableau/extract.py +40 -16
  53. castor_extractor/visualization/tableau_revamp/client/client.py +2 -5
  54. castor_extractor/visualization/tableau_revamp/extract.py +3 -2
  55. castor_extractor/warehouse/databricks/client.py +54 -35
  56. castor_extractor/warehouse/databricks/client_test.py +44 -31
  57. castor_extractor/warehouse/salesforce/format_test.py +0 -1
  58. {castor_extractor-0.18.7.dist-info → castor_extractor-0.19.0.dist-info}/METADATA +4 -4
  59. {castor_extractor-0.18.7.dist-info → castor_extractor-0.19.0.dist-info}/RECORD +62 -43
  60. {castor_extractor-0.18.7.dist-info → castor_extractor-0.19.0.dist-info}/entry_points.txt +1 -0
  61. {castor_extractor-0.18.7.dist-info → castor_extractor-0.19.0.dist-info}/LICENCE +0 -0
  62. {castor_extractor-0.18.7.dist-info → castor_extractor-0.19.0.dist-info}/WHEEL +0 -0

castor_extractor/utils/__init__.py
@@ -33,8 +33,10 @@ from .time import (
     current_datetime,
     current_timestamp,
     date_after,
+    format_date,
     past_date,
     timestamp_ms,
+    yesterday,
 )
 from .type import Callback, Getter, JsonType, SerializedAsset
 from .validation import validate_baseurl

castor_extractor/utils/argument_parser_test.py
@@ -4,7 +4,6 @@ from .argument_parser import parse_filled_arguments


 class MockArgumentParser:
-
     def __init__(self):
         self.attributes = {}


castor_extractor/utils/client/api.py
@@ -3,6 +3,9 @@ from typing import Any, Callable, Dict, Literal, Optional

 import requests

+from ...types import ExternalAsset
+from ..safe_request import RequestSafeMode, handle_response
+
 logger = logging.getLogger(__name__)

 DEFAULT_TIMEOUT_S = 30
@@ -11,16 +14,31 @@ DEFAULT_TIMEOUT_S = 30
 HttpMethod = Literal["GET", "OPTIONS", "HEAD", "POST", "PUT", "PATCH", "DELETE"]


+def _authentication_header(token: Optional[str] = None) -> Dict[str, str]:
+    if token:
+        return {"Authorization": f"Bearer {token}"}
+    return dict()
+
+
 class APIClient:
     """
     API client
     - authentication via access token
     """

-    def __init__(self, host: str, token: Optional[str] = None):
+    def __init__(
+        self,
+        host: str,
+        token: Optional[str] = None,
+        headers: Optional[Dict[str, str]] = None,
+        timeout: int = DEFAULT_TIMEOUT_S,
+        safe_mode: RequestSafeMode = RequestSafeMode(),
+    ):
         self._host = host
         self._token = token or ""
-        self._timeout = DEFAULT_TIMEOUT_S
+        self._timeout = timeout
+        self._base_headers = headers or {}
+        self.safe_mode = safe_mode

     @staticmethod
     def build_url(host: str, path: str):
@@ -28,10 +46,10 @@ class APIClient:
             host = "https://" + host
         return f"{host.strip('/')}/{path}"

-    def _headers(self) -> Dict[str, str]:
-        if self._token:
-            return {"Authorization": f"Bearer {self._token}"}
-        return dict()
+    @property
+    def _headers(self):
+        """Returns specified headers and authentication headers altogether"""
+        return {**self._base_headers, **_authentication_header(self._token)}

     def _call(
         self,
@@ -43,20 +61,20 @@ class APIClient:
         processor: Optional[Callable] = None,
     ) -> Any:
         logger.debug(f"Calling {method} on {url}")
-        result = requests.request(
+        response = requests.request(
             method,
             url,
-            headers=self._headers(),
+            headers=self._headers,
             params=params,
             json=data,
             timeout=self._timeout,
         )
-        result.raise_for_status()
+        response_payload = handle_response(response, self.safe_mode)

         if processor:
-            return processor(result)
+            return processor(response)

-        return result.json()
+        return response_payload

     def get(
         self,
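
Note: a minimal usage sketch of the reworked APIClient constructor. The import paths are assumed from the file layout above, and the host, token, and header values are hypothetical:

    from castor_extractor.utils.client.api import APIClient
    from castor_extractor.utils.safe_request import RequestSafeMode

    # Extra headers, timeout and safe mode are now configurable; previously only host/token were accepted
    client = APIClient(
        host="example.cloud.databricks.com",     # hypothetical host
        token="my-api-token",                    # hypothetical token
        headers={"Accept": "application/json"},  # merged with the Bearer authentication header
        timeout=60,
        safe_mode=RequestSafeMode(),             # responses now go through handle_response()
    )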

castor_extractor/utils/client/api_test.py
@@ -1,4 +1,4 @@
-from .api import APIClient
+from .api import APIClient, _authentication_header


 def test_APIClient_build_url():
@@ -14,3 +14,11 @@ def test_APIClient_build_url():

     host_with_trailing_slash = "https://3.14.azuredatabricks.net/"
     assert expected == APIClient.build_url(host_with_trailing_slash, path)
+
+
+def test__authentication_header():
+    expected_headers = {"Authorization": "Bearer my_token"}
+    assert _authentication_header("my_token") == expected_headers
+
+    assert _authentication_header("") == {}
+    assert _authentication_header() == {}

castor_extractor/utils/object_test.py
@@ -19,7 +19,7 @@ class _User:


 def test_deep_serialize__None():
-    assert deep_serialize(None) == None
+    assert deep_serialize(None) is None


 def test_deep_serialize__str():

castor_extractor/utils/pager/pager.py
@@ -48,7 +48,7 @@ class AbstractPager(Generic[T]):
         return should_stop


-class Pager(AbstractPager):
+class Pager(AbstractPager[T]):
     def __init__(
         self,
         callback: Callable[[int, int], Sequence[T]],

castor_extractor/utils/pager/pager_on_id.py
@@ -1,11 +1,16 @@
-from typing import Callable, Iterator, Optional, Sequence, TypeVar
+from typing import Callable, Iterator, Optional, Protocol, Sequence, TypeVar
 from uuid import UUID

 from .pager import DEFAULT_PER_PAGE, AbstractPager, PagerStopStrategy

 _DEFAULT_MIN_UUID = UUID("00000000-0000-0000-0000-000000000000")

-T = TypeVar("T")
+
+class IndexableObject(Protocol):
+    def __getitem__(self, key: str) -> UUID: ...
+
+
+Indexable = TypeVar("Indexable", bound=IndexableObject)


 class PagerOnIdLogger:
@@ -16,10 +21,10 @@ class PagerOnIdLogger:
         pass


-class PagerOnId(AbstractPager):
+class PagerOnId(AbstractPager[Indexable]):
     def __init__(
         self,
-        callback: Callable[[UUID, int], Sequence[T]],
+        callback: Callable[[UUID, int], Sequence[Indexable]],
         *,
         logger: Optional[PagerOnIdLogger] = None,
         stop_strategy: PagerStopStrategy = PagerStopStrategy.EMPTY_PAGE,
@@ -29,13 +34,13 @@ class PagerOnId(AbstractPager):
         self._stop_strategy = stop_strategy

     @staticmethod
-    def _max_id(items: Sequence) -> UUID:
+    def _max_id(items: Sequence[Indexable]) -> UUID:
         return max(item["id"] for item in items)

     def iterator(
         self,
         per_page: int = DEFAULT_PER_PAGE,
-    ) -> Iterator[Sequence[T]]:
+    ) -> Iterator[Sequence[Indexable]]:
         """Yields data provided by the callback as a list using the greatest UUID as a reference point"""
         greater_than_id = _DEFAULT_MIN_UUID
         stop_on_empty_page = self._stop_strategy == PagerStopStrategy.EMPTY_PAGE
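
Note: a sketch of what the new Indexable protocol expects from PagerOnId callbacks. The callback and its data are stand-ins, and the import path is assumed from the file layout:

    from typing import List
    from uuid import UUID

    from castor_extractor.utils.pager.pager_on_id import PagerOnId

    # Items must support item["id"] -> UUID, as declared by IndexableObject
    def fetch_page(greater_than_id: UUID, per_page: int) -> List[dict]:
        return []  # hypothetical source: up to `per_page` rows whose "id" is greater than `greater_than_id`

    pager = PagerOnId(fetch_page)
    for page in pager.iterator(per_page=100):
        pass  # each page is a sequence of dict-like items, paged by their greatest UUID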

castor_extractor/utils/safe_request.py
@@ -1,5 +1,5 @@
 import logging
-from typing import List, Tuple, Union
+from typing import Any, List, Optional, Tuple, Union

 from requests import HTTPError, Response

@@ -39,12 +39,14 @@ class RequestSafeMode:


 def handle_response(
-    response: Response, safe_mode: RequestSafeMode
-) -> ResponseJson:
+    response: Response,
+    safe_mode: Optional[RequestSafeMode] = None,
+) -> Any:
     """
     Util to handle a HTTP Response based on the response status code and the
     safe mode used
     """
+    safe_mode = safe_mode if safe_mode else RequestSafeMode()
     try:
         response.raise_for_status()
     except HTTPError as e:
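
Note: with the new default, callers can omit the safe mode argument entirely; a minimal sketch, importing from the module path shown in the file list and using a hypothetical endpoint:

    import requests

    from castor_extractor.utils.safe_request import RequestSafeMode, handle_response

    response = requests.get("https://api.example.com/resource")  # hypothetical endpoint
    payload = handle_response(response)  # same as handle_response(response, RequestSafeMode())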

castor_extractor/utils/safe_request_test.py
@@ -15,10 +15,8 @@ def mock_response(status_code: int):


 def test_http_error_with_no_safe_mode():
-    safe_params = RequestSafeMode()  # Caught
-
     with pytest.raises(HTTPError):
-        handle_response(mock_response(HTTPStatus.FORBIDDEN), safe_params)
+        handle_response(mock_response(HTTPStatus.FORBIDDEN))


 def test_http_error_with_no_status_code():

castor_extractor/utils/string_test.py
@@ -75,4 +75,4 @@ def test_decode_when_bytes():
     assert not decode_when_bytes(None)
     assert decode_when_bytes(1) == 1
     assert decode_when_bytes(1.34) == 1.34
-    assert decode_when_bytes(True) == True
+    assert decode_when_bytes(True)

castor_extractor/utils/time.py
@@ -1,4 +1,7 @@
 from datetime import date, datetime, timedelta, timezone
+from typing import Union
+
+ISO_FORMAT = "%Y-%m-%dT%H:%M:%S"


 def current_datetime() -> datetime:
@@ -54,3 +57,11 @@ def at_midnight(date_: date) -> datetime:
 def date_after(day: date, future_days: int) -> date:
     """returns the date `future_days` after `day`"""
     return day + timedelta(future_days)
+
+
+def format_date(timestamp: Union[datetime, date]) -> str:
+    return timestamp.strftime(ISO_FORMAT)
+
+
+def yesterday() -> date:
+    return current_date() - timedelta(days=1)
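
Note: both new helpers are re-exported from castor_extractor.utils (see the __init__ hunk above); expected behaviour, illustrated on an arbitrary value:

    from datetime import datetime

    from castor_extractor.utils import format_date, yesterday

    format_date(datetime(2024, 5, 1, 13, 30, 0))  # -> "2024-05-01T13:30:00"
    yesterday()                                   # -> the current date minus one day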

castor_extractor/visualization/domo/client/client.py
@@ -1,13 +1,12 @@
 import logging
 from datetime import datetime, timedelta
 from http import HTTPStatus
-from typing import Iterator, List, Optional, Set
+from typing import Any, Iterator, List, Optional, Set

 import requests

 from ....utils import (
     RequestSafeMode,
-    ResponseJson,
     at_midnight,
     current_date,
     empty_iterator,
@@ -108,7 +107,7 @@ class DomoClient:
         endpoint: Endpoint,
         params: Optional[dict] = None,
         asset_id: Optional[str] = None,
-    ) -> ResponseJson:
+    ) -> Any:
         params = params if params else {}
         is_private = endpoint.is_private
         headers = self._private_headers if is_private else self._bearer_auth()

castor_extractor/visualization/looker/api/client.py
@@ -22,6 +22,7 @@ from looker_sdk.sdk.api40.models import (
 from looker_sdk.sdk.constants import sdk_version

 from ....utils import Pager, PagerLogger, SafeMode, past_date, safe_mode
+from ..assets import LookerAsset
 from ..constants import DEFAULT_LOOKER_PAGE_SIZE
 from ..fields import format_fields
 from .constants import (
@@ -291,3 +292,37 @@ class ApiClient:
         self._on_api_call()

         return user_attributes
+
+    def fetch(
+        self,
+        asset: LookerAsset,
+        *,
+        folder_id: Optional[str] = None,
+        explore_names: Optional[Iterator[Tuple[str, str]]] = None,
+    ) -> list:
+        if asset == LookerAsset.USERS:
+            return self.users()
+        if asset == LookerAsset.CONNECTIONS:
+            return self.connections()
+        if asset == LookerAsset.LOOKS:
+            return self.looks(folder_id=folder_id)
+        if asset == LookerAsset.DASHBOARDS:
+            return self.dashboards(folder_id=folder_id)
+        if asset == LookerAsset.CONTENT_VIEWS:
+            return self.content_views()
+        if asset == LookerAsset.EXPLORES:
+            assert explore_names is not None
+            return list(self.explores(explore_names=explore_names))
+        if asset == LookerAsset.FOLDERS:
+            return self.folders()
+        if asset == LookerAsset.GROUPS_HIERARCHY:
+            return self.groups_hierarchy()
+        if asset == LookerAsset.GROUPS_ROLES:
+            return self.groups_roles()
+        if asset == LookerAsset.LOOKML_MODELS:
+            return self.lookml_models()
+        if asset == LookerAsset.PROJECTS:
+            return self.projects()
+        if asset == LookerAsset.USERS_ATTRIBUTES:
+            return self.users_attributes()
+        raise ValueError(f"Asset {asset.value} is not supported")
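
Note: a sketch of the new ApiClient.fetch dispatcher; `client` is an already-configured ApiClient (its construction is not part of this diff) and the folder id is hypothetical:

    from castor_extractor.visualization.looker.assets import LookerAsset

    dashboards = client.fetch(LookerAsset.DASHBOARDS, folder_id="42")
    users = client.fetch(LookerAsset.USERS)
    # any unhandled asset raises ValueError(f"Asset {asset.value} is not supported")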

castor_extractor/visualization/looker/api/extraction_parameters.py
@@ -1,6 +1,7 @@
 from pydantic import Field, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict

+from ....utils import OUTPUT_DIR
 from ..constants import (
     DEFAULT_LOOKER_PAGE_SIZE,
     DEFAULT_LOOKER_THREAD_POOL_SIZE,
@@ -24,7 +25,7 @@ class ExtractionParameters(BaseSettings):

     is_safe_mode: bool = False
     log_to_stdout: bool
-    output_directory: str
+    output: str = Field(validation_alias=OUTPUT_DIR)
     search_per_folder: bool
     page_size: int = Field(default=DEFAULT_LOOKER_PAGE_SIZE)
     thread_pool_size: int = Field(default=DEFAULT_LOOKER_THREAD_POOL_SIZE)

castor_extractor/visualization/looker/extract.py
@@ -39,7 +39,7 @@ def _safe_mode(
 ) -> Optional[SafeMode]:
     if extraction_parameters.is_safe_mode:
         return None
-    add_logging_file_handler(extraction_parameters.output_directory)
+    add_logging_file_handler(extraction_parameters.output)
     return SafeMode((Exception,), float("inf"))


@@ -127,7 +127,7 @@ def extract_all(**kwargs) -> None:
     output_directory
     """
     extraction_parameters = ExtractionParameters(**kwargs)
-    output_directory = extraction_parameters.output_directory
+    output_directory = extraction_parameters.output

     credentials = LookerCredentials(**kwargs)

castor_extractor/visualization/metabase/assets.py
@@ -1,3 +1,5 @@
+from typing import Dict, Tuple
+
 from ...types import ExternalAsset


@@ -13,7 +15,7 @@ class MetabaseAsset(ExternalAsset):
     DASHBOARD_CARDS = "dashboard_cards"


-EXPORTED_FIELDS = {
+EXPORTED_FIELDS: Dict[MetabaseAsset, Tuple[str, ...]] = {
     MetabaseAsset.COLLECTION: (
         "id",
         "name",

castor_extractor/visualization/metabase/extract.py
@@ -24,19 +24,31 @@ def iterate_all_data(
     """Iterate over the extracted Data From metabase"""

     yield MetabaseAsset.USER, deep_serialize(client.fetch(MetabaseAsset.USER))
-    yield MetabaseAsset.COLLECTION, deep_serialize(
-        client.fetch(MetabaseAsset.COLLECTION),
+    yield (
+        MetabaseAsset.COLLECTION,
+        deep_serialize(
+            client.fetch(MetabaseAsset.COLLECTION),
+        ),
     )
-    yield MetabaseAsset.DATABASE, deep_serialize(
-        client.fetch(MetabaseAsset.DATABASE),
+    yield (
+        MetabaseAsset.DATABASE,
+        deep_serialize(
+            client.fetch(MetabaseAsset.DATABASE),
+        ),
     )
     yield MetabaseAsset.TABLE, deep_serialize(client.fetch(MetabaseAsset.TABLE))
     yield MetabaseAsset.CARD, deep_serialize(client.fetch(MetabaseAsset.CARD))
-    yield MetabaseAsset.DASHBOARD, deep_serialize(
-        client.fetch(MetabaseAsset.DASHBOARD),
+    yield (
+        MetabaseAsset.DASHBOARD,
+        deep_serialize(
+            client.fetch(MetabaseAsset.DASHBOARD),
+        ),
     )
-    yield MetabaseAsset.DASHBOARD_CARDS, deep_serialize(
-        client.fetch(MetabaseAsset.DASHBOARD_CARDS),
+    yield (
+        MetabaseAsset.DASHBOARD_CARDS,
+        deep_serialize(
+            client.fetch(MetabaseAsset.DASHBOARD_CARDS),
+        ),
     )


castor_extractor/visualization/mode/client/client.py
@@ -220,6 +220,6 @@ class Client:
         # generic calls
         # example: https://modeanalytics.com/api/{workspace}/spaces
         # example: https://modeanalytics.com/api/{workspace}/data_sources
-        result = self._call(resource_name=asset.value)
+        result = self._call(resource_name=str(asset.value))
         logger.info(f"{len(result)} rows extracted")
         return self._post_processing(asset, result)

castor_extractor/visualization/powerbi/client/constants.py
@@ -1,5 +1,5 @@
 """
-File regrouping all constants used in PowerBi client
+File regrouping all constants used in PowerBi client
 """

 DEFAULT_TIMEOUT_IN_SECS = 30

castor_extractor/visualization/powerbi/client/rest.py
@@ -1,12 +1,12 @@
 import logging
-from datetime import date, datetime, timedelta
+from datetime import date, datetime
 from time import sleep
 from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union

 import msal  # type: ignore
 import requests

-from ....utils import at_midnight, current_date
+from ....utils import at_midnight, format_date, yesterday
 from ..assets import PowerBiAsset
 from .constants import (
     DEFAULT_TIMEOUT_IN_SECS,
@@ -23,19 +23,9 @@ from .utils import batch_size_is_valid_or_assert, datetime_is_recent_or_assert

 logger = logging.getLogger(__name__)

-_FORMAT = "%Y-%m-%dT%H:%M:%S"
-
-
-def _format(timestamp: datetime) -> str:
-    return timestamp.strftime(_FORMAT)
-
-
-def _yesterday() -> date:
-    return current_date() - timedelta(days=1)
-

 def _time_filter(day: Optional[date]) -> Tuple[datetime, datetime]:
-    target_day = day or _yesterday()
+    target_day = day or yesterday()
     start = at_midnight(target_day)
     end = datetime.combine(target_day, datetime.max.time())
     return start, end
@@ -51,8 +41,8 @@ def _url(
     url = Urls.ACTIVITY_EVENTS
     start, end = _time_filter(day)
     url += "?$filter=Activity eq 'viewreport'"
-    url += f"&startDateTime='{_format(start)}'"
-    url += f"&endDateTime='{_format(end)}'"
+    url += f"&startDateTime='{format_date(start)}'"
+    url += f"&endDateTime='{format_date(end)}'"
     return url

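
Note: a rough reconstruction of the activity-events filter now built with the shared helpers, assuming at_midnight returns the given day at 00:00:00; the Urls.ACTIVITY_EVENTS prefix is not shown in this diff and is left out:

    from datetime import date, datetime

    from castor_extractor.utils import at_midnight, format_date

    day = date(2024, 5, 1)                            # example day
    start = at_midnight(day)                          # assumed: 2024-05-01 00:00:00
    end = datetime.combine(day, datetime.max.time())  # 2024-05-01 23:59:59.999999
    query = (
        "?$filter=Activity eq 'viewreport'"
        f"&startDateTime='{format_date(start)}'"      # '2024-05-01T00:00:00'
        f"&endDateTime='{format_date(end)}'"          # '2024-05-01T23:59:59'
    )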

castor_extractor/visualization/qlik/client/engine/client.py
@@ -3,7 +3,12 @@ import logging
 from .....utils import SafeMode, safe_mode
 from .constants import MEASURES_SESSION_PARAMS, JsonRpcMethod
 from .credentials import QlikCredentials
-from .error import AccessDeniedError, AppSizeExceededError
+from .error import (
+    AccessDeniedError,
+    AppSizeExceededError,
+    PersistenceReadFailedError,
+    QlikResponseKeyError,
+)
 from .json_rpc import JsonRpcClient
 from .websocket import open_websocket

@@ -11,8 +16,29 @@ logger = logging.getLogger(__name__)


 def _handle(response: dict) -> int:
-    """Returns the object Handle from the response payload"""
-    return response["result"]["qReturn"]["qHandle"]
+    """
+    Returns the object Handle from the response payload, or raises an error
+    if one of the keys can't be found
+    """
+    try:
+        return response["result"]["qReturn"]["qHandle"]
+    except KeyError:
+        raise QlikResponseKeyError(
+            f"Could not fetch handle from response {response}"
+        )
+
+
+def _measure(response: dict) -> list:
+    """
+    Returns the measure from the response payload, or raises a custom error
+    if one of the keys can't be found
+    """
+    try:
+        return response["result"]["qLayout"]["qMeasureList"]["qItems"]
+    except KeyError:
+        raise QlikResponseKeyError(
+            f"Could not fetch measure from response {response}"
+        )


 def _list_measures(client: JsonRpcClient, app_id: str) -> list:
@@ -41,7 +67,7 @@ def _list_measures(client: JsonRpcClient, app_id: str) -> list:
         method=JsonRpcMethod.GET_LAYOUT,
         handle=session_handle,
     )
-    return response["result"]["qLayout"]["qMeasureList"]["qItems"]
+    return _measure(response)


 class EngineApiClient:
@@ -53,7 +79,12 @@ class EngineApiClient:
     def __init__(self, credentials: QlikCredentials):
         self.credentials = credentials
         self._safe_mode = SafeMode(
-            exceptions=(AccessDeniedError, AppSizeExceededError),
+            exceptions=(
+                AccessDeniedError,
+                AppSizeExceededError,
+                PersistenceReadFailedError,
+                QlikResponseKeyError,
+            ),
             max_errors=float("inf"),
         )


castor_extractor/visualization/qlik/client/engine/constants.py
@@ -6,6 +6,7 @@ DEFAULT_HANDLE = -1

 ACCESS_DENIED_ERROR_CODE = 5
 APP_SIZE_EXCEEDED_ERROR_CODE = 1015
+PERSISTENCE_READ_FAILED_ERROR_CODE = 9001

 MEASURES_SESSION_PARAMS = (
     {

castor_extractor/visualization/qlik/client/engine/error.py
@@ -1,6 +1,10 @@
 from typing import Dict, Type

-from .constants import ACCESS_DENIED_ERROR_CODE, APP_SIZE_EXCEEDED_ERROR_CODE
+from .constants import (
+    ACCESS_DENIED_ERROR_CODE,
+    APP_SIZE_EXCEEDED_ERROR_CODE,
+    PERSISTENCE_READ_FAILED_ERROR_CODE,
+)


 class JsonRpcError(Exception):
@@ -32,9 +36,22 @@ class AppSizeExceededError(JsonRpcError):
     ...


+class PersistenceReadFailedError(JsonRpcError):
+    """
+    Error class to be raised when JSON-RPC error is persistence read failed
+    """
+
+
+class QlikResponseKeyError(Exception):
+    """
+    Error class to be raised when an expected Key is missing from the JSON-RPC response
+    """
+
+
 ERROR_CODE_MAPPING: Dict[int, Type[JsonRpcError]] = {
     ACCESS_DENIED_ERROR_CODE: AccessDeniedError,
     APP_SIZE_EXCEEDED_ERROR_CODE: AppSizeExceededError,
+    PERSISTENCE_READ_FAILED_ERROR_CODE: PersistenceReadFailedError,
 }

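
Note: a small check of the extended code-to-exception mapping, importing from the module path shown in the file list:

    from castor_extractor.visualization.qlik.client.engine.error import (
        ERROR_CODE_MAPPING,
        PersistenceReadFailedError,
    )

    # JSON-RPC error code 9001 ("persistence read failed") now maps to its own exception class,
    # which the engine client's SafeMode tolerates instead of aborting extraction
    assert ERROR_CODE_MAPPING[9001] is PersistenceReadFailedError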

castor_extractor/visualization/salesforce_reporting/client/soql.py
@@ -1,6 +1,8 @@
+from typing import Dict
+
 from ..assets import SalesforceReportingAsset

-queries = {
+queries: Dict[SalesforceReportingAsset, str] = {
     SalesforceReportingAsset.DASHBOARDS: """
        SELECT
            CreatedBy.Id,