mercuto-client 0.2.8__py3-none-any.whl → 0.3.0a0__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.

Potentially problematic release: this version of mercuto-client might be problematic.

Files changed (37)
  1. mercuto_client/__init__.py +2 -24
  2. mercuto_client/_authentication.py +72 -0
  3. mercuto_client/_tests/test_ingester/test_parsers.py +67 -67
  4. mercuto_client/_tests/test_mocking/__init__.py +0 -0
  5. mercuto_client/_tests/test_mocking/conftest.py +13 -0
  6. mercuto_client/_tests/test_mocking/test_mock_identity.py +8 -0
  7. mercuto_client/acl.py +16 -10
  8. mercuto_client/client.py +53 -779
  9. mercuto_client/exceptions.py +5 -1
  10. mercuto_client/ingester/__main__.py +1 -1
  11. mercuto_client/ingester/mercuto.py +15 -16
  12. mercuto_client/ingester/parsers/__init__.py +3 -3
  13. mercuto_client/ingester/parsers/campbell.py +2 -2
  14. mercuto_client/ingester/parsers/generic_csv.py +5 -5
  15. mercuto_client/ingester/parsers/worldsensing.py +4 -3
  16. mercuto_client/mocks/__init__.py +92 -0
  17. mercuto_client/mocks/_utility.py +69 -0
  18. mercuto_client/mocks/mock_data.py +402 -0
  19. mercuto_client/mocks/mock_fatigue.py +30 -0
  20. mercuto_client/mocks/mock_identity.py +188 -0
  21. mercuto_client/modules/__init__.py +19 -0
  22. mercuto_client/modules/_util.py +18 -0
  23. mercuto_client/modules/core.py +674 -0
  24. mercuto_client/modules/data.py +623 -0
  25. mercuto_client/modules/fatigue.py +189 -0
  26. mercuto_client/modules/identity.py +254 -0
  27. mercuto_client/{ingester/util.py → util.py} +27 -11
  28. {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0a0.dist-info}/METADATA +10 -3
  29. mercuto_client-0.3.0a0.dist-info/RECORD +41 -0
  30. mercuto_client/_tests/test_mocking.py +0 -93
  31. mercuto_client/_util.py +0 -13
  32. mercuto_client/mocks.py +0 -203
  33. mercuto_client/types.py +0 -409
  34. mercuto_client-0.2.8.dist-info/RECORD +0 -30
  35. {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0a0.dist-info}/WHEEL +0 -0
  36. {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0a0.dist-info}/licenses/LICENSE +0 -0
  37. {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0a0.dist-info}/top_level.txt +0 -0
mercuto_client/exceptions.py

@@ -1,4 +1,5 @@
 import json
+from typing import Any


 class MercutoClientException(Exception):
@@ -11,5 +12,8 @@ class MercutoHTTPException(MercutoClientException):
         self.status_code = status_code
         self.message = message

-    def json(self) -> dict:
+    def json(self) -> Any:
         return json.loads(self.message)
+
+    def __str__(self) -> str:
+        return f"MercutoHTTPException(status_code='{self.status_code}', message='{self.message}')"
mercuto_client/ingester/__main__.py

@@ -8,10 +8,10 @@ from typing import Callable, TypeVar

 import schedule

+from ..util import get_free_space_excluding_files
 from .ftp import simple_ftp_server
 from .mercuto import MercutoIngester
 from .processor import FileProcessor
-from .util import get_free_space_excluding_files

 logger = logging.getLogger(__name__)

mercuto_client/ingester/mercuto.py

@@ -1,13 +1,14 @@
 import fnmatch
-import itertools
 import logging
 import os
 from typing import Optional

 from .. import MercutoClient, MercutoHTTPException
-from ..types import Channel, DataSample, DatatableOut, Project
+from ..modules.core import Project
+from ..modules.data import (Channel, ChannelClassification, Datatable,
+                            SecondaryDataSample)
+from ..util import batched, get_my_public_ip
 from .parsers import detect_parser
-from .util import batched, get_my_public_ip

 logger = logging.getLogger(__name__)

@@ -22,19 +23,19 @@ class MercutoIngester:

         self._project: Optional[Project] = None
         self._secondary_channels: Optional[list[Channel]] = None
-        self._datatables: Optional[list[DatatableOut]] = None
+        self._datatables: Optional[list[Datatable]] = None

         self._channel_map: dict[str, str] = {}

     def _refresh_mercuto_data(self) -> None:
         with self._client.as_credentials(api_key=self._api_key) as client:
-            self._project = client.projects().get_project(self._project_code)
-            assert self._project['code'] == self._project_code
+            self._project = client.core().get_project(self._project_code)
+            assert self._project.code == self._project_code

-            self._secondary_channels = client.channels().get_channels(self._project_code, classification='SECONDARY')
-            self._datatables = list(itertools.chain.from_iterable([dt['datatables'] for dt in client.devices().list_dataloggers(self._project_code)]))
+            self._secondary_channels = client.data().list_channels(self._project_code, classification=ChannelClassification.SECONDARY)
+            self._datatables = client.data().list_datatables(self._project_code)

-            self._channel_map.update({c['label']: c['code'] for c in self._secondary_channels})
+            self._channel_map.update({c.label: c.code for c in self._secondary_channels})

     def _can_process(self) -> bool:
         return self._project is not None and self._secondary_channels is not None and self._datatables is not None
@@ -56,7 +57,7 @@ class MercutoIngester:
         """
         ip = get_my_public_ip()
         with self._client.as_credentials(api_key=self._api_key) as client:
-            client.projects().ping_project(self.project_code, ip_address=ip)
+            client.core().ping_project(self.project_code, ip_address=ip)
         logging.info(f"Pinged Mercuto server from IP: {ip} for project: {self.project_code}")

     def matching_datatable(self, filename: str) -> str | None:
@@ -85,20 +86,18 @@ class MercutoIngester:

         for dt in self._datatables:
             # Match using datatable pattern
-            if matches(dt['name']):
-                return dt['code']
-            if dt['src'] and matches(dt['src']):
-                return dt['code']
+            if matches(dt.name):
+                return dt.code
         return None

-    def _upload_samples(self, samples: list[DataSample]) -> bool:
+    def _upload_samples(self, samples: list[SecondaryDataSample]) -> bool:
         """
         Upload samples to the Mercuto project.
         """
         try:
             with self._client.as_credentials(api_key=self._api_key) as client:
                 for batch in batched(samples, 500):
-                    client.data().upload_samples(batch)
+                    client.data().insert_secondary_samples(self.project_code, batch)
             return True
         except MercutoHTTPException as e:
             if e.status_code in NON_RETRYABLE_ERRORS:
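The ingester hunks above show the shape of the 0.3.0a0 client API: dict-returning accessors such as `client.projects()`, `client.channels()` and `client.devices()` give way to typed modules reached via `client.core()` and `client.data()`. A sketch of the new call pattern, using only calls visible in this diff; the project code, API key and the no-argument `MercutoClient()` constructor are placeholders and assumptions:

```python
from mercuto_client import MercutoClient
from mercuto_client.modules.data import ChannelClassification

client = MercutoClient()  # constructor arguments omitted; configure as appropriate
with client.as_credentials(api_key="...") as c:
    project = c.core().get_project("PROJ")         # returns an object with .code, not a dict
    channels = c.data().list_channels("PROJ", classification=ChannelClassification.SECONDARY)
    datatables = c.data().list_datatables("PROJ")  # replaces chaining datalogger['datatables']
    label_to_code = {ch.label: ch.code for ch in channels}
```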
mercuto_client/ingester/parsers/__init__.py

@@ -2,7 +2,7 @@ from typing import Optional, Protocol

 import pytz

-from ...types import DataSample
+from ...modules.data import SecondaryDataSample
 from .campbell import parse_campbell_file
 from .worldsensing import (parse_worldsensing_compact_file,
                            parse_worldsensing_standard_file)
@@ -10,9 +10,9 @@ from .worldsensing import (parse_worldsensing_compact_file,

 class Parser(Protocol):
     def __call__(self, filename: str, label_to_channel_code: dict[str, str],
-                 timezone: Optional[pytz.BaseTzInfo] = None) -> list[DataSample]:
+                 timezone: Optional[pytz.BaseTzInfo] = None) -> list[SecondaryDataSample]:
         """
-        Parse the file and return a list of DataSample objects.
+        Parse the file and return a list of SecondaryDataSample objects.
         """
         pass

mercuto_client/ingester/parsers/campbell.py

@@ -2,11 +2,11 @@ from typing import Optional

 import pytz

-from ...types import DataSample
+from ...modules.data import SecondaryDataSample
 from .generic_csv import parse_generic_csv_file


 def parse_campbell_file(filename: str, label_to_channel_code: dict[str, str],
-                        timezone: Optional[pytz.BaseTzInfo] = None) -> list[DataSample]:
+                        timezone: Optional[pytz.BaseTzInfo] = None) -> list[SecondaryDataSample]:
     return parse_generic_csv_file(
         filename, label_to_channel_code, header_index=1, data_index=2, timezone=timezone)
mercuto_client/ingester/parsers/generic_csv.py

@@ -5,7 +5,7 @@ from typing import Optional
 import pytz
 from dateutil import parser

-from ...types import DataSample
+from ...modules.data import SecondaryDataSample

 logger = logging.getLogger(__name__)

@@ -56,7 +56,7 @@ def _parse_csv_line(line: str, sep: str = ',', timestamp_index: int = 0) -> tupl

 def parse_generic_csv_file(filename: str, label_to_channel_code: dict[str, str],
                            header_index: int, data_index: int,
-                           timezone: Optional[pytz.BaseTzInfo] = None) -> list[DataSample]:
+                           timezone: Optional[pytz.BaseTzInfo] = None) -> list[SecondaryDataSample]:
     """
     header index: Number of lines to skip before header
     data index: Number of lines to skip after the header before data
@@ -64,7 +64,7 @@ def parse_generic_csv_file(filename: str, label_to_channel_code: dict[str, str],
     We are avoiding using pandas here to keep dependencies minimal as this is often run on edge devices.
     """

-    output: list[DataSample] = []
+    output: list[SecondaryDataSample] = []
     with open(filename, "r") as f:
         for _ in range(header_index):
             next(f, None)
@@ -109,6 +109,6 @@ def parse_generic_csv_file(filename: str, label_to_channel_code: dict[str, str],

             logger.debug(
                 f"Adding entry for label: {header} with value: {value} and timestamp: {timestamp}")
-            output.append(DataSample(timestamp=timestamp.isoformat(),
-                                     channel_code=channel_code, value=value))
+            output.append(SecondaryDataSample(timestamp=timestamp,
+                                              channel=channel_code, value=value))
     return output
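For downstream code that builds samples itself, the last hunk is the notable change: `DataSample(timestamp=<ISO string>, channel_code=...)` becomes `SecondaryDataSample(timestamp=<datetime>, channel=...)`. A hedged construction sketch; the timezone handling and field values are placeholders:

```python
from datetime import datetime, timezone

from mercuto_client.modules.data import SecondaryDataSample

sample = SecondaryDataSample(
    timestamp=datetime(2024, 1, 1, tzinfo=timezone.utc),  # datetime object, no .isoformat()
    channel="CHANNEL_CODE",                                # field renamed from channel_code
    value=1.23,
)
```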
mercuto_client/ingester/parsers/worldsensing.py

@@ -2,11 +2,12 @@ from typing import Optional

 import pytz

-from .generic_csv import DataSample, parse_generic_csv_file
+from ...modules.data import SecondaryDataSample
+from .generic_csv import parse_generic_csv_file


 def parse_worldsensing_standard_file(filename: str, label_to_channel_code: dict[str, str],
-                                     timezone: Optional[pytz.BaseTzInfo] = None) -> list[DataSample]:
+                                     timezone: Optional[pytz.BaseTzInfo] = None) -> list[SecondaryDataSample]:
     """
     Parse a worldsensing standard CSV file provided when downloading data or using standard CSV export.
     """
@@ -15,7 +16,7 @@ def parse_worldsensing_standard_file(filename: str, label_to_channel_code: dict[


 def parse_worldsensing_compact_file(filename: str, label_to_channel_code: dict[str, str],
-                                    timezone: Optional[pytz.BaseTzInfo] = None) -> list[DataSample]:
+                                    timezone: Optional[pytz.BaseTzInfo] = None) -> list[SecondaryDataSample]:
     """
     Parse a worldsensing custom CSV file. These are generated when using compacted CSV mechanism.
     """
mercuto_client/mocks/__init__.py (new file)

@@ -0,0 +1,92 @@
+import contextlib
+from typing import Callable, Iterator, Optional
+
+from mercuto_client.modules.identity import VerifyMyPermissions
+
+from ..client import MercutoClient
+
+
+@contextlib.contextmanager
+def mock_mercuto(data: bool = True,
+                 identity: bool = True,
+                 fatigue: bool = True,
+                 verify_service_token: Optional[Callable[[str], VerifyMyPermissions]] = None) -> Iterator[None]:
+    """
+    While this context is active, all calls to MercutoClient will use mocked services.
+
+    :param data: Whether to mock the data module.
+    :param identity: Whether to mock the identity module.
+    :param fatigue: Whether to mock the fatigue module.
+    :param verify_service_token: Optional function to mock the verify_service_token behavior. Only used for the mock identity service.
+    """
+    with contextlib.ExitStack() as stack:
+        if data:
+            stack.enter_context(mock_data_module())
+        if identity:
+            stack.enter_context(mock_identity_module(verify_service_token=verify_service_token))
+        if fatigue:
+            stack.enter_context(mock_fatigue_module())
+        yield
+
+
+@contextlib.contextmanager
+def mock_data_module() -> Iterator[None]:
+    from .mock_data import MockMercutoDataService
+    original = MercutoClient.data
+
+    _cache: Optional[MockMercutoDataService] = None
+
+    def stub(self: MercutoClient) -> MockMercutoDataService:
+        nonlocal _cache
+        if _cache is None:
+            _cache = MockMercutoDataService(self)
+        _cache._client = self
+        return _cache
+
+    try:
+        setattr(MercutoClient, 'data', stub)
+        yield
+    finally:
+        setattr(MercutoClient, 'data', original)
+
+
+@contextlib.contextmanager
+def mock_identity_module(verify_service_token: Optional[Callable[[str], VerifyMyPermissions]] = None) -> Iterator[None]:
+    from .mock_identity import MockMercutoIdentityService
+    original = MercutoClient.identity
+
+    _cache: Optional[MockMercutoIdentityService] = None
+
+    def stub(self: MercutoClient) -> MockMercutoIdentityService:
+        nonlocal _cache
+        if _cache is None:
+            _cache = MockMercutoIdentityService(self, verify_service_token=verify_service_token)
+        _cache._client = self
+        return _cache
+
+    try:
+        setattr(MercutoClient, 'identity', stub)
+        yield
+    finally:
+        setattr(MercutoClient, 'identity', original)
+
+
+@contextlib.contextmanager
+def mock_fatigue_module() -> Iterator[None]:
+    from .mock_fatigue import MockMercutoFatigueService
+    original = MercutoClient.fatigue
+
+    _cache: Optional[MockMercutoFatigueService] = None
+
+    def stub(self: MercutoClient) -> MockMercutoFatigueService:
+        nonlocal _cache
+        if _cache is None:
+            _cache = MockMercutoFatigueService(self)
+        _cache._client = self
+        return _cache
+
+    try:
+        setattr(MercutoClient, 'fatigue', stub)
+        yield
+    finally:
+        setattr(MercutoClient, 'fatigue', original)
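The new `mock_mercuto` context manager swaps the `data`, `identity` and `fatigue` accessors on `MercutoClient` for cached mock services and restores the originals on exit. A minimal pytest-style sketch of how it might be used; it assumes `MercutoClient()` can be constructed without arguments and that the mock services accept the same calls as the real modules:

```python
from mercuto_client import MercutoClient
from mercuto_client.mocks import mock_mercuto


def test_with_mocked_services() -> None:
    with mock_mercuto(data=True, identity=True, fatigue=True):
        client = MercutoClient()
        # Inside the context, client.data(), client.identity() and client.fatigue()
        # resolve to in-memory mock services instead of hitting the API.
        assert client.data() is client.data()  # each stub caches a single mock instance
```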
mercuto_client/mocks/_utility.py (new file)

@@ -0,0 +1,69 @@
+import logging
+from types import FunctionType
+from typing import Any, Callable
+
+logger = logging.getLogger(__name__)
+
+
+class EnforceOverridesMeta(type):
+    """
+    Helper for patching mock implementations.
+
+    Assign this as a metaclass when overriding a single base class, and all of the base class' methods
+    must be overridden and cannot be called.
+
+    Example usage:
+
+    ```
+    class BaseClass():
+        def method1(self):
+            pass
+        def method2(self):
+            pass
+
+    class MockClass(BaseClass, metaclass=EnforceOverridesMeta):
+        def method1(self):
+            return "Mocked!"
+
+    mock = MockClass()
+    mock.method1()  # Okay
+    mock.method2()  # Raises NotImplementedError
+    ```
+
+    To exclude specific methods from requiring overrides, add them to the `__exclude_enforce__` set:
+
+    ```
+    class MockClass(BaseClass, metaclass=EnforceOverridesMeta):
+        __exclude_enforce__ = {BaseClass.method2}
+
+        def method1(self):
+            return "Mocked!"
+
+    mock = MockClass()
+    mock.method1()  # Okay
+    mock.method2()  # Okay
+    ```
+    """
+    def __init__(cls: type[Any], name: str, bases: tuple[type, ...], namespace: dict[str, Any]) -> None:
+        type.__init__(cls, name, bases, namespace)
+
+        # Assumes single inheritance from Base
+        if len(bases) != 1:
+            raise TypeError("EnforceOverridesMeta can only be used with a single base class.")
+
+        base = bases[0]
+
+        excluded: set[FunctionType] = getattr(cls, "__exclude_enforce__", set())
+
+        for attr in dir(base):
+            if attr.startswith('__'):
+                continue
+
+            base_method = getattr(base, attr)
+            if isinstance(base_method, FunctionType) and base_method not in excluded and attr not in namespace:
+                def make_error_method(attr_name: str) -> Callable[..., None]:
+                    def error_method(self: Any, *args: Any, **kwargs: Any) -> None:
+                        raise NotImplementedError(f"Mocker '{cls.__name__}' does not currently support method '{attr_name}'")
+                    return error_method
+
+                setattr(cls, attr, make_error_method(attr))
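An aside on the `make_error_method(attr)` factory at the end of the metaclass: defining the stub directly inside the loop would close over `attr` by reference, so every generated stub would name the last attribute seen; the factory call freezes each name. A quick standalone demonstration of that late-binding behaviour (general Python, not package code):

```python
funcs = []
for name in ("a", "b", "c"):
    funcs.append(lambda: name)                    # all three closures share the loop variable

frozen = []
for name in ("a", "b", "c"):
    frozen.append((lambda n: (lambda: n))(name))  # factory call freezes each value

print([f() for f in funcs])   # ['c', 'c', 'c']  (late binding)
print([f() for f in frozen])  # ['a', 'b', 'c']
```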