mercuto-client 0.2.7__py3-none-any.whl → 0.3.0a0__py3-none-any.whl

This diff reflects the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and shows the changes between versions as they appear in their public registries.

Potentially problematic release.

This version of mercuto-client might be problematic.

Files changed (38)
  1. mercuto_client/__init__.py +2 -24
  2. mercuto_client/_authentication.py +72 -0
  3. mercuto_client/_tests/test_ingester/test_parsers.py +67 -67
  4. mercuto_client/_tests/test_mocking/__init__.py +0 -0
  5. mercuto_client/_tests/test_mocking/conftest.py +13 -0
  6. mercuto_client/_tests/test_mocking/test_mock_identity.py +8 -0
  7. mercuto_client/acl.py +16 -10
  8. mercuto_client/client.py +53 -779
  9. mercuto_client/exceptions.py +5 -1
  10. mercuto_client/ingester/__main__.py +1 -1
  11. mercuto_client/ingester/mercuto.py +15 -16
  12. mercuto_client/ingester/parsers/__init__.py +3 -3
  13. mercuto_client/ingester/parsers/campbell.py +2 -2
  14. mercuto_client/ingester/parsers/generic_csv.py +5 -5
  15. mercuto_client/ingester/parsers/worldsensing.py +4 -3
  16. mercuto_client/mocks/__init__.py +92 -0
  17. mercuto_client/mocks/_utility.py +69 -0
  18. mercuto_client/mocks/mock_data.py +402 -0
  19. mercuto_client/mocks/mock_fatigue.py +30 -0
  20. mercuto_client/mocks/mock_identity.py +188 -0
  21. mercuto_client/modules/__init__.py +19 -0
  22. mercuto_client/modules/_util.py +18 -0
  23. mercuto_client/modules/core.py +674 -0
  24. mercuto_client/modules/data.py +623 -0
  25. mercuto_client/modules/fatigue.py +189 -0
  26. mercuto_client/modules/identity.py +254 -0
  27. mercuto_client/{ingester/util.py → util.py} +27 -11
  28. mercuto_client-0.3.0a0.dist-info/METADATA +72 -0
  29. mercuto_client-0.3.0a0.dist-info/RECORD +41 -0
  30. mercuto_client/_tests/test_mocking.py +0 -93
  31. mercuto_client/_util.py +0 -13
  32. mercuto_client/mocks.py +0 -203
  33. mercuto_client/types.py +0 -409
  34. mercuto_client-0.2.7.dist-info/METADATA +0 -20
  35. mercuto_client-0.2.7.dist-info/RECORD +0 -30
  36. {mercuto_client-0.2.7.dist-info → mercuto_client-0.3.0a0.dist-info}/WHEEL +0 -0
  37. {mercuto_client-0.2.7.dist-info → mercuto_client-0.3.0a0.dist-info}/licenses/LICENSE +0 -0
  38. {mercuto_client-0.2.7.dist-info → mercuto_client-0.3.0a0.dist-info}/top_level.txt +0 -0
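
The headline change is the new `mercuto_client.mocks` package, which replaces the old single-file `mercuto_client/mocks.py` with per-service mock implementations. A minimal sketch of how the new mocks might be wired together in a test, assuming `MercutoClient` can be constructed without arguments (the mocks themselves only need a client reference; the policy string is illustrative):

    from mercuto_client.client import MercutoClient
    from mercuto_client.mocks.mock_data import MockMercutoDataService
    from mercuto_client.mocks.mock_identity import MockMercutoIdentityService

    client = MercutoClient()  # hypothetical no-argument construction
    identity = MockMercutoIdentityService(client)
    data = MockMercutoDataService(client)

    # Set up an in-memory tenant, permission group, and user.
    tenant = identity.create_tenant(name='Acme', description='Test tenant')
    group = identity.create_permission_group(tenant=tenant.code, label='admins',
                                             acl_policy='*')  # illustrative policy
    user = identity.create_user(username='alice', tenant=tenant.code,
                                description='Test user', group=group.code)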
mercuto_client/mocks/mock_data.py
@@ -0,0 +1,402 @@
+ import base64
+ import io
+ import logging
+ import uuid
+ from datetime import datetime, timedelta, timezone
+ from typing import BinaryIO, Collection, Optional, TextIO
+ 
+ import pandas as pd
+ 
+ from ..client import MercutoClient
+ from ..exceptions import MercutoHTTPException
+ from ..modules.data import (AggregationOptions, Channel, ChannelClassification,
+                             ChannelFormat, Datatable, DatatableColumn,
+                             FileFormat, FrameFormat, GetStatusRequestResponse,
+                             LatestDataSample, MercutoDataService,
+                             MetricDataSample, SecondaryDataSample)
+ from ._utility import EnforceOverridesMeta
+ 
+ logger = logging.getLogger(__name__)
+ 
+ 
+ class MockMercutoDataService(MercutoDataService, metaclass=EnforceOverridesMeta):
+     __exclude_enforce__ = {MercutoDataService.load_presigned_url,
+                            MercutoDataService.load_metric_sample,
+                            MercutoDataService.load_data_request}
+ 
+     def __init__(self, client: 'MercutoClient'):
+         super().__init__(client=client, path='/mock-data-service-method-not-implemented')
+         self._secondary_and_primary_buffer = pd.DataFrame(columns=['channel', 'timestamp', 'value']).set_index(['channel', 'timestamp'])
+         self._metric_buffer = pd.DataFrame(columns=['channel', 'timestamp', 'value', 'event']).set_index(['channel', 'timestamp'])
+ 
+         self._known_requests: dict[str, GetStatusRequestResponse] = {}
+ 
+         self._channels: dict[str, Channel] = {}
+         self._datatables: dict[str, Datatable] = {}
+ 
+     def _last_timestamp(self, channel: str) -> Optional[datetime]:
+         if channel in self._secondary_and_primary_buffer.index.get_level_values('channel'):
+             return self._secondary_and_primary_buffer.loc[channel, :].index.get_level_values('timestamp').max()
+         if channel in self._metric_buffer.index.get_level_values('channel'):
+             return self._metric_buffer.loc[channel, :].index.get_level_values('timestamp').max()
+         return None
+ 
+     def _update_last_valid_samples(self) -> None:
+         for channel in self._channels.values():
+             channel.last_valid_timestamp = self._last_timestamp(channel.code)
+ 
+     def list_channels(self,
+                       project: str,
+                       classification: Optional[ChannelClassification] = None,
+                       aggregate: Optional[str] = None, metric: Optional[str] = None,
+                       show_hidden: bool = False) -> list[Channel]:
+         selection = filter(lambda ch: ch.project == project, self._channels.values())
+         if classification is not None:
+             selection = filter(lambda ch: ch.classification == classification, selection)
+         if aggregate is not None:
+             selection = filter(lambda ch: ch.aggregate == aggregate, selection)
+         if metric is not None:
+             selection = filter(lambda ch: ch.metric == metric, selection)
+         return list(selection)
+ 
+     def get_channel(self, code: str) -> Optional[Channel]:
+         return self._channels.get(code)
+ 
+     def create_channel(self, project: str,
+                        label: str,
+                        classification: ChannelClassification = ChannelClassification.SECONDARY,
+                        sampling_period: Optional[timedelta] = None,
+                        multiplier: float = 1.0, offset: float = 0.0,
+                        value_range_min: Optional[float] = None, value_range_max: Optional[float] = None,
+                        delta_max: Optional[float] = None,
+                        units: Optional[str] = None,
+                        aggregate: Optional[str] = None,
+                        source: Optional[str] = None,
+                        metric: Optional[str] = None) -> Channel:
+ 
+         if multiplier != 1.0:
+             logger.warning("MockMercutoDataService does not support channel multiplier.")
+         if offset != 0.0:
+             logger.warning("MockMercutoDataService does not support channel offset.")
+         if value_range_min is not None:
+             logger.warning("MockMercutoDataService does not support channel value_range_min.")
+         if value_range_max is not None:
+             logger.warning("MockMercutoDataService does not support channel value_range_max.")
+         if delta_max is not None:
+             logger.warning("MockMercutoDataService does not support channel delta_max.")
+         if units is not None:
+             logger.warning("MockMercutoDataService does not support channel units.")
+ 
+         # Generate a unique UUID code
+         code = str(uuid.uuid4())
+         channel = Channel(code=code,
+                           project=project,
+                           units=None,
+                           sampling_period=sampling_period,
+                           classification=classification,
+                           label=label,
+                           metric=metric,
+                           source=source,
+                           aggregate=aggregate,
+                           value_range_min=None,
+                           value_range_max=None,
+                           multiplier=multiplier,
+                           offset=offset,
+                           last_valid_timestamp=None,
+                           is_wallclock_interval=False)
+         self._channels[code] = channel
+ 
+         return channel
+ 
+     def create_request(self,
+                        start_time: datetime,
+                        end_time: datetime,
+                        project: Optional[str] = None,
+                        channels: Optional[Collection[str]] = None,
+                        classification: Optional[ChannelClassification] = None,
+                        frame_format: FrameFormat = FrameFormat.SAMPLES,
+                        file_format: FileFormat = FileFormat.PARQUET,
+                        channel_format: ChannelFormat = ChannelFormat.CODE,
+                        aggregation: Optional[AggregationOptions] = None,
+                        timeout: float = 0) -> GetStatusRequestResponse:
+ 
+         if channel_format != ChannelFormat.CODE:
+             raise NotImplementedError(f"Unsupported channel format: {channel_format}")
+ 
+         if channels is None and project is None:
+             raise ValueError("Must supply either channels or project.")
+ 
+         if channels is None and classification is None:
+             raise ValueError("Must supply either channels or classification.")
+ 
+         if aggregation is not None:
+             raise NotImplementedError("MockMercutoDataService does not support aggregation.")
+ 
+         if channels is None:
+             assert classification is not None
+             assert project is not None
+             channels = [ch.code for ch in self._channels.values() if ch.classification == classification and ch.project == project]
+ 
+         assert channels is not None
+ 
+         def load_from_buffer(buffer: pd.DataFrame) -> pd.DataFrame:
+             # Filter by channels if provided
+             if channels is not None:
+                 buffer = buffer[buffer.index.get_level_values('channel').isin(channels)]
+ 
+             # Filter by time range
+             buffer = buffer[
+                 (buffer.index.get_level_values('timestamp') >= start_time) &
+                 (buffer.index.get_level_values('timestamp') <= end_time)
+             ]
+             return buffer
+ 
+         secondary_part = load_from_buffer(self._secondary_and_primary_buffer)
+         metric_part = load_from_buffer(self._metric_buffer)[['value']]
+         ts = pd.concat([secondary_part, metric_part], axis=0).sort_index()
+ 
+         # Compare as plain lists; an Index compared to a list is elementwise
+         # and cannot be used directly in an assert.
+         assert list(ts.columns) == ['value']
+         assert list(ts.index.names) == ['channel', 'timestamp']
+ 
+         if frame_format == FrameFormat.COLUMNS:
+             ts = ts.reset_index(drop=False).pivot(index='timestamp',
+                                                   columns='channel',
+                                                   values='value')
+ 
+         buffer = io.BytesIO()
+         if file_format == FileFormat.FEATHER:
+             ts.to_feather(buffer)
+             mime_type = 'application/feather'
+         elif file_format == FileFormat.PARQUET:
+             ts.to_parquet(buffer, index=True)
+             mime_type = 'application/parquet'
+         else:
+             raise NotImplementedError(f"Unsupported file format: {file_format}")
+         buffer.seek(0)
+         data = buffer.read()
+ 
+         first_timestamp = None if len(ts) == 0 else ts.index.get_level_values('timestamp').min()
+ 
+         # Encode as a data-url
+         b64_data = base64.b64encode(data).decode('utf-8')
+         url = f"data:{mime_type};base64,{b64_data}"
+ 
+         req = GetStatusRequestResponse(
+             request_id=str(uuid.uuid4()),
+             status_code=200,
+             message="Success",
+             requested_at=datetime.now(timezone.utc),
+             completed_at=datetime.now(timezone.utc),
+             result=GetStatusRequestResponse.GetDataRequestStatusCompletedResult(
+                 result_url=url,
+                 expires_at=datetime.now(timezone.utc) + timedelta(hours=1),
+                 mime_type=mime_type,
+                 file_size=len(data),
+                 metadata=GetStatusRequestResponse.GetDataRequestStatusCompletedResult.ResultMetadata(
+                     first_timestamp=first_timestamp
+                 )
+             )
+         )
+         self._known_requests[req.request_id] = req
+         return req
+ 
+     def get_request_status(self, request_id: str) -> GetStatusRequestResponse:
+         if request_id not in self._known_requests:
+             raise MercutoHTTPException(status_code=404, message="Not Found")
+         return self._known_requests[request_id]
+ 
+     def insert_metric_samples(
+         self,
+         project: str,
+         samples: Collection[MetricDataSample]
+     ) -> None:
+         if not samples:
+             return
+ 
+         # Ensure all channels are of type METRIC
+         if not all(
+             sample.channel in self._channels and (self._channels[sample.channel].classification == ChannelClassification.EVENT_METRIC or
+                                                   self._channels[sample.channel].classification == ChannelClassification.PRIMARY_EVENT_AGGREGATE)
+             and self._channels[sample.channel].project == project
+             for sample in samples
+         ):
+             return
+ 
+         df = pd.DataFrame([{
+             'channel': s.channel,
+             'timestamp': s.timestamp,
+             'value': s.value,
+             'event': s.event
+         } for s in samples])
+         df = df.set_index(['channel', 'timestamp'])
+ 
+         to_concat: list[pd.DataFrame] = []
+         if len(self._metric_buffer) > 0:
+             to_concat.append(self._metric_buffer)
+         if len(df) > 0:
+             to_concat.append(df)
+ 
+         self._metric_buffer = pd.concat(to_concat).sort_index()
+         self._update_last_valid_samples()
+ 
+     def insert_secondary_samples(
+         self,
+         project: str,
+         samples: Collection[SecondaryDataSample]
+     ) -> None:
+         if not samples:
+             return
+ 
+         # Ensure all channels are of type SECONDARY
+         if not all(
+             sample.channel in self._channels and self._channels[sample.channel].classification == ChannelClassification.SECONDARY
+             and self._channels[sample.channel].project == project
+             for sample in samples
+         ):
+             return
+ 
+         df = pd.DataFrame([{
+             'channel': s.channel,
+             'timestamp': s.timestamp,
+             'value': s.value,
+         } for s in samples])
+         df = df.set_index(['channel', 'timestamp'])
+ 
+         to_concat: list[pd.DataFrame] = []
+         if len(self._secondary_and_primary_buffer) > 0:
+             to_concat.append(self._secondary_and_primary_buffer)
+         if len(df) > 0:
+             to_concat.append(df)
+ 
+         self._secondary_and_primary_buffer = pd.concat(to_concat).sort_index()
+         self._update_last_valid_samples()
+ 
+     def delete_metric_samples(self, project: str, event: str, channels: Optional[Collection[str]] = None) -> None:
+         if channels is None:
+             channels = [c.code for c in self._channels.values() if c.project == project]
+         idx = self._metric_buffer.index
+ 
+         mask = (
+             idx.get_level_values('channel').isin(channels) &
+             (self._metric_buffer['event'] == event)
+         )
+         self._metric_buffer = self._metric_buffer[~mask]
+         self._update_last_valid_samples()
+ 
+     def load_metric_samples(
+         self,
+         channels: Optional[Collection[str]] = None,
+         start_time: Optional[datetime] = None,
+         end_time: Optional[datetime] = None,
+         events: Optional[Collection[str]] = None,
+         project: Optional[str] = None,
+         limit: int = 100
+     ) -> list[MetricDataSample]:
+         if channels is None and project is None:
+             raise ValueError("Must supply either channels or project.")
+ 
+         if channels is None:
+             channels = [c.code for c in self._channels.values() if c.project == project and c.classification in {
+                 ChannelClassification.EVENT_METRIC, ChannelClassification.PRIMARY_EVENT_AGGREGATE
+             }]
+ 
+         # Ensure all channels exist and are of type METRIC or PRIMARY_EVENT_AGGREGATE
+         if not all(
+             ch in self._channels and self._channels[ch].classification in {
+                 ChannelClassification.EVENT_METRIC, ChannelClassification.PRIMARY_EVENT_AGGREGATE}
+             for ch in channels
+         ):
+             return []
+ 
+         idx = self._metric_buffer.index
+         mask = (
+             idx.get_level_values('channel').isin(channels) &
+             (start_time is None or idx.get_level_values('timestamp') >= start_time) &
+             (end_time is None or idx.get_level_values('timestamp') <= end_time) &
+             (events is None or self._metric_buffer['event'].isin(events))
+         )
+         filtered = self._metric_buffer[mask]
+         return [
+             MetricDataSample(
+                 channel=channel,
+                 timestamp=timestamp,
+                 value=row['value'],
+                 event=row['event']
+             )
+             for (channel, timestamp), row in filtered.iterrows()
+         ][:limit]
+ 
+     def create_datatable(self, project: str, name: str, sampling_period: timedelta, column_labels: Collection[str]) -> Datatable:
+         if sampling_period <= timedelta(seconds=1):
+             classification = ChannelClassification.PRIMARY
+         else:
+             classification = ChannelClassification.SECONDARY
+         channels = [self.create_channel(project=project, label=col, classification=classification,
+                                         sampling_period=sampling_period) for col in column_labels]
+         dt = Datatable(
+             code=str(uuid.uuid4()),
+             project=project,
+             name=name,
+             sampling_period=sampling_period,
+             columns=[DatatableColumn(column_label=ch.label, channel=ch.code) for ch in channels],
+             enabled=True
+         )
+         self._datatables[dt.code] = dt
+         return dt
+ 
+     def _dt_col_to_channel_code(self, dt: str, column_label: str) -> str:
+         for col in self._datatables[dt].columns:
+             if col.column_label == column_label:
+                 return col.channel
+         raise ValueError(f"Column label '{column_label}' not found in datatable '{dt}'")
+ 
+     def upload_file(self, project: str, datatable: str, file: str | bytes | TextIO | BinaryIO,
+                     filename: Optional[str] = None,
+                     timezone: Optional[str] = None) -> None:
+         frame = pd.read_csv(file, header=1, skiprows=[2, 3],
+                             usecols=None, sep=',', index_col=0, na_values=['NAN', '"NAN"'])
+         frame.index = pd.to_datetime(frame.index, utc=False)
+         assert isinstance(frame.index, pd.DatetimeIndex)
+         if frame.index.tz is None:
+             # A naive index must be localised; tz_convert raises on naive indexes.
+             frame.index = frame.index.tz_localize(timezone)
+         del frame['RECORD']
+ 
+         # Drop unknown channels
+         frame = frame[[col for col in frame.columns if col in {c.column_label for c in self._datatables[datatable].columns}]]
+ 
+         # rename from label to code
+         frame = frame.rename(columns=lambda x: self._dt_col_to_channel_code(datatable, x))
+ 
+         frame = frame.melt([], tuple(frame.columns), ignore_index=False, var_name='channel',
+                            value_name='value').sort_index()
+         frame.index.name = 'timestamp'
+         frame = frame.reset_index().set_index(['channel', 'timestamp'])
+         self._secondary_and_primary_buffer = pd.concat([self._secondary_and_primary_buffer, frame]).sort_index()
+         self._update_last_valid_samples()
+ 
+     def get_latest_samples(self, project: str, include_primary: bool = True) -> list[LatestDataSample]:
+         if include_primary:
+             channels = [c.code for c in self._channels.values() if c.project == project]
+         else:
+             channels = [c.code for c in self._channels.values()
+                         if c.classification != ChannelClassification.PRIMARY and c.project == project]
+ 
+         out: list[LatestDataSample] = []
+ 
+         # Get the last timestamp and value for each channel in the secondary_and_primary_buffer.
+         # tail(1) keeps the (channel, timestamp) MultiIndex so both can be unpacked below.
+         latest = self._secondary_and_primary_buffer.groupby(level='channel').tail(1)
+         for (channel, timestamp), row in latest.iterrows():
+             if channel not in channels:
+                 continue
+             out.append(LatestDataSample(channel=channel,
+                                         timestamp=timestamp,
+                                         value=row['value']))
+ 
+         latest = self._metric_buffer.groupby(level='channel').tail(1)
+         for (channel, timestamp), row in latest.iterrows():
+             if channel not in channels:
+                 continue
+             out.append(LatestDataSample(channel=channel,
+                                         timestamp=timestamp,
+                                         value=row['value']))
+         return out
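
Rather than uploading results to object storage, the mock's `create_request` inlines the serialized frame into `result_url` as a base64-encoded `data:` URL. A minimal sketch of the consuming side (a hypothetical helper, not part of the package):

    import base64
    import io

    import pandas as pd

    def read_result_url(url: str) -> pd.DataFrame:
        # Split 'data:application/parquet;base64,<payload>' into header and payload.
        header, payload = url.split(',', 1)
        raw = base64.b64decode(payload)
        if 'parquet' in header:
            return pd.read_parquet(io.BytesIO(raw))
        if 'feather' in header:
            return pd.read_feather(io.BytesIO(raw))
        raise ValueError(f'Unsupported result mime type: {header}')

Given a GetStatusRequestResponse `req` from the mock, `read_result_url(req.result.result_url)` recovers the DataFrame without any network round trip.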
mercuto_client/mocks/mock_fatigue.py
@@ -0,0 +1,30 @@
+ import logging
+ from datetime import datetime
+ from typing import Literal, Optional
+ 
+ from ..client import MercutoClient
+ from ..modules.fatigue import MercutoFatigueService
+ from ._utility import EnforceOverridesMeta
+ 
+ logger = logging.getLogger(__name__)
+ 
+ 
+ class MockMercutoFatigueService(MercutoFatigueService, metaclass=EnforceOverridesMeta):
+     def __init__(self, client: 'MercutoClient'):
+         super().__init__(client=client, path='/mock-fatigue-service-method-not-implemented')
+ 
+     def delete_cycle_counts(
+         self, project: str, start_time: datetime, end_time: datetime, ignore_if_not_configured: bool = False
+     ) -> None:
+         pass
+ 
+     def calculate_cycle_counts(
+         self,
+         project: str,
+         event: str,
+         presigned_url: str,
+         mime_type: Literal['application/feather'],
+         url_expiry: Optional[datetime] = None,
+         ignore_if_not_configured: bool = False
+     ) -> None:
+         pass
mercuto_client/mocks/mock_identity.py
@@ -0,0 +1,188 @@
+ import logging
+ import uuid
+ from dataclasses import dataclass
+ from typing import Callable, Optional
+ 
+ from ..client import MercutoClient
+ from ..exceptions import MercutoHTTPException
+ from ..modules.identity import (CurrentUser, HiddenUserAPIKey,
+                                 MercutoIdentityService, PermissionGroup,
+                                 Tenant, User, UserDetails, VerifyMyPermissions,
+                                 VisibleUserAPIKey)
+ from ._utility import EnforceOverridesMeta
+ 
+ logger = logging.getLogger(__name__)
+ 
+ 
+ class MockMercutoIdentityService(MercutoIdentityService, metaclass=EnforceOverridesMeta):
+     @dataclass
+     class _GeneratedUserApiKey:
+         user: str
+         api_key: str
+         hidden: HiddenUserAPIKey
+ 
+     def __init__(self, client: 'MercutoClient', verify_service_token: Optional[Callable[[str], VerifyMyPermissions]] = None) -> None:
+         """
+         Initialize the mock identity service.
+         :param client: The MercutoClient instance.
+         :param verify_service_token: Optional callable to verify service tokens.
+         """
+ 
+         super().__init__(client=client, path='/mock-identity-service-method-not-implemented')
+         self._verify_service_token = verify_service_token
+ 
+         # In-memory stores for mock data
+         self._users: dict[str, User] = {}
+         self._user_details: dict[str, UserDetails] = {}
+         self._tenants: dict[str, Tenant] = {}
+         self._permission_groups: dict[str, PermissionGroup] = {}
+ 
+         # Maps API Key to User
+         self._api_keys: dict[str, MockMercutoIdentityService._GeneratedUserApiKey] = {}
+ 
+     def get_my_permissions(self) -> VerifyMyPermissions:
+         if self._client._auth_method is None:
+             raise MercutoHTTPException('Not authenticated', 401)
+         header: dict[str, str] = {}
+         self._client._auth_method.update_header(header)
+         if (api_key := header.get('X-Api-Key')) is not None:
+             known = self._api_keys.get(api_key)
+             if known is None:
+                 raise MercutoHTTPException('Invalid API key', 403)
+             if known.hidden.custom_policy is not None:
+                 return VerifyMyPermissions(user=known.user, acl_policy=known.hidden.custom_policy)
+             user = self._users[known.user]
+             group = self._permission_groups[user.permission_group]
+             return VerifyMyPermissions(user=user.code, acl_policy=group.acl_policy)
+         elif (service_token := header.get('X-Service-Token')) is not None:
+             if self._verify_service_token is None:
+                 raise MercutoHTTPException('Service token verification not implemented', 501)
+             return self._verify_service_token(service_token)
+ 
+         raise MercutoHTTPException('Invalid authentication method for mock implementation', 403)
+ 
+     def list_users(self, tenant: Optional[str] = None) -> list[User]:
+         if tenant is None:
+             return list(self._users.values())
+         return [u for u in self._users.values() if u.tenant == tenant]
+ 
+     def create_user(
+         self,
+         username: str,
+         tenant: str,
+         description: str,
+         group: str,
+         default_password: Optional[str] = None
+     ) -> User:
+         code = str(uuid.uuid4())
+         user = User(code=code, username=username, description=description, tenant=tenant, permission_group=group)
+         self._users[code] = user
+         self._user_details[code] = UserDetails(code=code, username=username)
+         return user
+ 
+     def get_current_user(self) -> CurrentUser:
+         perms = self.get_my_permissions()
+         if perms.user is None:
+             raise MercutoHTTPException('Not authenticated', 401)
+         user = self._users[perms.user]
+         tenant = self._tenants[user.tenant]
+         group = self._permission_groups[user.permission_group]
+         return CurrentUser(
+             code=user.code,
+             username=user.username,
+             description=user.description,
+             tenant=tenant,
+             permission_group=group,
+             current_permission_policy=group.acl_policy
+         )
+ 
+     def get_user(self, code: str) -> User:
+         return self._users[code]
+ 
+     def delete_user(self, code: str) -> None:
+         if code in self._users:
+             del self._users[code]
+         if code in self._user_details:
+             del self._user_details[code]
+         for k, v in list(self._api_keys.items()):
+             if v.user == code:
+                 del self._api_keys[k]
+ 
+     def get_user_details(self, code: str) -> UserDetails:
+         return self._user_details[code]
+ 
+     def set_user_details(
+         self,
+         code: str,
+         email_address: Optional[str] = None,
+         mobile_number: Optional[str] = None,
+         first_name: Optional[str] = None,
+         last_name: Optional[str] = None
+     ) -> UserDetails:
+         user_details = self._user_details[code]
+         if email_address is not None:
+             user_details.email_address = email_address
+         if mobile_number is not None:
+             user_details.mobile_number = mobile_number
+         if first_name is not None:
+             user_details.first_name = first_name
+         if last_name is not None:
+             user_details.last_name = last_name
+         return user_details
+ 
+     def get_user_api_keys(self, user: str) -> list[HiddenUserAPIKey]:
+         return [v.hidden for v in self._api_keys.values() if v.user == user]
+ 
+     def generate_api_key_for_user(
+         self,
+         user: str,
+         description: str,
+         custom_policy: Optional[str] = None
+     ) -> VisibleUserAPIKey:
+         key_code = str(uuid.uuid4())
+         new_api_key = str(uuid.uuid4())
+         hidden_key = HiddenUserAPIKey(code=key_code, description=description, last_used=None, custom_policy=custom_policy)
+ 
+         if user not in self._users:
+             raise MercutoHTTPException('User not found', 404)
+ 
+         self._api_keys[new_api_key] = self._GeneratedUserApiKey(
+             user=user, api_key=new_api_key, hidden=hidden_key
+         )
+         return VisibleUserAPIKey(code=key_code, new_api_key=new_api_key, description=description, custom_policy=custom_policy)
+ 
+     def list_tenants(self) -> list[Tenant]:
+         return list(self._tenants.values())
+ 
+     def get_tenant(self, code: str) -> Tenant:
+         return self._tenants[code]
+ 
+     def create_tenant(
+         self,
+         name: str,
+         description: str,
+         logo_url: Optional[str] = None
+     ) -> Tenant:
+         code = str(uuid.uuid4())
+         tenant = Tenant(code=code, name=name, description=description, logo_url=logo_url)
+         self._tenants[code] = tenant
+         return tenant
+ 
+     def get_permission_groups(self, tenant: Optional[str] = None) -> list[PermissionGroup]:
+         if tenant is None:
+             return list(self._permission_groups.values())
+         return [g for g in self._permission_groups.values() if g.tenant == tenant]
+ 
+     def create_permission_group(
+         self,
+         tenant: str,
+         label: str,
+         acl_policy: str
+     ) -> PermissionGroup:
+         code = str(uuid.uuid4())
+         group = PermissionGroup(tenant=tenant, code=code, label=label, acl_policy=acl_policy)
+         self._permission_groups[code] = group
+         return group
+ 
+     def get_permission_group(self, group: str) -> PermissionGroup:
+         return self._permission_groups[group]
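
Both mocks lean on `EnforceOverridesMeta` from `mocks/_utility.py`, which this diff does not expand. A plausible sketch of the pattern, assuming it raises at class-creation time when a mock inherits an HTTP-backed public method it has not overridden, with `__exclude_enforce__` opting methods out:

    class EnforceOverridesMeta(type):
        # Sketch only: the real _utility.py implementation is not shown in this diff.
        def __new__(mcls, name, bases, namespace):
            cls = super().__new__(mcls, name, bases, namespace)
            if bases:
                base = bases[0]
                excluded = {fn.__name__ for fn in namespace.get('__exclude_enforce__', ())}
                for attr, value in vars(base).items():
                    if attr.startswith('_') or not callable(value) or attr in excluded:
                        continue
                    if attr not in namespace:
                        raise TypeError(f'{name} must override {base.__name__}.{attr}')
            return cls

Failing at import time, rather than on first call, keeps a mock from silently falling through to the unreachable `/mock-...-method-not-implemented` paths the constructors pass to the real services.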
mercuto_client/modules/__init__.py
@@ -0,0 +1,19 @@
+ import requests
+ 
+ from ..exceptions import MercutoClientException, MercutoHTTPException
+ 
+ _PayloadValueType = str | float | int | None
+ _PayloadListType = list[str] | list[float] | list[int] | list[_PayloadValueType]
+ _PayloadDictType = dict[str, str] | dict[str, float] | dict[str, int] | dict[str, _PayloadValueType]
+ _PayloadType = dict[str, _PayloadValueType | _PayloadListType | _PayloadDictType]
+ 
+ 
+ def _raise_for_response(r: requests.Response) -> None:
+     if 500 <= r.status_code < 600:
+         raise MercutoClientException(f"Server error: {r.text}")
+     if not (200 <= r.status_code < 300):
+         try:
+             detail = r.text
+         except Exception:
+             detail = str(r)
+         raise MercutoHTTPException(detail, r.status_code)
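
`_raise_for_response` fixes the error-mapping convention for the new module clients: 5xx responses surface as `MercutoClientException`, and any other non-2xx becomes a `MercutoHTTPException` carrying the response text and status code. A hypothetical call site (the endpoint path is illustrative, not a confirmed Mercuto route):

    import requests

    def get_channel(base_url: str, api_key: str, code: str) -> dict:
        r = requests.get(f'{base_url}/channels/{code}',
                         headers={'X-Api-Key': api_key}, timeout=30)
        _raise_for_response(r)  # raises on any non-2xx status
        return r.json()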
mercuto_client/modules/_util.py
@@ -0,0 +1,18 @@
+ from datetime import timedelta
+ 
+ from pydantic import BaseModel as _BaseModel
+ from pydantic import ConfigDict, TypeAdapter
+ 
+ _TimedeltaAdapter = TypeAdapter(timedelta)
+ 
+ 
+ def serialise_timedelta(td: timedelta) -> str:
+     s = _TimedeltaAdapter.dump_python(td, mode='json')
+     assert isinstance(s, str)
+     return s
+ 
+ 
+ class BaseModel(_BaseModel):
+     model_config = ConfigDict(
+         extra='allow'
+     )