mercuto-client 0.2.8__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mercuto_client/__init__.py +2 -24
- mercuto_client/_authentication.py +72 -0
- mercuto_client/_tests/test_ingester/test_parsers.py +67 -67
- mercuto_client/_tests/test_mocking/__init__.py +0 -0
- mercuto_client/_tests/test_mocking/conftest.py +13 -0
- mercuto_client/_tests/test_mocking/test_mock_identity.py +8 -0
- mercuto_client/acl.py +16 -10
- mercuto_client/client.py +53 -779
- mercuto_client/exceptions.py +5 -1
- mercuto_client/ingester/__main__.py +1 -1
- mercuto_client/ingester/mercuto.py +15 -16
- mercuto_client/ingester/parsers/__init__.py +3 -3
- mercuto_client/ingester/parsers/campbell.py +2 -2
- mercuto_client/ingester/parsers/generic_csv.py +5 -5
- mercuto_client/ingester/parsers/worldsensing.py +4 -3
- mercuto_client/mocks/__init__.py +92 -0
- mercuto_client/mocks/_utility.py +69 -0
- mercuto_client/mocks/mock_data.py +402 -0
- mercuto_client/mocks/mock_fatigue.py +30 -0
- mercuto_client/mocks/mock_identity.py +188 -0
- mercuto_client/modules/__init__.py +19 -0
- mercuto_client/modules/_util.py +18 -0
- mercuto_client/modules/core.py +674 -0
- mercuto_client/modules/data.py +623 -0
- mercuto_client/modules/fatigue.py +189 -0
- mercuto_client/modules/identity.py +254 -0
- mercuto_client/{ingester/util.py → util.py} +27 -11
- {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0.dist-info}/METADATA +10 -3
- mercuto_client-0.3.0.dist-info/RECORD +41 -0
- mercuto_client/_tests/test_mocking.py +0 -93
- mercuto_client/_util.py +0 -13
- mercuto_client/mocks.py +0 -203
- mercuto_client/types.py +0 -409
- mercuto_client-0.2.8.dist-info/RECORD +0 -30
- {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0.dist-info}/WHEEL +0 -0
- {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {mercuto_client-0.2.8.dist-info → mercuto_client-0.3.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,623 @@
|
|
|
1
|
+
import enum
|
|
2
|
+
import os
|
|
3
|
+
import time
|
|
4
|
+
from contextlib import nullcontext
|
|
5
|
+
from datetime import datetime, timedelta
|
|
6
|
+
from typing import (TYPE_CHECKING, Any, BinaryIO, Collection, Literal,
|
|
7
|
+
Optional, TextIO)
|
|
8
|
+
|
|
9
|
+
from pydantic import TypeAdapter
|
|
10
|
+
|
|
11
|
+
from ..exceptions import MercutoClientException, MercutoHTTPException
|
|
12
|
+
from ..util import batched
|
|
13
|
+
from . import _PayloadType, _raise_for_response
|
|
14
|
+
from ._util import BaseModel, serialise_timedelta
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from ..client import MercutoClient
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ChannelClassification(enum.Enum):
    """Role of a channel within a project.

    Values are the wire strings the data-service API expects.
    """
    # Raw primary data feed.
    PRIMARY = 'PRIMARY'
    # Aggregate derived from primary data per event — TODO confirm exact semantics.
    PRIMARY_EVENT_AGGREGATE = 'PRIMARY_EVENT_AGGREGATE'
    # Per-event metric value.
    EVENT_METRIC = 'EVENT_METRIC'
    # Derived / manually inserted data (default for create_channel).
    SECONDARY = 'SECONDARY'
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Units(BaseModel):
    """A unit of measure known to the data service."""
    # Unique code identifying this unit.
    code: str
    # Human-readable name of the unit.
    name: str
    # Display symbol, or None — presumably for dimensionless units; TODO confirm.
    unit: Optional[str]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class Channel(BaseModel):
    """A registered data channel (one time series) belonging to a project."""
    # Unique channel code (server-assigned identifier).
    code: str
    # Code of the owning project.
    project: str
    # Unit of measure, or None if unit-less.
    units: Optional[Units]
    # Nominal time between samples; None — presumably for irregular channels; TODO confirm.
    sampling_period: Optional[timedelta]
    # Role of the channel (primary feed, event metric, ...).
    classification: ChannelClassification
    # Human-readable display name.
    label: str
    # Metric code this channel reports, if any.
    metric: Optional[str]
    # Source identifier, if any — semantics defined server-side.
    source: Optional[str]
    # Aggregate code, if this channel is an aggregate.
    aggregate: Optional[str]
    # Accepted value range bounds — presumably samples outside are rejected; TODO confirm.
    value_range_min: Optional[float]
    value_range_max: Optional[float]
    # Linear scaling parameters — presumably value * multiplier + offset; TODO confirm.
    multiplier: float
    offset: float
    # Timestamp of the most recent valid sample, if any.
    last_valid_timestamp: Optional[datetime]
    # Whether the sampling interval is aligned to wall-clock boundaries — TODO confirm.
    is_wallclock_interval: bool
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class Expression(BaseModel):
    """A server-side computed expression and the channel receiving its results."""
    # Expression source text, evaluated by the server.
    expression: str
    # Channel into which computed values are written.
    target: Channel
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class DatatableColumn(BaseModel):
    """Mapping from one datatable column to the channel storing its values."""
    # Code of the channel backing this column.
    channel: str
    # Column header label as it appears in uploaded files.
    column_label: str
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class Datatable(BaseModel):
    """An ingestion table mapping file columns onto channels."""
    # Unique datatable code.
    code: str
    # Code of the owning project.
    project: str
    # Human-readable table name.
    name: str
    # Whether this table is currently active — TODO confirm exact semantics.
    enabled: bool
    # Expected time between rows; None if unspecified.
    sampling_period: Optional[timedelta] = None
    # Column-to-channel mappings for this table.
    columns: list[DatatableColumn]
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class SecondaryDataSample(BaseModel):
    """One (channel, timestamp, value) sample on a secondary channel."""
    # Code of the channel the sample belongs to.
    channel: str
    timestamp: datetime
    value: float
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class MetricDataSample(BaseModel):
    """One metric sample, tied to a specific event."""
    # Code of the channel the sample belongs to.
    channel: str
    timestamp: datetime
    value: float
    # Code of the event this metric value was computed for.
    event: str
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class LatestDataSample(BaseModel):
    """The most recent sample reported for a channel."""
    # Code of the channel the sample belongs to.
    channel: str
    timestamp: datetime
    value: float
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
# Reusable pydantic adapters for validating / serialising JSON lists of the
# models above; built once at module import so request paths don't rebuild them.
_ChannellistAdapter = TypeAdapter(list[Channel])
_ExpressionlistAdapter = TypeAdapter(list[Expression])
_DatatablelistAdapter = TypeAdapter(list[Datatable])
_UnitslistAdapter = TypeAdapter(list[Units])
_MetricSamplelistAdapter = TypeAdapter(list[MetricDataSample])
_SecondarySamplelistAdapter = TypeAdapter(list[SecondaryDataSample])
_LatestSampleListAdapter = TypeAdapter(list[LatestDataSample])
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class FrameFormat(enum.Enum):
    """Layout requested for exported data frames (wire values)."""
    # One column per channel — TODO confirm server-side semantics.
    COLUMNS = "COLUMNS"
    # One row per sample — TODO confirm server-side semantics.
    SAMPLES = "SAMPLES"
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
class FileFormat(enum.Enum):
    """File format of the exported data file (wire values)."""
    FEATHER = "FEATHER"
    PARQUET = "PARQUET"
    CSV = "CSV"
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class ChannelFormat(enum.Enum):
    """How channels are identified in exported data (wire values)."""
    # Identify channels by their unique code.
    CODE = "CODE"
    # Identify channels by their human-readable label.
    LABEL = "LABEL"
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class AggregationOptions(BaseModel):
    """Server-side aggregation applied when exporting data."""
    # Reduction applied within each interval.
    method: Literal['min', 'max', 'mean', 'sum', 'count', 'greatest']
    # Width of each aggregation bucket.
    interval: Literal['second', 'minute', 'hour', 'day', 'week', 'month', 'year']
    # If True, use a rolling window instead of fixed buckets — TODO confirm.
    rolling: bool = False
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class GetStatusRequestResponse(BaseModel):
    """Status of an asynchronous data-export request."""

    class GetDataRequestStatusCompletedResult(BaseModel):
        """Details of a finished export, including the download URL."""

        class ResultMetadata(BaseModel):
            # Timestamp of the first sample in the exported data; None —
            # presumably when the export contains no samples; TODO confirm.
            first_timestamp: Optional[datetime]

        # Presigned URL from which the exported file can be downloaded.
        result_url: str
        # When the presigned URL expires.
        expires_at: datetime
        # MIME type of the exported file.
        mime_type: str
        # Size of the exported file in bytes.
        file_size: int
        # Additional metadata about the exported data.
        metadata: ResultMetadata

    # Identifier used to poll the request via get_request().
    request_id: str
    # HTTP-style status of the request itself (200 = complete, >= 400 = error).
    status_code: int
    # Human-readable status / error message.
    message: str
    requested_at: Optional[datetime]
    completed_at: Optional[datetime]
    # Populated once the request has completed successfully.
    result: Optional["GetStatusRequestResponse.GetDataRequestStatusCompletedResult"]
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
class Healthcheck(BaseModel):
    """Response body of the data-service healthcheck endpoint."""
    # Service status string — TODO confirm the set of possible values.
    status: str
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
class MercutoDataService:
    """Typed wrapper around the Mercuto data-service HTTP API.

    Every public method builds an HTTP request, sends it through the owning
    ``MercutoClient`` (``self._client._http_request``) and parses the JSON
    response with the pydantic models defined in this module.
    """

    # Page size shared by the paginated list endpoints.
    _PAGE_SIZE = 100

    def __init__(self, client: 'MercutoClient', path: str = '/v2/data') -> None:
        self._client = client
        self._path = path

    # --- internal helpers ---

    def _paginate(self, endpoint: str, adapter: Any, params: dict[str, Any]) -> list[Any]:
        """Collect every page from a paginated GET *endpoint*.

        ``params`` must carry ``limit`` and ``offset`` keys; the dict is
        mutated in place as the offset advances.  A page shorter than
        ``limit`` marks the end of the listing.
        """
        items: list[Any] = []
        while True:
            r = self._client._http_request(endpoint, 'GET', params=params)
            page = adapter.validate_json(r.text)
            items.extend(page)
            if len(page) < params['limit']:
                return items
            params['offset'] += params['limit']

    @staticmethod
    def _completed_result(
            status: GetStatusRequestResponse
    ) -> Optional[GetStatusRequestResponse.GetDataRequestStatusCompletedResult]:
        """Return *status*'s result if the export finished, else None.

        Raises:
            MercutoHTTPException: if the server reported an error (>= 400).
        """
        if status.status_code >= 400:
            raise MercutoHTTPException(status.message, status.status_code)
        if status.status_code == 200 and status.result and status.result.result_url:
            return status.result
        return None

    def healthcheck(self) -> Healthcheck:
        """Fetch the service health status."""
        r = self._client._http_request(f"{self._path}/healthcheck", "GET")
        return Healthcheck.model_validate_json(r.text)

    def refresh_continuous_aggregates(self) -> None:
        """Request a refresh of continuous aggregates on all tables."""
        self._client._http_request(f"{self._path}/meta/refresh-aggregates", "POST")

    # --- channels ---

    def list_channels(self, project: str, classification: Optional[ChannelClassification] = None,
                      aggregate: Optional[str] = None, metric: Optional[str] = None,
                      show_hidden: bool = False) -> list[Channel]:
        """List every channel of *project*, optionally filtered."""
        params: dict[str, Any] = {
            'project': project,
            'limit': self._PAGE_SIZE,
            'offset': 0,
            'show_hidden': show_hidden,
        }
        # Truthiness (not ``is not None``) kept from the original behaviour:
        # empty-string filters are deliberately not sent.
        if classification:
            params['classification'] = classification.value
        if aggregate:
            params['aggregate'] = aggregate
        if metric:
            params['metric'] = metric
        return self._paginate(f'{self._path}/channels', _ChannellistAdapter, params)

    def get_channel(self, code: str) -> Optional[Channel]:
        """Fetch a single channel by code, or None if it does not exist."""
        r = self._client._http_request(f'{self._path}/channels/{code}', 'GET', raise_for_status=False)
        if r.status_code == 404:
            return None
        _raise_for_response(r)
        return Channel.model_validate_json(r.text)

    def update_channel(self, code: str, label: Optional[str] = None, units: Optional[str] = None,
                       metric: Optional[str] = None, multiplier: Optional[float] = None,
                       offset: Optional[float] = None) -> Channel:
        """Partially update a channel; only non-None arguments are sent."""
        updates = {
            'label': label,
            'units': units,
            'metric': metric,
            'multiplier': multiplier,
            'offset': offset,
        }
        payload: _PayloadType = {key: value for key, value in updates.items() if value is not None}
        r = self._client._http_request(f'{self._path}/channels/{code}', 'PATCH', json=payload)
        return Channel.model_validate_json(r.text)

    def delete_channel(self, code: str) -> bool:
        """Delete a channel; True if the server confirmed with 204."""
        r = self._client._http_request(f'{self._path}/channels/{code}', 'DELETE')
        return r.status_code == 204

    def create_channel(self, project: str,
                       label: str,
                       classification: ChannelClassification = ChannelClassification.SECONDARY,
                       sampling_period: Optional[timedelta] = None,
                       multiplier: float = 1.0, offset: float = 0.0,
                       value_range_min: Optional[float] = None, value_range_max: Optional[float] = None,
                       delta_max: Optional[float] = None,
                       units: Optional[str] = None,
                       aggregate: Optional[str] = None,
                       source: Optional[str] = None,
                       metric: Optional[str] = None) -> Channel:
        """Create a new channel and return the server's record of it."""
        payload: _PayloadType = {
            'project': project,
            'label': label,
            'classification': classification.value,
            'multiplier': multiplier,
            'offset': offset,
        }
        if sampling_period is not None:
            # timedelta is not JSON-serialisable; encode it explicitly.
            payload['sampling_period'] = serialise_timedelta(sampling_period)
        optionals = {
            'value_range_min': value_range_min,
            'value_range_max': value_range_max,
            'delta_max': delta_max,
            'units': units,
            'aggregate': aggregate,
            'source': source,
            'metric': metric,
        }
        for key, value in optionals.items():
            if value is not None:
                payload[key] = value
        r = self._client._http_request(f'{self._path}/channels', 'PUT', json=payload)
        return Channel.model_validate_json(r.text)

    # --- expressions ---

    def create_expression(
            self,
            project: str,
            label: str,
            expression: str,
            units: Optional[str] = None,
            aggregate: Optional[str] = None,
            metric: Optional[str] = None
    ) -> Expression:
        """Create a computed expression and return it with its target channel."""
        payload: _PayloadType = {
            "project": project,
            "label": label,
            "expression": expression,
        }
        for key, value in (("units", units), ("aggregate", aggregate), ("metric", metric)):
            if value is not None:
                payload[key] = value
        r = self._client._http_request(f'{self._path}/expressions', 'PUT', json=payload)
        return Expression.model_validate_json(r.text)

    def delete_expression(self, code: str) -> bool:
        """Delete an expression; True if the server confirmed with 202."""
        r = self._client._http_request(f'{self._path}/expressions/{code}', 'DELETE')
        # NOTE(review): this endpoint answers 202 (accepted) rather than the
        # 204 used by delete_channel — confirm against the API.
        return r.status_code == 202

    # --- datatables ---

    def create_datatable(self, project: str, name: str, sampling_period: timedelta, column_labels: Collection[str]) -> Datatable:
        """Create a datatable with the given column labels."""
        payload: _PayloadType = {
            "project": project,
            "name": name,
            "sampling_period": serialise_timedelta(sampling_period),
            "column_labels": list(column_labels),
        }
        r = self._client._http_request(f'{self._path}/datatables', 'PUT', json=payload)
        return Datatable.model_validate_json(r.text)

    def list_datatables(self, project: str) -> list[Datatable]:
        """List every datatable of *project*."""
        params: dict[str, Any] = {
            "project": project,
            "limit": self._PAGE_SIZE,
            "offset": 0,
        }
        return self._paginate(f'{self._path}/datatables', _DatatablelistAdapter, params)

    # --- units ---

    def list_units(self) -> list[Units]:
        """List all units known to the service (not paginated)."""
        r = self._client._http_request(f'{self._path}/units', 'GET')
        return _UnitslistAdapter.validate_json(r.text)

    def create_unit(self, name: str, unit: str) -> Units:
        """Register a new unit of measure."""
        payload: _PayloadType = {
            "name": name,
            "unit": unit,
        }
        r = self._client._http_request(f'{self._path}/units', 'PUT', json=payload)
        return Units.model_validate_json(r.text)

    # --- requests ---

    def create_request(
            self,
            start_time: datetime,
            end_time: datetime,
            project: Optional[str] = None,
            channels: Optional[Collection[str]] = None,
            classification: Optional[ChannelClassification] = None,
            frame_format: FrameFormat = FrameFormat.SAMPLES,
            file_format: FileFormat = FileFormat.PARQUET,
            channel_format: ChannelFormat = ChannelFormat.CODE,
            aggregation: Optional[AggregationOptions] = None,
            timeout: float = 0
    ) -> GetStatusRequestResponse:
        """Start an asynchronous data-export request.

        ``timeout`` is how long (seconds) the server may block before
        answering with a still-pending status; it is capped at 20 s.

        Raises:
            ValueError: if neither ``channels`` nor ``classification`` is given.
        """
        timeout = min(timeout, 20)  # Cap timeout to 20 seconds

        if channels is None and classification is None:
            raise ValueError("Must supply either channels or classification.")

        payload: _PayloadType = {
            "start_time": start_time.isoformat(),
            "end_time": end_time.isoformat(),
            "frame_format": frame_format.value,
            "file_format": file_format.value,
            "channel_format": channel_format.value,
        }
        if project:
            payload["project"] = project
        if channels:
            payload["channels"] = list(channels)
        if classification:
            payload["classification"] = classification.value
        if aggregation is not None:
            payload["aggregation"] = aggregation.model_dump(mode='json')

        r = self._client._http_request(
            f'{self._path}/requests', 'POST',
            json=payload,
            params={"timeout": timeout}
        )
        return GetStatusRequestResponse.model_validate_json(r.text)

    def get_request(self, request_id: str) -> GetStatusRequestResponse:
        """Fetch the current status of an export request."""
        r = self._client._http_request(f'{self._path}/requests/{request_id}', 'GET')
        return GetStatusRequestResponse.model_validate_json(r.text)

    # --- request helpers ---

    def load_presigned_url(
            self,
            start_time: datetime,
            end_time: datetime,
            project: Optional[str] = None,
            channels: Optional[Collection[str]] = None,
            classification: Optional[ChannelClassification] = None,
            frame_format: FrameFormat = FrameFormat.SAMPLES,
            file_format: FileFormat = FileFormat.PARQUET,
            channel_format: ChannelFormat = ChannelFormat.CODE,
            aggregation: Optional[AggregationOptions] = None,
            poll_interval: float = 0.25,
            timeout: int = 60
    ) -> str:
        """Request a presigned download URL for data and poll until ready.

        Returns:
            The presigned result_url as a string.
        Raises:
            MercutoHTTPException, MercutoClientException on error or timeout.
        """
        result = self.load_data_request(
            start_time=start_time,
            end_time=end_time,
            project=project,
            channels=channels,
            classification=classification,
            frame_format=frame_format,
            file_format=file_format,
            channel_format=channel_format,
            aggregation=aggregation,
            poll_interval=poll_interval,
            timeout=timeout
        )
        if result.result_url is None:
            raise MercutoClientException("Failed to obtain presigned URL.")
        return result.result_url

    def load_data_request(
            self,
            start_time: datetime,
            end_time: datetime,
            project: Optional[str] = None,
            channels: Optional[Collection[str]] = None,
            classification: Optional[ChannelClassification] = None,
            frame_format: FrameFormat = FrameFormat.SAMPLES,
            file_format: FileFormat = FileFormat.PARQUET,
            channel_format: ChannelFormat = ChannelFormat.CODE,
            aggregation: Optional[AggregationOptions] = None,
            poll_interval: float = 0.25,
            timeout: int = 60
    ) -> GetStatusRequestResponse.GetDataRequestStatusCompletedResult:
        """Start a data-export request and poll until it completes.

        Returns:
            The completed result (with presigned URL) of the request.
        Raises:
            MercutoHTTPException, MercutoClientException on error or timeout.
        """
        # Start the request, using poll_interval as the initial server-side timeout.
        status = self.create_request(
            project=project,
            channels=channels,
            start_time=start_time,
            end_time=end_time,
            classification=classification,
            frame_format=frame_format,
            file_format=file_format,
            channel_format=channel_format,
            timeout=poll_interval,
            aggregation=aggregation
        )
        request_id = status.request_id

        # If already complete, return immediately.
        result = self._completed_result(status)
        if result is not None:
            return result

        # Otherwise, poll until completion or client-side timeout.
        deadline = time.time() + timeout
        while True:
            status = self.get_request(request_id)
            result = self._completed_result(status)
            if result is not None:
                return result
            if time.time() > deadline:
                raise MercutoClientException("Timed out waiting for presigned url.")
            time.sleep(poll_interval)

    # --- samples ---

    def insert_secondary_samples(
            self,
            project: str,
            samples: Collection[SecondaryDataSample]
    ) -> None:
        """Insert secondary samples in batches of 5000.

        No return value; the server answers 202 (accepted).
        """
        for batch in batched(samples, 5000):
            payload = _SecondarySamplelistAdapter.dump_python(list(batch), mode='json')
            self._client._http_request(
                f'{self._path}/samples/secondary', 'PUT', json=payload, params={"project": project}
            )

    def insert_metric_samples(
            self,
            project: str,
            samples: Collection[MetricDataSample]
    ) -> None:
        """Insert metric samples in batches of 5000.

        No return value; the server answers 202 (accepted).
        """
        for batch in batched(samples, 5000):
            payload = _MetricSamplelistAdapter.dump_python(list(batch), mode='json')
            self._client._http_request(
                f'{self._path}/samples/metric', 'PUT', json=payload, params={"project": project}
            )

    def load_secondary_samples(
            self,
            channels: Collection[str],
            start_time: datetime,
            end_time: datetime,
            limit: int = 100
    ) -> list[SecondaryDataSample]:
        """Load up to ``limit`` (default 100) secondary samples."""
        params: _PayloadType = {
            "channels": list(channels),
            "start_time": start_time.isoformat(),
            "end_time": end_time.isoformat(),
            "limit": limit
        }
        r = self._client._http_request(
            f'{self._path}/samples/secondary', 'GET', params=params
        )
        return _SecondarySamplelistAdapter.validate_json(r.text)

    def load_metric_samples(
            self,
            channels: Optional[Collection[str]] = None,
            start_time: Optional[datetime] = None,
            end_time: Optional[datetime] = None,
            events: Optional[Collection[str]] = None,
            project: Optional[str] = None,
            limit: int = 100
    ) -> list[MetricDataSample]:
        """Load up to ``limit`` (default 100) metric samples, optionally filtered."""
        params: _PayloadType = {
            "limit": limit
        }
        if project is not None:
            params["project"] = project
        if channels is not None:
            params["channels"] = list(channels)
        if start_time is not None:
            params["start_time"] = start_time.isoformat()
        if end_time is not None:
            params["end_time"] = end_time.isoformat()
        if events is not None:
            # NOTE: the query key is singular 'event' even though it carries a list.
            params["event"] = list(events)
        r = self._client._http_request(
            f'{self._path}/samples/metric', 'GET', params=params
        )
        return _MetricSamplelistAdapter.validate_json(r.text)

    def load_metric_sample(self, channel: str, event: str) -> Optional[float]:
        """Load a single metric value for *channel* and *event*, or None."""
        samples = self.load_metric_samples([channel], events=[event])
        return samples[0].value if samples else None

    def delete_metric_samples(self, project: str, event: str, channels: Optional[Collection[str]] = None) -> None:
        """Delete metric samples of *event*, optionally limited to *channels*."""
        params: _PayloadType = {"project": project, "event": event}
        if channels is not None:
            params["channels"] = list(channels)
        self._client._http_request(
            f'{self._path}/samples/metric', 'DELETE', params=params
        )

    def upload_file(self, project: str, datatable: str, file: str | bytes | TextIO | BinaryIO,
                    filename: Optional[str] = None,
                    timezone: Optional[str] = None) -> None:
        """Upload a data file into *datatable*.

        *file* may be a path (opened here in binary mode) or an already-open
        file-like object / bytes, which is passed through unchanged.
        """
        if isinstance(file, str):
            ctx = open(file, 'rb')
            filename = filename or os.path.basename(file)
        else:
            # Caller-supplied handle: do not close it on exit.
            ctx = nullcontext(file)  # type: ignore
            filename = filename or 'file.dat'

        params: _PayloadType = {
            "project": project,
            "datatable": datatable,
        }
        if timezone is not None:
            params["timezone"] = timezone

        with ctx as f:
            # NOTE(review): content type is always 'text/csv' regardless of the
            # actual file type — confirm this matches server expectations.
            self._client._http_request(f'{self._path}/files/upload/small', 'POST',
                                       params=params,
                                       files={'file': (filename, f, 'text/csv')})

    def get_latest_samples(self, project: str, include_primary: bool = True) -> list[LatestDataSample]:
        """Fetch the most recent sample of every channel in *project*."""
        params: _PayloadType = {
            "project": project,
            "include_primary": include_primary
        }
        r = self._client._http_request(
            f'{self._path}/statistics/latest-samples', 'GET', params=params
        )
        return _LatestSampleListAdapter.validate_json(r.text)