coredis-5.5.0-cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- 22fe76227e35f92ab5c3__mypyc.cpython-313-darwin.so +0 -0
- coredis/__init__.py +42 -0
- coredis/_enum.py +42 -0
- coredis/_json.py +11 -0
- coredis/_packer.cpython-313-darwin.so +0 -0
- coredis/_packer.py +71 -0
- coredis/_protocols.py +50 -0
- coredis/_py_311_typing.py +20 -0
- coredis/_py_312_typing.py +17 -0
- coredis/_sidecar.py +114 -0
- coredis/_utils.cpython-313-darwin.so +0 -0
- coredis/_utils.py +440 -0
- coredis/_version.py +34 -0
- coredis/_version.pyi +1 -0
- coredis/cache.py +801 -0
- coredis/client/__init__.py +6 -0
- coredis/client/basic.py +1240 -0
- coredis/client/cluster.py +1265 -0
- coredis/commands/__init__.py +64 -0
- coredis/commands/_key_spec.py +517 -0
- coredis/commands/_utils.py +108 -0
- coredis/commands/_validators.py +159 -0
- coredis/commands/_wrappers.py +175 -0
- coredis/commands/bitfield.py +110 -0
- coredis/commands/constants.py +662 -0
- coredis/commands/core.py +8484 -0
- coredis/commands/function.py +408 -0
- coredis/commands/monitor.py +168 -0
- coredis/commands/pubsub.py +905 -0
- coredis/commands/request.py +108 -0
- coredis/commands/script.py +296 -0
- coredis/commands/sentinel.py +246 -0
- coredis/config.py +50 -0
- coredis/connection.py +906 -0
- coredis/constants.cpython-313-darwin.so +0 -0
- coredis/constants.py +37 -0
- coredis/credentials.py +45 -0
- coredis/exceptions.py +360 -0
- coredis/experimental/__init__.py +1 -0
- coredis/globals.py +23 -0
- coredis/modules/__init__.py +121 -0
- coredis/modules/autocomplete.py +138 -0
- coredis/modules/base.py +262 -0
- coredis/modules/filters.py +1319 -0
- coredis/modules/graph.py +362 -0
- coredis/modules/json.py +691 -0
- coredis/modules/response/__init__.py +0 -0
- coredis/modules/response/_callbacks/__init__.py +0 -0
- coredis/modules/response/_callbacks/autocomplete.py +42 -0
- coredis/modules/response/_callbacks/graph.py +237 -0
- coredis/modules/response/_callbacks/json.py +21 -0
- coredis/modules/response/_callbacks/search.py +221 -0
- coredis/modules/response/_callbacks/timeseries.py +158 -0
- coredis/modules/response/types.py +179 -0
- coredis/modules/search.py +1089 -0
- coredis/modules/timeseries.py +1139 -0
- coredis/parser.cpython-313-darwin.so +0 -0
- coredis/parser.py +344 -0
- coredis/pipeline.py +1225 -0
- coredis/pool/__init__.py +11 -0
- coredis/pool/basic.py +453 -0
- coredis/pool/cluster.py +517 -0
- coredis/pool/nodemanager.py +340 -0
- coredis/py.typed +0 -0
- coredis/recipes/__init__.py +0 -0
- coredis/recipes/credentials/__init__.py +5 -0
- coredis/recipes/credentials/iam_provider.py +63 -0
- coredis/recipes/locks/__init__.py +5 -0
- coredis/recipes/locks/extend.lua +17 -0
- coredis/recipes/locks/lua_lock.py +281 -0
- coredis/recipes/locks/release.lua +10 -0
- coredis/response/__init__.py +5 -0
- coredis/response/_callbacks/__init__.py +538 -0
- coredis/response/_callbacks/acl.py +32 -0
- coredis/response/_callbacks/cluster.py +183 -0
- coredis/response/_callbacks/command.py +86 -0
- coredis/response/_callbacks/connection.py +31 -0
- coredis/response/_callbacks/geo.py +58 -0
- coredis/response/_callbacks/hash.py +85 -0
- coredis/response/_callbacks/keys.py +59 -0
- coredis/response/_callbacks/module.py +33 -0
- coredis/response/_callbacks/script.py +85 -0
- coredis/response/_callbacks/sentinel.py +179 -0
- coredis/response/_callbacks/server.py +241 -0
- coredis/response/_callbacks/sets.py +44 -0
- coredis/response/_callbacks/sorted_set.py +204 -0
- coredis/response/_callbacks/streams.py +185 -0
- coredis/response/_callbacks/strings.py +70 -0
- coredis/response/_callbacks/vector_sets.py +159 -0
- coredis/response/_utils.py +33 -0
- coredis/response/types.py +416 -0
- coredis/retry.py +233 -0
- coredis/sentinel.py +477 -0
- coredis/stream.py +369 -0
- coredis/tokens.py +2286 -0
- coredis/typing.py +593 -0
- coredis-5.5.0.dist-info/METADATA +211 -0
- coredis-5.5.0.dist-info/RECORD +100 -0
- coredis-5.5.0.dist-info/WHEEL +6 -0
- coredis-5.5.0.dist-info/licenses/LICENSE +23 -0
coredis/modules/timeseries.py
@@ -0,0 +1,1139 @@
from __future__ import annotations

import itertools
from datetime import datetime, timedelta

from deprecated.sphinx import versionadded

from coredis.typing import (
    AnyStr,
    CommandArgList,
    KeyT,
    Literal,
    Mapping,
    Parameters,
    RedisValueT,
    ResponseType,
    StringT,
    ValueT,
)

from .._utils import dict_to_flat_list
from ..commands._utils import normalized_milliseconds, normalized_time_milliseconds
from ..commands._validators import (
    mutually_exclusive_parameters,
    mutually_inclusive_parameters,
)
from ..commands._wrappers import ClusterCommandConfig
from ..commands.constants import CommandFlag, CommandGroup, CommandName, NodeFlag
from ..commands.request import CommandRequest
from ..response._callbacks import (
    ClusterMergeSets,
    IntCallback,
    SetCallback,
    SimpleStringCallback,
    TupleCallback,
)
from ..tokens import PrefixToken, PureToken
from .base import Module, ModuleGroup, module_command
from .response._callbacks.timeseries import (
    ClusterMergeTimeSeries,
    SampleCallback,
    SamplesCallback,
    TimeSeriesCallback,
    TimeSeriesInfoCallback,
    TimeSeriesMultiCallback,
)


def normalized_timestamp(ts: int | datetime | StringT) -> StringT | int:
    if isinstance(ts, (bytes, str)):
        return ts
    return normalized_time_milliseconds(ts)


class RedisTimeSeries(Module[AnyStr]):
    NAME = "timeseries"
    FULL_NAME = "RedisTimeSeries"
    DESCRIPTION = """RedisTimeSeries is a Redis module that implements a time series
    data structure. It is designed to be used as a database for time series data,
    and is optimized for fast insertion and retrieval of time series data.
    """
    DOCUMENTATION_URL = "https://redis.io/docs/stack/timeseries/"


@versionadded(version="4.12")
class TimeSeries(ModuleGroup[AnyStr]):
    MODULE = RedisTimeSeries
    COMMAND_GROUP = CommandGroup.TIMESERIES

    @module_command(
        CommandName.TS_CREATE,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def create(
        self,
        key: KeyT,
        retention: int | timedelta | None = None,
        encoding: Literal[PureToken.COMPRESSED, PureToken.UNCOMPRESSED] | None = None,
        chunk_size: int | None = None,
        duplicate_policy: None
        | (
            Literal[
                PureToken.BLOCK,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.SUM,
            ]
        ) = None,
        labels: Mapping[StringT, ValueT] | None = None,
    ) -> CommandRequest[bool]:
        """
        Create a new time series with the given key.

        :param key: The key name for the time series.
        :param retention: Maximum age for samples compared to the highest reported timestamp,
         in milliseconds.
        :param encoding: Specifies the series samples encoding format as ``COMPRESSED`` or
         ``UNCOMPRESSED``.
        :param chunk_size: Initial allocation size, in bytes, for the data part of each new chunk.
        :param duplicate_policy: Policy for handling insertion of multiple samples with identical
         timestamps.
        :param labels: A dictionary of labels to be associated with the time series.
        :return: True if the time series was created successfully, False otherwise.
        """
        command_arguments: CommandArgList = [key]
        if retention is not None:
            command_arguments.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
        if encoding:
            command_arguments.extend([PrefixToken.ENCODING, encoding])
        if chunk_size is not None:
            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
        if duplicate_policy is not None:
            command_arguments.extend([PrefixToken.DUPLICATE_POLICY, duplicate_policy])
        if labels:
            command_arguments.extend(
                [
                    PrefixToken.LABELS,
                    *dict_to_flat_list(labels),  # type: ignore
                ]
            )
        return self.client.create_request(
            CommandName.TS_CREATE, *command_arguments, callback=SimpleStringCallback()
        )

    @module_command(
        CommandName.TS_DEL,
        group=COMMAND_GROUP,
        version_introduced="1.6.0",
        module=MODULE,
    )
    def delete(
        self,
        key: KeyT,
        fromtimestamp: int | datetime | StringT,
        totimestamp: int | datetime | StringT,
    ) -> CommandRequest[int]:
        """
        Delete all samples between two timestamps for a given time series.

        :param key: Key name for the time series.
        :param fromtimestamp: Start timestamp for the range deletion.
        :param totimestamp: End timestamp for the range deletion.
        :return: The number of samples that were deleted, or an error reply.
        """
        return self.client.create_request(
            CommandName.TS_DEL,
            key,
            normalized_timestamp(fromtimestamp),
            normalized_timestamp(totimestamp),
            callback=IntCallback(),
        )

    @module_command(
        CommandName.TS_ALTER,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def alter(
        self,
        key: KeyT,
        labels: Mapping[StringT, StringT] | None = None,
        retention: int | None = None,
        chunk_size: int | None = None,
        duplicate_policy: None
        | (
            Literal[
                PureToken.BLOCK,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.SUM,
            ]
        ) = None,
    ) -> CommandRequest[bool]:
        """
        Update the retention, chunk size, duplicate policy, and labels of an existing time series.

        :param key: Key name for the time series.
        :param labels: Dictionary mapping labels to values that represent metadata labels of the
         key and serve as a secondary index.
        :param retention: Maximum retention period, compared to the maximum existing timestamp, in
         milliseconds.
        :param chunk_size: Initial allocation size, in bytes, for the data part of each new chunk.
        :param duplicate_policy: Policy for handling multiple samples with identical timestamps.
        :return: True if executed correctly, False otherwise.
        """
        command_arguments: CommandArgList = [key]
        if labels:
            command_arguments.extend(
                [
                    PrefixToken.LABELS,
                    *dict_to_flat_list(labels),  # type: ignore
                ]
            )
        if retention is not None:
            command_arguments.extend([PrefixToken.RETENTION, retention])
        if chunk_size is not None:
            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
        if duplicate_policy:
            command_arguments.extend([PrefixToken.DUPLICATE_POLICY, duplicate_policy])
        return self.client.create_request(
            CommandName.TS_ALTER, *command_arguments, callback=SimpleStringCallback()
        )

    @module_command(
        CommandName.TS_ADD,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def add(
        self,
        key: KeyT,
        timestamp: int | datetime | StringT,
        value: int | float,
        retention: int | None = None,
        encoding: Literal[PureToken.COMPRESSED, PureToken.UNCOMPRESSED] | None = None,
        chunk_size: int | None = None,
        duplicate_policy: None
        | (
            Literal[
                PureToken.BLOCK,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.SUM,
            ]
        ) = None,
        labels: Mapping[StringT, ValueT] | None = None,
    ) -> CommandRequest[int]:
        """
        Add a sample to a time series.

        :param key: Name of the time series.
        :param timestamp: UNIX sample timestamp in milliseconds or `*` to set the timestamp
         according to the server clock.
        :param value: Numeric data value of the sample.
        :param retention: Maximum retention period, compared to the maximum existing timestamp, in
         milliseconds.
        :param encoding: Encoding format for the series sample.
        :param chunk_size: Memory size, in bytes, allocated for each data chunk.
        :param duplicate_policy: Policy for handling samples with identical timestamps.
        :param labels: Dictionary of labels associated with the sample.
        :return: Number of samples added to the time series.
        """
        command_arguments: CommandArgList = [
            key,
            normalized_timestamp(timestamp),
            value,
        ]
        if retention is not None:
            command_arguments.extend([PrefixToken.RETENTION, retention])
        if encoding:
            command_arguments.extend([PrefixToken.ENCODING, encoding])
        if chunk_size is not None:
            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
        if duplicate_policy:
            command_arguments.extend([PrefixToken.ON_DUPLICATE, duplicate_policy])
        if labels:
            command_arguments.extend(
                [
                    PrefixToken.LABELS,
                    *dict_to_flat_list(labels),  # type: ignore
                ]
            )
        return self.client.create_request(
            CommandName.TS_ADD, *command_arguments, callback=IntCallback()
        )

    @module_command(
        CommandName.TS_MADD,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def madd(
        self, ktvs: Parameters[tuple[AnyStr, int, int | float]]
    ) -> CommandRequest[tuple[int, ...]]:
        """
        Append new samples to one or more time series.

        :param ktvs: A list of tuples, where each tuple contains the key name for the time series,
         an integer UNIX sample timestamp in milliseconds or `*` to set the timestamp according
         to the server clock, and a numeric data value of the sample.
        :return: A tuple of integers representing the timestamp of each added sample
        """
        command_arguments: CommandArgList = list(itertools.chain(*ktvs))

        return self.client.create_request(
            CommandName.TS_MADD, *command_arguments, callback=TupleCallback[int]()
        )

    @module_command(
        CommandName.TS_INCRBY,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def incrby(
        self,
        key: KeyT,
        value: int | float,
        labels: Mapping[StringT, RedisValueT] | None = None,
        timestamp: datetime | int | StringT | None = None,
        retention: int | timedelta | None = None,
        uncompressed: bool | None = None,
        chunk_size: int | None = None,
    ) -> CommandRequest[int]:
        """
        Increments the value of the sample with the maximum existing timestamp, or creates
        a new sample with a value equal to the value of the sample with the maximum existing
        timestamp with a given increment.

        :param key: Name of the time series.
        :param value: Numeric data value of the sample.
        :param labels: Set of label-value pairs that represent metadata labels of the key and serve
         as a secondary index. Use it only if you are creating a new time series.
        :param timestamp: UNIX sample timestamp in milliseconds or `*` to set the timestamp
         according to the server clock. `timestamp` must be equal to or higher than the maximum
         existing timestamp. When not specified, the timestamp is set according to the server clock.
        :param retention: Maximum retention period, compared to the maximum existing timestamp,
         in milliseconds. Use it only if you are creating a new time series.
        :param uncompressed: Changes data storage from compressed (default) to uncompressed.
         Use it only if you are creating a new time series.
        :param chunk_size: Memory size, in bytes, allocated for each data chunk.
         Use it only if you are creating a new time series.
        :return: The timestamp of the upserted sample, or an error.
        """
        command_arguments: CommandArgList = [key, value]
        if timestamp:
            command_arguments.extend([PrefixToken.TIMESTAMP, normalized_timestamp(timestamp)])
        if retention:
            command_arguments.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
        if uncompressed:
            command_arguments.append(PureToken.UNCOMPRESSED)
        if chunk_size:
            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
        if labels:
            command_arguments.extend(
                [PrefixToken.LABELS, *dict_to_flat_list(labels)]  # type: ignore
            )

        return self.client.create_request(
            CommandName.TS_INCRBY, *command_arguments, callback=IntCallback()
        )

    @module_command(
        CommandName.TS_DECRBY,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def decrby(
        self,
        key: KeyT,
        value: int | float,
        labels: Mapping[StringT, RedisValueT] | None = None,
        timestamp: datetime | int | StringT | None = None,
        retention: int | timedelta | None = None,
        uncompressed: bool | None = None,
        chunk_size: int | None = None,
    ) -> CommandRequest[int]:
        """
        Decrease the value of the sample with the maximum existing timestamp, or create a new
        sample with a value equal to the value of the sample with the maximum existing timestamp
        with a given decrement.

        :param key: Key name for the time series.
        :param value: Numeric data value of the sample.
        :param labels: Mapping of labels to values that represent metadata labels of the key
         and serve as a secondary index. Use it only if you are creating a new time series.
        :param timestamp: UNIX sample timestamp in milliseconds or `*` to set the timestamp
         according to the server clock. When not specified, the timestamp is set according
         to the server clock.
        :param retention: Maximum retention period, compared to the maximum existing timestamp,
         in milliseconds. Use it only if you are creating a new time series. It is ignored if
         you are adding samples to an existing time series.
        :param uncompressed: Changes data storage from compressed (default) to uncompressed.
         Use it only if you are creating a new time series. It is ignored if you are adding samples
         to an existing time series.
        :param chunk_size: Memory size, in bytes, allocated for each data chunk. Use it only if
         you are creating a new time series. It is ignored if you are adding samples to an existing
         time series.
        :return: The timestamp of the upserted sample, or an error if the operation failed.
        """
        command_arguments: CommandArgList = [key, value]

        if timestamp:
            command_arguments.extend([PrefixToken.TIMESTAMP, normalized_timestamp(timestamp)])
        if retention:
            command_arguments.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
        if uncompressed:
            command_arguments.append(PureToken.UNCOMPRESSED)
        if chunk_size:
            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
        if labels:
            command_arguments.extend(
                [PrefixToken.LABELS, *dict_to_flat_list(labels)]  # type: ignore
            )
        return self.client.create_request(
            CommandName.TS_DECRBY, *command_arguments, callback=IntCallback()
        )

    @module_command(
        CommandName.TS_CREATERULE,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        arguments={"aligntimestamp": {"version_introduced": "1.8.0"}},
        module=MODULE,
    )
    def createrule(
        self,
        source: KeyT,
        destination: KeyT,
        aggregation: Literal[
            PureToken.AVG,
            PureToken.COUNT,
            PureToken.FIRST,
            PureToken.LAST,
            PureToken.MAX,
            PureToken.MIN,
            PureToken.RANGE,
            PureToken.STD_P,
            PureToken.STD_S,
            PureToken.SUM,
            PureToken.TWA,
            PureToken.VAR_P,
            PureToken.VAR_S,
        ],
        bucketduration: int | timedelta,
        aligntimestamp: int | None = None,
    ) -> CommandRequest[bool]:
        """
        Create a compaction rule

        :param source: Key name for the source time series.
        :param destination: Key name for the destination (compacted) time series.
        :param aggregation: Aggregates results into time buckets by the given aggregation type
        :param bucketduration: Duration of each bucket, in milliseconds.
        :param aligntimestamp: Ensures that there is a bucket that starts exactly at
         ``aligntimestamp`` and aligns all other buckets accordingly. It is expressed
         in milliseconds. The default value is 0 aligned with the epoch.
        :return: True if executed correctly, False otherwise.
        """
        command_arguments: CommandArgList = [source, destination]
        command_arguments.extend(
            [
                PrefixToken.AGGREGATION,
                aggregation,
                normalized_milliseconds(bucketduration),
            ]
        )
        if aligntimestamp is not None:
            command_arguments.append(aligntimestamp)
        return self.client.create_request(
            CommandName.TS_CREATERULE,
            *command_arguments,
            callback=SimpleStringCallback(),
        )

    @module_command(
        CommandName.TS_DELETERULE,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def deleterule(self, source: KeyT, destination: KeyT) -> CommandRequest[bool]:
        """
        Delete a compaction rule from a RedisTimeSeries sourceKey to a destinationKey.

        :param source: Key name for the source time series.
        :param destination: Key name for the destination (compacted) time series.
        :return: True if the command executed correctly, False otherwise.

        .. warning:: This command does not delete the compacted series.
        """
        command_arguments: CommandArgList = [source, destination]

        return self.client.create_request(
            CommandName.TS_DELETERULE,
            *command_arguments,
            callback=SimpleStringCallback(),
        )

    @mutually_inclusive_parameters("min_value", "max_value")
    @mutually_inclusive_parameters("aggregator", "bucketduration")
    @module_command(
        CommandName.TS_RANGE,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        arguments={
            "latest": {"version_introduced": "1.8.0"},
            "empty": {"version_introduced": "1.8.0"},
        },
        module=MODULE,
        flags={CommandFlag.READONLY},
        cacheable=True,
    )
    def range(
        self,
        key: KeyT,
        fromtimestamp: datetime | int | StringT,
        totimestamp: datetime | int | StringT,
        *,
        filter_by_ts: Parameters[int] | None = None,
        min_value: int | float | None = None,
        max_value: int | float | None = None,
        count: int | None = None,
        aggregator: None
        | (
            Literal[
                PureToken.AVG,
                PureToken.COUNT,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.RANGE,
                PureToken.STD_P,
                PureToken.STD_S,
                PureToken.SUM,
                PureToken.TWA,
                PureToken.VAR_P,
                PureToken.VAR_S,
            ]
        ) = None,
        bucketduration: int | timedelta | None = None,
        align: int | StringT | None = None,
        buckettimestamp: StringT | None = None,
        empty: bool | None = None,
        latest: bool | None = None,
    ) -> CommandRequest[tuple[tuple[int, float], ...] | tuple[()]]:
        """
        Query a range in forward direction.

        :param key: The key name for the time series.
        :param fromtimestamp: Start timestamp for the range query (integer UNIX timestamp in
         milliseconds) or `-` to denote the timestamp of the earliest sample in the time series.
        :param totimestamp: End timestamp for the range query (integer UNIX timestamp in
         milliseconds) or `+` to denote the timestamp of the latest sample in the time series.
        :param filter_by_ts: List of specific timestamps to filter samples by.
        :param min_value: Minimum value to filter samples by.
        :param max_value: Maximum value to filter samples by.
        :param count: Limits the number of returned samples.
        :param aggregator: Aggregates samples into time buckets by the provided aggregation type.
        :param bucketduration: Duration of each bucket in milliseconds.
        :param align: Time bucket alignment control for :paramref:`aggregator`.
        :param buckettimestamp: Timestamp of the first bucket.
        :param empty: If True, returns an empty list instead of raising an error when no data
         is available.
        :param latest: Used when a time series is a compaction. When ``True``, the command also
         reports the compacted value of the latest, possibly partial, bucket, given that
         this bucket's start time falls within ``[fromtimestamp, totimestamp]``.

        :return: A tuple of samples, where each sample is a tuple of timestamp and value.
        """
        command_arguments: CommandArgList = [
            key,
            normalized_timestamp(fromtimestamp),
            normalized_timestamp(totimestamp),
        ]
        if latest:
            command_arguments.append(b"LATEST")
        if filter_by_ts:
            _ts: list[int] = list(filter_by_ts)
            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
        if min_value is not None and max_value is not None:
            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
        if count is not None:
            command_arguments.extend([PrefixToken.COUNT, count])
        if aggregator and bucketduration is not None:
            if align is not None:
                command_arguments.extend([PrefixToken.ALIGN, align])
            command_arguments.extend(
                [
                    PrefixToken.AGGREGATION,
                    aggregator,
                    normalized_milliseconds(bucketduration),
                ]
            )
            if buckettimestamp is not None:
                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
            if empty is not None:
                command_arguments.append(PureToken.EMPTY)

        return self.client.create_request(
            CommandName.TS_RANGE, *command_arguments, callback=SamplesCallback()
        )

    @mutually_inclusive_parameters("min_value", "max_value")
    @mutually_inclusive_parameters("aggregator", "bucketduration")
    @module_command(
        CommandName.TS_REVRANGE,
        group=COMMAND_GROUP,
        version_introduced="1.4.0",
        arguments={
            "latest": {"version_introduced": "1.8.0"},
            "empty": {"version_introduced": "1.8.0"},
        },
        module=MODULE,
        flags={CommandFlag.READONLY},
        cacheable=True,
    )
    def revrange(
        self,
        key: KeyT,
        fromtimestamp: int | datetime | StringT,
        totimestamp: int | datetime | StringT,
        *,
        filter_by_ts: Parameters[int] | None = None,
        min_value: int | float | None = None,
        max_value: int | float | None = None,
        count: int | None = None,
        aggregator: None
        | (
            Literal[
                PureToken.AVG,
                PureToken.COUNT,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.RANGE,
                PureToken.STD_P,
                PureToken.STD_S,
                PureToken.SUM,
                PureToken.TWA,
                PureToken.VAR_P,
                PureToken.VAR_S,
            ]
        ) = None,
        bucketduration: int | timedelta | None = None,
        align: int | StringT | None = None,
        buckettimestamp: StringT | None = None,
        empty: bool | None = None,
        latest: bool | None = None,
    ) -> CommandRequest[tuple[tuple[int, float], ...] | tuple[()]]:
        """
        Query a range in reverse direction from a RedisTimeSeries key.

        :param key: The key name for the time series.
        :param fromtimestamp: Start timestamp for the range query (integer UNIX timestamp
         in milliseconds) or `-` to denote the timestamp of the earliest sample in the time series.
        :param totimestamp: End timestamp for the range query (integer UNIX timestamp in
         milliseconds) or `+` to denote the timestamp of the latest sample in the time series.
        :param filter_by_ts: List of specific timestamps to filter samples by.
        :param min_value: Minimum value to filter samples by.
        :param max_value: Maximum value to filter samples by.
        :param count: Limit the number of returned samples.
        :param aggregator: Aggregates samples into time buckets by the provided aggregation type.
        :param bucketduration: Duration of each bucket in milliseconds.
        :param align: Time bucket alignment control for :paramref:`aggregator`.
        :param buckettimestamp: Timestamp for the first bucket.
        :param empty: Return an empty list if no samples are found.
        :param latest: Report the compacted value of the latest, possibly partial, bucket.

        :return: A tuple of timestamp-value pairs in reverse order.
        """
        command_arguments: CommandArgList = [
            key,
            normalized_timestamp(fromtimestamp),
            normalized_timestamp(totimestamp),
        ]
        if latest:
            command_arguments.append(b"LATEST")
        if filter_by_ts:
            _ts: list[int] = list(filter_by_ts)
            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
        if min_value is not None and max_value is not None:
            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
        if count is not None:
            command_arguments.extend([PrefixToken.COUNT, count])
        if aggregator and bucketduration is not None:
            if align is not None:
                command_arguments.extend([PrefixToken.ALIGN, align])
            command_arguments.extend(
                [
                    PrefixToken.AGGREGATION,
                    aggregator,
                    normalized_milliseconds(bucketduration),
                ]
            )
            if buckettimestamp is not None:
                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
            if empty is not None:
                command_arguments.append(PureToken.EMPTY)

        return self.client.create_request(
            CommandName.TS_REVRANGE, *command_arguments, callback=SamplesCallback()
        )

    @mutually_inclusive_parameters("min_value", "max_value")
    @mutually_exclusive_parameters("withlabels", "selected_labels")
    @mutually_inclusive_parameters("aggregator", "bucketduration")
    @mutually_inclusive_parameters("groupby", "reducer")
    @module_command(
        CommandName.TS_MRANGE,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        arguments={
            "latest": {"version_introduced": "1.8.0"},
            "empty": {"version_introduced": "1.8.0"},
        },
        module=MODULE,
        cluster=ClusterCommandConfig(
            route=NodeFlag.PRIMARIES,
            combine=ClusterMergeTimeSeries(),
        ),
        flags={CommandFlag.READONLY},
    )
    def mrange(
        self,
        fromtimestamp: int | datetime | StringT,
        totimestamp: int | datetime | StringT,
        filters: Parameters[StringT] | None = None,
        *,
        filter_by_ts: Parameters[int] | None = None,
        min_value: int | float | None = None,
        max_value: int | float | None = None,
        withlabels: bool | None = None,
        selected_labels: Parameters[StringT] | None = None,
        count: int | None = None,
        align: int | StringT | None = None,
        aggregator: None
        | (
            Literal[
                PureToken.AVG,
                PureToken.COUNT,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.RANGE,
                PureToken.STD_P,
                PureToken.STD_S,
                PureToken.SUM,
                PureToken.TWA,
                PureToken.VAR_P,
                PureToken.VAR_S,
            ]
        ) = None,
        bucketduration: int | timedelta | None = None,
        buckettimestamp: StringT | None = None,
        groupby: StringT | None = None,
        reducer: None
        | (
            Literal[
                PureToken.AVG,
                PureToken.COUNT,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.RANGE,
                PureToken.STD_P,
                PureToken.STD_S,
                PureToken.SUM,
                PureToken.VAR_P,
                PureToken.VAR_S,
            ]
        ) = None,
        empty: bool | None = None,
        latest: bool | None = None,
    ) -> CommandRequest[
        dict[
            AnyStr,
            tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
        ]
    ]:
        """
        Query a range across multiple time series by filters in forward direction.

        :param fromtimestamp: Start timestamp for the range query (integer UNIX timestamp
         in milliseconds) or `-` to denote the timestamp of the earliest sample amongst
         all time series that passes the filters.
        :param totimestamp: End timestamp for the range query (integer UNIX timestamp in
         milliseconds) or `+` to denote the timestamp of the latest sample amongst all
         time series that passes the filters
        :param filters: Filter expressions to apply to the time series.
        :param filter_by_ts: Timestamps to filter the time series by.
        :param min_value: Minimum value to filter the time series by.
        :param max_value: Maximum value to filter the time series by.
        :param withlabels: Whether to include labels in the response.
        :param selected_labels: Returns a subset of the label-value pairs that represent metadata
         labels of the time series. Use when a large number of labels exists per series, but only
         the values of some of the labels are required. If :paramref:`withlabels` or
         :paramref:`selected_labels` are not specified, by default, an empty mapping is reported
         as label-value pairs.
        :param count: Limit the number of samples returned.
        :param align: Time bucket alignment control for :paramref:`aggregator`.
        :param aggregator: Aggregates samples into time buckets by the provided aggregation type.
        :param bucketduration: Duration of each bucket, in milliseconds.
        :param buckettimestamp: Timestamp of the first bucket.
        :param groupby: Label to group the samples by
        :param reducer: Aggregation type to aggregate the results in each group
        :param empty: Optional boolean to include empty time series in the response.
        :param latest: Report the compacted value of the latest, possibly partial, bucket.

        :return: A dictionary containing the time series data.
        """
        command_arguments: CommandArgList = [
            normalized_timestamp(fromtimestamp),
            normalized_timestamp(totimestamp),
        ]
        if latest:
            command_arguments.append(b"LATEST")
        if filter_by_ts:
            _ts: list[int] = list(filter_by_ts)
            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
        if min_value is not None and max_value is not None:
            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
        if withlabels:
            command_arguments.append(PureToken.WITHLABELS)
        if selected_labels:
            _labels: list[StringT] = list(selected_labels)
            command_arguments.extend([PureToken.SELECTED_LABELS, *_labels])
        if count is not None:
            command_arguments.extend([PrefixToken.COUNT, count])
        if aggregator or buckettimestamp is not None:
            if align is not None:
                command_arguments.extend([PrefixToken.ALIGN, align])
            if aggregator and bucketduration is not None:
                command_arguments.extend(
                    [
                        PrefixToken.AGGREGATION,
                        aggregator,
                        normalized_milliseconds(bucketduration),
                    ]
                )
            if buckettimestamp is not None:
                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
            if empty:
                command_arguments.append(PureToken.EMPTY)
        if filters:
            _filters: list[StringT] = list(filters)
            command_arguments.extend([PrefixToken.FILTER, *_filters])
        if groupby and reducer:
            command_arguments.extend([PureToken.GROUPBY, groupby, b"REDUCE", reducer])
        return self.client.create_request(
            CommandName.TS_MRANGE,
            *command_arguments,
            callback=TimeSeriesMultiCallback[AnyStr](grouped=groupby is not None),
        )

    @mutually_inclusive_parameters("min_value", "max_value")
    @mutually_exclusive_parameters("withlabels", "selected_labels")
    @mutually_inclusive_parameters("aggregator", "bucketduration")
    @mutually_inclusive_parameters("groupby", "reducer")
    @module_command(
        CommandName.TS_MREVRANGE,
        group=COMMAND_GROUP,
        version_introduced="1.4.0",
        arguments={
            "latest": {"version_introduced": "1.8.0"},
            "empty": {"version_introduced": "1.8.0"},
        },
        module=MODULE,
        cluster=ClusterCommandConfig(route=NodeFlag.PRIMARIES, combine=ClusterMergeTimeSeries()),
        flags={CommandFlag.READONLY},
    )
    def mrevrange(
        self,
        fromtimestamp: int | datetime | StringT,
        totimestamp: int | datetime | StringT,
        filters: Parameters[StringT] | None = None,
        *,
        filter_by_ts: Parameters[int] | None = None,
        min_value: int | float | None = None,
        max_value: int | float | None = None,
        withlabels: bool | None = None,
        selected_labels: Parameters[StringT] | None = None,
        count: int | None = None,
        align: int | StringT | None = None,
        aggregator: None
        | (
            Literal[
                PureToken.AVG,
                PureToken.COUNT,
                PureToken.FIRST,
                PureToken.LAST,
                PureToken.MAX,
                PureToken.MIN,
                PureToken.RANGE,
                PureToken.STD_P,
                PureToken.STD_S,
                PureToken.SUM,
                PureToken.TWA,
                PureToken.VAR_P,
                PureToken.VAR_S,
            ]
        ) = None,
        bucketduration: int | timedelta | None = None,
        buckettimestamp: StringT | None = None,
        groupby: StringT | None = None,
        reducer: StringT | None = None,
        empty: bool | None = None,
        latest: bool | None = None,
    ) -> CommandRequest[
        dict[
            AnyStr,
            tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
        ]
    ]:
        """
        Query a range across multiple time series by filters in reverse direction.

        :param fromtimestamp: Start timestamp for the range query (integer UNIX timestamp
         in milliseconds) or `-` to denote the timestamp of the earliest sample amongst
         all time series that passes the filters.
        :param totimestamp: End timestamp for the range query (integer UNIX timestamp in
         milliseconds) or `+` to denote the timestamp of the latest sample amongst all
         time series that passes the filters
        :param filters: Filter expressions to apply to the time series.
        :param filter_by_ts: Timestamps to filter the time series by.
        :param min_value: Minimum value to filter the time series by.
        :param max_value: Maximum value to filter the time series by.
        :param withlabels: Whether to include labels in the response.
        :param selected_labels: Returns a subset of the label-value pairs that represent metadata
         labels of the time series. Use when a large number of labels exists per series, but only
         the values of some of the labels are required. If :paramref:`withlabels` or
         :paramref:`selected_labels` are not specified, by default, an empty mapping is reported
         as label-value pairs.
        :param count: Limit the number of samples returned.
        :param align: Time bucket alignment control for :paramref:`aggregator`.
        :param aggregator: Aggregates samples into time buckets by the provided aggregation type.
        :param bucketduration: Duration of each bucket, in milliseconds.
        :param buckettimestamp: Timestamp of the first bucket.
        :param groupby: Label to group the samples by
        :param reducer: Aggregation type to aggregate the results in each group
        :param empty: Optional boolean to include empty time series in the response.
        :param latest: Report the compacted value of the latest, possibly partial, bucket.

        :return: A dictionary containing the result of the query.
        """
        command_arguments: CommandArgList = [
            normalized_timestamp(fromtimestamp),
            normalized_timestamp(totimestamp),
        ]
        if latest:
            command_arguments.append(b"LATEST")
        if filter_by_ts:
            _ts: list[int] = list(filter_by_ts)
            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
        if min_value is not None and max_value is not None:
            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
        if withlabels:
            command_arguments.append(PureToken.WITHLABELS)
        if selected_labels:
            _labels: list[StringT] = list(selected_labels)
            command_arguments.extend([PureToken.SELECTED_LABELS, *_labels])
        if count is not None:
            command_arguments.extend([PrefixToken.COUNT, count])
        if aggregator or buckettimestamp is not None:
            if align is not None:
                command_arguments.extend([PrefixToken.ALIGN, align])
            if aggregator and bucketduration is not None:
                command_arguments.extend(
                    [
                        PrefixToken.AGGREGATION,
                        aggregator,
                        normalized_milliseconds(bucketduration),
                    ]
                )
            if buckettimestamp is not None:
                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
            if empty:
                command_arguments.append(PureToken.EMPTY)
        if filters:
            _filters: list[StringT] = list(filters)
            command_arguments.extend([PrefixToken.FILTER, *_filters])
        if groupby and reducer and reducer:
            command_arguments.extend([PureToken.GROUPBY, groupby, b"REDUCE", reducer])

        return self.client.create_request(
            CommandName.TS_MREVRANGE,
            *command_arguments,
            callback=TimeSeriesMultiCallback[AnyStr](grouped=groupby is not None),
        )

    @module_command(
        CommandName.TS_GET,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        arguments={"latest": {"version_introduced": "1.8.0"}},
        module=MODULE,
        flags={CommandFlag.READONLY},
        cacheable=True,
    )
    def get(
        self, key: KeyT, latest: bool | None = None
    ) -> CommandRequest[tuple[int, float] | tuple[()]]:
        """
        Get the sample with the highest timestamp from a given time series.

        :param key: The key name for the time series.
        :param latest: If the time series is a compaction, if ``True``, reports
         the compacted value of the latest, possibly partial, bucket. When ``False``,
         does not report the latest, possibly partial, bucket. When a time series is not a
         compaction, the parameter is ignored.
        :return: A tuple of (timestamp, value) of the sample with the highest timestamp,
         or an empty tuple if the time series is empty.
        """
        command_arguments: CommandArgList = [key]
        if latest:
            command_arguments.append(b"LATEST")
        return self.client.create_request(
            CommandName.TS_GET, *command_arguments, callback=SampleCallback()
        )

    @mutually_exclusive_parameters("withlabels", "selected_labels")
    @module_command(
        CommandName.TS_MGET,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        arguments={"latest": {"version_introduced": "1.8.0"}},
        module=MODULE,
        cluster=ClusterCommandConfig(
            route=NodeFlag.PRIMARIES,
            combine=ClusterMergeTimeSeries(),
        ),
        flags={CommandFlag.READONLY},
    )
    def mget(
        self,
        filters: Parameters[StringT],
        withlabels: bool | None = None,
        selected_labels: Parameters[StringT] | None = None,
        latest: bool | None = None,
    ) -> CommandRequest[dict[AnyStr, tuple[dict[AnyStr, AnyStr], tuple[int, float] | tuple[()]]]]:
        """
        Get the sample with the highest timestamp from each time series matching a specific filter.

        :param filters: Filters time series based on their labels and label values. At least one
         `label=value` filter is required.
        :param withlabels: Includes in the reply all label-value pairs representing metadata labels
         of the time series. If :paramref:`withlabels` or :paramref:`selected_labels` are not
         specified, by default, an empty dictionary is reported as label-value pairs.
        :param selected_labels: Returns a subset of the label-value pairs that represent metadata
         labels of the time series. Use when a large number of labels exists per series, but only
         the values of some of the labels are required. If :paramref:`withlabels` or
         :paramref:`selected_labels` are not specified, by default, an empty mapping is reported
         as label-value pairs.
        :param latest: Used when a time series is a compaction. If ``True``, the command also
         reports the compacted value of the latest possibly partial bucket, given that this
         bucket's start time falls within `[fromTimestamp, toTimestamp]`. If ``False``,
         the command does not report the latest possibly partial bucket. When a time series is
         not a compaction, the argument is ignored
        :return: For each time series matching the specified filters, a dictionary is returned with
         the time series key name as the key and a tuple containing the label-value pairs and a
         single timestamp-value pair as the value.
        """
        command_arguments: CommandArgList = []
        if latest:
            command_arguments.append(b"LATEST")
        if withlabels:
            command_arguments.append(PureToken.WITHLABELS)
        if selected_labels:
            _labels: list[StringT] = list(selected_labels)
            command_arguments.extend([b"SELECTED_LABELS", *_labels])
        command_arguments.extend([PrefixToken.FILTER, *filters])
        return self.client.create_request(
            CommandName.TS_MGET,
            *command_arguments,
            callback=TimeSeriesCallback[AnyStr](),
        )

    @module_command(
        CommandName.TS_INFO,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
    )
    def info(
        self, key: KeyT, debug: bool | None = None
    ) -> CommandRequest[dict[AnyStr, ResponseType]]:
        """
        Return information and statistics for a time series.

        :param key: Key name of the time series.
        :param debug: Optional flag to get a more detailed information about the chunks.
        :return: Dictionary with information about the time series (name-value pairs).
        """
        command_arguments: CommandArgList = [key]
        if debug:
            command_arguments.append(b"DEBUG")
        return self.client.create_request(
            CommandName.TS_INFO,
            *command_arguments,
            callback=TimeSeriesInfoCallback[AnyStr](),
        )

    @module_command(
        CommandName.TS_QUERYINDEX,
        group=COMMAND_GROUP,
        version_introduced="1.0.0",
        module=MODULE,
        cluster=ClusterCommandConfig(
            route=NodeFlag.PRIMARIES,
            combine=ClusterMergeSets(),
        ),
        flags={CommandFlag.READONLY},
    )
    def queryindex(self, filters: Parameters[StringT]) -> CommandRequest[set[AnyStr]]:
        """
        Get all time series keys matching a filter list.

        :param filters: A list of filter expressions to match time series based on their labels
         and label values. Each filter expression has one of the following syntaxes:

         - ``label=value``, where ``label`` equals ``value``
         - ``label!=value``, where ``label`` does not equal ``value``
         - ``label=``, where ``key`` does not have label ``label``
         - ``label!=``, where ``key`` has label ``label``
         - ``label=(value1,value2,...)``, where ``key`` with label ``label`` equals one of
           the values in the list
         - ``label!=(value1,value2,...)``, where key with label ``label`` does not equal
           any of the values in the list

         At least one ``label=value`` filter is required. Filters are conjunctive. For example, the
         filter ``type=temperature room=study`` means the a time series is a temperature time series
         of a study room. Don't use whitespaces in the filter expression.
        :return: A set of time series keys matching the filter list. The set is empty if no time
         series matches the filter. An error is returned on invalid filter expression.

        """
        command_arguments: CommandArgList = [*filters]

        return self.client.create_request(
            CommandName.TS_QUERYINDEX,
            *command_arguments,
            callback=SetCallback[AnyStr](),
        )
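For orientation, here is a minimal usage sketch of the TimeSeries module group added in this file. It is illustrative only and not part of the package diff: it assumes the group is exposed on the async client as client.timeseries (matching how coredis exposes other module groups) and that the CommandRequest objects returned by these methods are awaitable.

    import asyncio

    import coredis


    async def main() -> None:
        client = coredis.Redis(decode_responses=True)
        # TS.CREATE with a retention window and labels (hypothetical key names)
        await client.timeseries.create("sensor:1", retention=60_000, labels={"room": "study"})
        # TS.ADD with "*" so the server assigns the sample timestamp
        await client.timeseries.add("sensor:1", "*", 21.5)
        # TS.RANGE over the whole series ("-" and "+" denote earliest/latest samples)
        samples = await client.timeseries.range("sensor:1", "-", "+")
        print(samples)


    asyncio.run(main())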