coredis 4.24.0__py3-none-any.whl → 5.0.0rc1__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.

Potentially problematic release: this version of coredis has been flagged for review.

Files changed (77)
  1. coredis/__init__.py +1 -3
  2. coredis/_packer.py +10 -10
  3. coredis/_protocols.py +23 -32
  4. coredis/_py_311_typing.py +20 -0
  5. coredis/_py_312_typing.py +17 -0
  6. coredis/_utils.py +49 -51
  7. coredis/_version.py +3 -3
  8. coredis/cache.py +57 -82
  9. coredis/client/__init__.py +1 -2
  10. coredis/client/basic.py +129 -56
  11. coredis/client/cluster.py +147 -70
  12. coredis/commands/__init__.py +27 -7
  13. coredis/commands/_key_spec.py +11 -10
  14. coredis/commands/_utils.py +1 -1
  15. coredis/commands/_validators.py +30 -20
  16. coredis/commands/_wrappers.py +19 -99
  17. coredis/commands/bitfield.py +10 -2
  18. coredis/commands/constants.py +20 -3
  19. coredis/commands/core.py +1627 -1246
  20. coredis/commands/function.py +21 -19
  21. coredis/commands/monitor.py +0 -71
  22. coredis/commands/pubsub.py +7 -142
  23. coredis/commands/request.py +108 -0
  24. coredis/commands/script.py +9 -9
  25. coredis/commands/sentinel.py +60 -49
  26. coredis/connection.py +14 -15
  27. coredis/exceptions.py +2 -2
  28. coredis/experimental/__init__.py +0 -4
  29. coredis/globals.py +3 -0
  30. coredis/modules/autocomplete.py +28 -30
  31. coredis/modules/base.py +15 -31
  32. coredis/modules/filters.py +269 -245
  33. coredis/modules/graph.py +61 -62
  34. coredis/modules/json.py +172 -140
  35. coredis/modules/response/_callbacks/autocomplete.py +5 -4
  36. coredis/modules/response/_callbacks/graph.py +34 -29
  37. coredis/modules/response/_callbacks/json.py +5 -3
  38. coredis/modules/response/_callbacks/search.py +49 -53
  39. coredis/modules/response/_callbacks/timeseries.py +18 -30
  40. coredis/modules/response/types.py +1 -5
  41. coredis/modules/search.py +186 -169
  42. coredis/modules/timeseries.py +184 -164
  43. coredis/parser.py +6 -19
  44. coredis/pipeline.py +391 -422
  45. coredis/pool/basic.py +7 -7
  46. coredis/pool/cluster.py +3 -3
  47. coredis/pool/nodemanager.py +10 -3
  48. coredis/response/_callbacks/__init__.py +76 -57
  49. coredis/response/_callbacks/acl.py +0 -3
  50. coredis/response/_callbacks/cluster.py +25 -16
  51. coredis/response/_callbacks/command.py +8 -6
  52. coredis/response/_callbacks/connection.py +4 -3
  53. coredis/response/_callbacks/geo.py +17 -13
  54. coredis/response/_callbacks/hash.py +13 -11
  55. coredis/response/_callbacks/keys.py +9 -5
  56. coredis/response/_callbacks/module.py +2 -3
  57. coredis/response/_callbacks/script.py +6 -8
  58. coredis/response/_callbacks/sentinel.py +21 -17
  59. coredis/response/_callbacks/server.py +36 -14
  60. coredis/response/_callbacks/sets.py +3 -4
  61. coredis/response/_callbacks/sorted_set.py +27 -24
  62. coredis/response/_callbacks/streams.py +22 -13
  63. coredis/response/_callbacks/strings.py +7 -6
  64. coredis/response/_callbacks/vector_sets.py +126 -0
  65. coredis/response/types.py +13 -4
  66. coredis/sentinel.py +1 -1
  67. coredis/stream.py +4 -3
  68. coredis/tokens.py +343 -16
  69. coredis/typing.py +432 -79
  70. {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/METADATA +4 -5
  71. coredis-5.0.0rc1.dist-info/RECORD +95 -0
  72. coredis/client/keydb.py +0 -336
  73. coredis/pipeline.pyi +0 -2103
  74. coredis-4.24.0.dist-info/RECORD +0 -93
  75. {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/WHEEL +0 -0
  76. {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/licenses/LICENSE +0 -0
  77. {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/top_level.txt +0 -0
@@ -12,6 +12,7 @@ from coredis.typing import (
     Literal,
     Mapping,
     Parameters,
+    RedisValueT,
     ResponseType,
     StringT,
     ValueT,
@@ -23,8 +24,9 @@ from ..commands._validators import (
     mutually_exclusive_parameters,
     mutually_inclusive_parameters,
 )
-from ..commands._wrappers import CacheConfig, ClusterCommandConfig
+from ..commands._wrappers import ClusterCommandConfig
 from ..commands.constants import CommandFlag, CommandGroup, CommandName, NodeFlag
+from ..commands.request import CommandRequest
 from ..response._callbacks import (
     ClusterMergeSets,
     IntCallback,
@@ -71,7 +73,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def create(
+    def create(
         self,
         key: KeyT,
         retention: int | timedelta | None = None,
@@ -89,7 +91,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
             ]
         ) = None,
         labels: Mapping[StringT, ValueT] | None = None,
-    ) -> bool:
+    ) -> CommandRequest[bool]:
         """
         Create a new time series with the given key.

@@ -104,24 +106,24 @@ class TimeSeries(ModuleGroup[AnyStr]):
         :param labels: A dictionary of labels to be associated with the time series.
         :return: True if the time series was created successfully, False otherwise.
         """
-        pieces: CommandArgList = [key]
+        command_arguments: CommandArgList = [key]
         if retention is not None:
-            pieces.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
+            command_arguments.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
         if encoding:
-            pieces.extend([PrefixToken.ENCODING, encoding])
+            command_arguments.extend([PrefixToken.ENCODING, encoding])
         if chunk_size is not None:
-            pieces.extend([PrefixToken.CHUNK_SIZE, chunk_size])
+            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
         if duplicate_policy is not None:
-            pieces.extend([PrefixToken.DUPLICATE_POLICY, duplicate_policy])
+            command_arguments.extend([PrefixToken.DUPLICATE_POLICY, duplicate_policy])
         if labels:
-            pieces.extend(
+            command_arguments.extend(
                 [
                     PrefixToken.LABELS,
                     *dict_to_flat_list(labels),  # type: ignore
                 ]
             )
-        return await self.execute_module_command(
-            CommandName.TS_CREATE, *pieces, callback=SimpleStringCallback()
+        return self.client.create_request(
+            CommandName.TS_CREATE, *command_arguments, callback=SimpleStringCallback()
         )

     @module_command(
@@ -130,12 +132,12 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.6.0",
         module=MODULE,
     )
-    async def delete(
+    def delete(
         self,
         key: KeyT,
         fromtimestamp: int | datetime | StringT,
         totimestamp: int | datetime | StringT,
-    ) -> int:
+    ) -> CommandRequest[int]:
         """
         Delete all samples between two timestamps for a given time series.

@@ -144,7 +146,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         :param totimestamp: End timestamp for the range deletion.
         :return: The number of samples that were deleted, or an error reply.
         """
-        return await self.execute_module_command(
+        return self.client.create_request(
             CommandName.TS_DEL,
             key,
             normalized_timestamp(fromtimestamp),
@@ -158,7 +160,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def alter(
+    def alter(
         self,
         key: KeyT,
         labels: Mapping[StringT, StringT] | None = None,
@@ -175,7 +177,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
                 PureToken.SUM,
             ]
         ) = None,
-    ) -> bool:
+    ) -> CommandRequest[bool]:
         """
         Update the retention, chunk size, duplicate policy, and labels of an existing time series.

@@ -188,22 +190,22 @@ class TimeSeries(ModuleGroup[AnyStr]):
         :param duplicate_policy: Policy for handling multiple samples with identical timestamps.
         :return: True if executed correctly, False otherwise.
         """
-        pieces: CommandArgList = [key]
+        command_arguments: CommandArgList = [key]
         if labels:
-            pieces.extend(
+            command_arguments.extend(
                 [
                     PrefixToken.LABELS,
                     *dict_to_flat_list(labels),  # type: ignore
                 ]
             )
         if retention is not None:
-            pieces.extend([PrefixToken.RETENTION, retention])
+            command_arguments.extend([PrefixToken.RETENTION, retention])
         if chunk_size is not None:
-            pieces.extend([PrefixToken.CHUNK_SIZE, chunk_size])
+            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
         if duplicate_policy:
-            pieces.extend([PrefixToken.DUPLICATE_POLICY, duplicate_policy])
-        return await self.execute_module_command(
-            CommandName.TS_ALTER, *pieces, callback=SimpleStringCallback()
+            command_arguments.extend([PrefixToken.DUPLICATE_POLICY, duplicate_policy])
+        return self.client.create_request(
+            CommandName.TS_ALTER, *command_arguments, callback=SimpleStringCallback()
         )

     @module_command(
@@ -212,7 +214,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def add(
+    def add(
         self,
         key: KeyT,
         timestamp: int | datetime | StringT,
@@ -232,7 +234,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
             ]
         ) = None,
         labels: Mapping[StringT, ValueT] | None = None,
-    ) -> int:
+    ) -> CommandRequest[int]:
         """
         Add a sample to a time series.

@@ -248,28 +250,28 @@ class TimeSeries(ModuleGroup[AnyStr]):
         :param labels: Dictionary of labels associated with the sample.
         :return: Number of samples added to the time series.
         """
-        pieces: CommandArgList = [
+        command_arguments: CommandArgList = [
             key,
             normalized_timestamp(timestamp),
             value,
         ]
         if retention is not None:
-            pieces.extend([PrefixToken.RETENTION, retention])
+            command_arguments.extend([PrefixToken.RETENTION, retention])
         if encoding:
-            pieces.extend([PrefixToken.ENCODING, encoding])
+            command_arguments.extend([PrefixToken.ENCODING, encoding])
         if chunk_size is not None:
-            pieces.extend([PrefixToken.CHUNK_SIZE, chunk_size])
+            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
         if duplicate_policy:
-            pieces.extend([PrefixToken.ON_DUPLICATE, duplicate_policy])
+            command_arguments.extend([PrefixToken.ON_DUPLICATE, duplicate_policy])
         if labels:
-            pieces.extend(
+            command_arguments.extend(
                 [
                     PrefixToken.LABELS,
                     *dict_to_flat_list(labels),  # type: ignore
                 ]
             )
-        return await self.execute_module_command(
-            CommandName.TS_ADD, *pieces, callback=IntCallback()
+        return self.client.create_request(
+            CommandName.TS_ADD, *command_arguments, callback=IntCallback()
        )

     @module_command(
@@ -278,7 +280,9 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def madd(self, ktvs: Parameters[tuple[AnyStr, int, int | float]]) -> tuple[int, ...]:
+    def madd(
+        self, ktvs: Parameters[tuple[AnyStr, int, int | float]]
+    ) -> CommandRequest[tuple[int, ...]]:
         """
         Append new samples to one or more time series.

@@ -287,10 +291,10 @@ class TimeSeries(ModuleGroup[AnyStr]):
            to the server clock, and a numeric data value of the sample.
         :return: A tuple of integers representing the timestamp of each added sample
         """
-        pieces: CommandArgList = list(itertools.chain(*ktvs))
+        command_arguments: CommandArgList = list(itertools.chain(*ktvs))

-        return await self.execute_module_command(
-            CommandName.TS_MADD, *pieces, callback=TupleCallback[int]()
+        return self.client.create_request(
+            CommandName.TS_MADD, *command_arguments, callback=TupleCallback[int]()
         )

     @module_command(
@@ -299,16 +303,16 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def incrby(
+    def incrby(
         self,
         key: KeyT,
         value: int | float,
-        labels: Mapping[StringT, ValueT] | None = None,
+        labels: Mapping[StringT, RedisValueT] | None = None,
         timestamp: datetime | int | StringT | None = None,
         retention: int | timedelta | None = None,
         uncompressed: bool | None = None,
         chunk_size: int | None = None,
-    ) -> int:
+    ) -> CommandRequest[int]:
         """
         Increments the value of the sample with the maximum existing timestamp, or creates
         a new sample with a value equal to the value of the sample with the maximum existing
@@ -329,22 +333,22 @@ class TimeSeries(ModuleGroup[AnyStr]):
            Use it only if you are creating a new time series.
         :return: The timestamp of the upserted sample, or an error.
         """
-        pieces: CommandArgList = [key, value]
+        command_arguments: CommandArgList = [key, value]
         if timestamp:
-            pieces.extend([PrefixToken.TIMESTAMP, normalized_timestamp(timestamp)])
+            command_arguments.extend([PrefixToken.TIMESTAMP, normalized_timestamp(timestamp)])
         if retention:
-            pieces.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
+            command_arguments.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
         if uncompressed:
-            pieces.append(PureToken.UNCOMPRESSED)
+            command_arguments.append(PureToken.UNCOMPRESSED)
         if chunk_size:
-            pieces.extend([PrefixToken.CHUNK_SIZE, chunk_size])
+            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
         if labels:
-            pieces.extend(
+            command_arguments.extend(
                 [PrefixToken.LABELS, *dict_to_flat_list(labels)]  # type: ignore
             )

-        return await self.execute_module_command(
-            CommandName.TS_INCRBY, *pieces, callback=IntCallback()
+        return self.client.create_request(
+            CommandName.TS_INCRBY, *command_arguments, callback=IntCallback()
         )

     @module_command(
@@ -353,16 +357,16 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def decrby(
+    def decrby(
         self,
         key: KeyT,
         value: int | float,
-        labels: Mapping[StringT, ValueT] | None = None,
+        labels: Mapping[StringT, RedisValueT] | None = None,
         timestamp: datetime | int | StringT | None = None,
         retention: int | timedelta | None = None,
         uncompressed: bool | None = None,
         chunk_size: int | None = None,
-    ) -> int:
+    ) -> CommandRequest[int]:
         """
         Decrease the value of the sample with the maximum existing timestamp, or create a new
         sample with a value equal to the value of the sample with the maximum existing timestamp
@@ -386,22 +390,22 @@ class TimeSeries(ModuleGroup[AnyStr]):
            time series.
         :return: The timestamp of the upserted sample, or an error if the operation failed.
         """
-        pieces: CommandArgList = [key, value]
+        command_arguments: CommandArgList = [key, value]

         if timestamp:
-            pieces.extend([PrefixToken.TIMESTAMP, normalized_timestamp(timestamp)])
+            command_arguments.extend([PrefixToken.TIMESTAMP, normalized_timestamp(timestamp)])
         if retention:
-            pieces.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
+            command_arguments.extend([PrefixToken.RETENTION, normalized_milliseconds(retention)])
         if uncompressed:
-            pieces.append(PureToken.UNCOMPRESSED)
+            command_arguments.append(PureToken.UNCOMPRESSED)
         if chunk_size:
-            pieces.extend([PrefixToken.CHUNK_SIZE, chunk_size])
+            command_arguments.extend([PrefixToken.CHUNK_SIZE, chunk_size])
         if labels:
-            pieces.extend(
+            command_arguments.extend(
                 [PrefixToken.LABELS, *dict_to_flat_list(labels)]  # type: ignore
             )
-        return await self.execute_module_command(
-            CommandName.TS_DECRBY, *pieces, callback=IntCallback()
+        return self.client.create_request(
+            CommandName.TS_DECRBY, *command_arguments, callback=IntCallback()
         )

     @module_command(
@@ -411,7 +415,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         arguments={"aligntimestamp": {"version_introduced": "1.8.0"}},
         module=MODULE,
     )
-    async def createrule(
+    def createrule(
         self,
         source: KeyT,
         destination: KeyT,
@@ -432,7 +436,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         ],
         bucketduration: int | timedelta,
         aligntimestamp: int | None = None,
-    ) -> bool:
+    ) -> CommandRequest[bool]:
         """
         Create a compaction rule

@@ -445,8 +449,8 @@ class TimeSeries(ModuleGroup[AnyStr]):
            in milliseconds. The default value is 0 aligned with the epoch.
         :return: True if executed correctly, False otherwise.
         """
-        pieces: CommandArgList = [source, destination]
-        pieces.extend(
+        command_arguments: CommandArgList = [source, destination]
+        command_arguments.extend(
             [
                 PrefixToken.AGGREGATION,
                 aggregation,
@@ -454,9 +458,11 @@ class TimeSeries(ModuleGroup[AnyStr]):
             ]
         )
         if aligntimestamp is not None:
-            pieces.append(aligntimestamp)
-        return await self.execute_module_command(
-            CommandName.TS_CREATERULE, *pieces, callback=SimpleStringCallback()
+            command_arguments.append(aligntimestamp)
+        return self.client.create_request(
+            CommandName.TS_CREATERULE,
+            *command_arguments,
+            callback=SimpleStringCallback(),
         )

     @module_command(
@@ -465,7 +471,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def deleterule(self, source: KeyT, destination: KeyT) -> bool:
+    def deleterule(self, source: KeyT, destination: KeyT) -> CommandRequest[bool]:
         """
         Delete a compaction rule from a RedisTimeSeries sourceKey to a destinationKey.

@@ -475,10 +481,12 @@ class TimeSeries(ModuleGroup[AnyStr]):

         .. warning:: This command does not delete the compacted series.
         """
-        pieces: CommandArgList = [source, destination]
+        command_arguments: CommandArgList = [source, destination]

-        return await self.execute_module_command(
-            CommandName.TS_DELETERULE, *pieces, callback=SimpleStringCallback()
+        return self.client.create_request(
+            CommandName.TS_DELETERULE,
+            *command_arguments,
+            callback=SimpleStringCallback(),
         )

     @mutually_inclusive_parameters("min_value", "max_value")
@@ -493,9 +501,9 @@ class TimeSeries(ModuleGroup[AnyStr]):
         },
         module=MODULE,
         flags={CommandFlag.READONLY},
-        cache_config=CacheConfig(lambda *a, **_: a[0]),
+        cacheable=True,
     )
-    async def range(
+    def range(
         self,
         key: KeyT,
         fromtimestamp: datetime | int | StringT,
@@ -528,7 +536,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         buckettimestamp: StringT | None = None,
         empty: bool | None = None,
         latest: bool | None = None,
-    ) -> tuple[tuple[int, float], ...] | tuple[()]:
+    ) -> CommandRequest[tuple[tuple[int, float], ...] | tuple[()]]:
         """
         Query a range in forward direction.

@@ -553,24 +561,24 @@ class TimeSeries(ModuleGroup[AnyStr]):

         :return: A tuple of samples, where each sample is a tuple of timestamp and value.
         """
-        pieces: CommandArgList = [
+        command_arguments: CommandArgList = [
             key,
             normalized_timestamp(fromtimestamp),
             normalized_timestamp(totimestamp),
         ]
         if latest:
-            pieces.append(b"LATEST")
+            command_arguments.append(b"LATEST")
         if filter_by_ts:
             _ts: list[int] = list(filter_by_ts)
-            pieces.extend([PrefixToken.FILTER_BY_TS, *_ts])
+            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
         if min_value is not None and max_value is not None:
-            pieces.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
+            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
         if count is not None:
-            pieces.extend([PrefixToken.COUNT, count])
+            command_arguments.extend([PrefixToken.COUNT, count])
         if aggregator and bucketduration is not None:
             if align is not None:
-                pieces.extend([PrefixToken.ALIGN, align])
-            pieces.extend(
+                command_arguments.extend([PrefixToken.ALIGN, align])
+            command_arguments.extend(
                 [
                     PrefixToken.AGGREGATION,
                     aggregator,
@@ -578,12 +586,12 @@ class TimeSeries(ModuleGroup[AnyStr]):
                 ]
             )
             if buckettimestamp is not None:
-                pieces.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
+                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
             if empty is not None:
-                pieces.append(PureToken.EMPTY)
+                command_arguments.append(PureToken.EMPTY)

-        return await self.execute_module_command(
-            CommandName.TS_RANGE, *pieces, callback=SamplesCallback()
+        return self.client.create_request(
+            CommandName.TS_RANGE, *command_arguments, callback=SamplesCallback()
         )

     @mutually_inclusive_parameters("min_value", "max_value")
@@ -598,9 +606,9 @@ class TimeSeries(ModuleGroup[AnyStr]):
         },
         module=MODULE,
         flags={CommandFlag.READONLY},
-        cache_config=CacheConfig(lambda *a, **_: a[0]),
+        cacheable=True,
     )
-    async def revrange(
+    def revrange(
         self,
         key: KeyT,
         fromtimestamp: int | datetime | StringT,
@@ -633,7 +641,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         buckettimestamp: StringT | None = None,
         empty: bool | None = None,
         latest: bool | None = None,
-    ) -> tuple[tuple[int, float], ...] | tuple[()]:
+    ) -> CommandRequest[tuple[tuple[int, float], ...] | tuple[()]]:
         """
         Query a range in reverse direction from a RedisTimeSeries key.

@@ -655,24 +663,24 @@ class TimeSeries(ModuleGroup[AnyStr]):

         :return: A tuple of timestamp-value pairs in reverse order.
         """
-        pieces: CommandArgList = [
+        command_arguments: CommandArgList = [
             key,
             normalized_timestamp(fromtimestamp),
             normalized_timestamp(totimestamp),
         ]
         if latest:
-            pieces.append(b"LATEST")
+            command_arguments.append(b"LATEST")
         if filter_by_ts:
             _ts: list[int] = list(filter_by_ts)
-            pieces.extend([PrefixToken.FILTER_BY_TS, *_ts])
+            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
         if min_value is not None and max_value is not None:
-            pieces.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
+            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
         if count is not None:
-            pieces.extend([PrefixToken.COUNT, count])
+            command_arguments.extend([PrefixToken.COUNT, count])
         if aggregator and bucketduration is not None:
             if align is not None:
-                pieces.extend([PrefixToken.ALIGN, align])
-            pieces.extend(
+                command_arguments.extend([PrefixToken.ALIGN, align])
+            command_arguments.extend(
                 [
                     PrefixToken.AGGREGATION,
                     aggregator,
@@ -680,12 +688,12 @@ class TimeSeries(ModuleGroup[AnyStr]):
                 ]
             )
             if buckettimestamp is not None:
-                pieces.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
+                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
             if empty is not None:
-                pieces.append(PureToken.EMPTY)
+                command_arguments.append(PureToken.EMPTY)

-        return await self.execute_module_command(
-            CommandName.TS_REVRANGE, *pieces, callback=SamplesCallback()
+        return self.client.create_request(
+            CommandName.TS_REVRANGE, *command_arguments, callback=SamplesCallback()
         )

     @mutually_inclusive_parameters("min_value", "max_value")
@@ -707,7 +715,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         ),
         flags={CommandFlag.READONLY},
     )
-    async def mrange(
+    def mrange(
         self,
         fromtimestamp: int | datetime | StringT,
         totimestamp: int | datetime | StringT,
@@ -760,9 +768,11 @@ class TimeSeries(ModuleGroup[AnyStr]):
         ) = None,
         empty: bool | None = None,
         latest: bool | None = None,
-    ) -> dict[
-        AnyStr,
-        tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+    ) -> CommandRequest[
+        dict[
+            AnyStr,
+            tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+        ]
     ]:
         """
         Query a range across multiple time series by filters in forward direction.
@@ -795,29 +805,29 @@ class TimeSeries(ModuleGroup[AnyStr]):

         :return: A dictionary containing the time series data.
         """
-        pieces: CommandArgList = [
+        command_arguments: CommandArgList = [
             normalized_timestamp(fromtimestamp),
             normalized_timestamp(totimestamp),
         ]
         if latest:
-            pieces.append(b"LATEST")
+            command_arguments.append(b"LATEST")
         if filter_by_ts:
             _ts: list[int] = list(filter_by_ts)
-            pieces.extend([PrefixToken.FILTER_BY_TS, *_ts])
+            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
         if min_value is not None and max_value is not None:
-            pieces.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
+            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
         if withlabels:
-            pieces.append(PureToken.WITHLABELS)
+            command_arguments.append(PureToken.WITHLABELS)
         if selected_labels:
             _labels: list[StringT] = list(selected_labels)
-            pieces.extend([PureToken.SELECTED_LABELS, *_labels])
+            command_arguments.extend([PureToken.SELECTED_LABELS, *_labels])
         if count is not None:
-            pieces.extend([PrefixToken.COUNT, count])
+            command_arguments.extend([PrefixToken.COUNT, count])
         if aggregator or buckettimestamp is not None:
             if align is not None:
-                pieces.extend([PrefixToken.ALIGN, align])
+                command_arguments.extend([PrefixToken.ALIGN, align])
             if aggregator and bucketduration is not None:
-                pieces.extend(
+                command_arguments.extend(
                     [
                         PrefixToken.AGGREGATION,
                         aggregator,
@@ -825,19 +835,18 @@ class TimeSeries(ModuleGroup[AnyStr]):
                     ]
                 )
             if buckettimestamp is not None:
-                pieces.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
+                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
             if empty:
-                pieces.append(PureToken.EMPTY)
+                command_arguments.append(PureToken.EMPTY)
         if filters:
             _filters: list[StringT] = list(filters)
-            pieces.extend([PrefixToken.FILTER, *_filters])
+            command_arguments.extend([PrefixToken.FILTER, *_filters])
         if groupby and reducer:
-            pieces.extend([PureToken.GROUPBY, groupby, b"REDUCE", reducer])
-        return await self.execute_module_command(
+            command_arguments.extend([PureToken.GROUPBY, groupby, b"REDUCE", reducer])
+        return self.client.create_request(
             CommandName.TS_MRANGE,
-            *pieces,
-            callback=TimeSeriesMultiCallback[AnyStr](),
-            grouped=groupby is not None,
+            *command_arguments,
+            callback=TimeSeriesMultiCallback[AnyStr](grouped=groupby is not None),
         )

     @mutually_inclusive_parameters("min_value", "max_value")
@@ -856,7 +865,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         cluster=ClusterCommandConfig(route=NodeFlag.PRIMARIES, combine=ClusterMergeTimeSeries()),
         flags={CommandFlag.READONLY},
     )
-    async def mrevrange(
+    def mrevrange(
         self,
         fromtimestamp: int | datetime | StringT,
         totimestamp: int | datetime | StringT,
@@ -893,9 +902,11 @@ class TimeSeries(ModuleGroup[AnyStr]):
         reducer: StringT | None = None,
         empty: bool | None = None,
         latest: bool | None = None,
-    ) -> dict[
-        AnyStr,
-        tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+    ) -> CommandRequest[
+        dict[
+            AnyStr,
+            tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+        ]
     ]:
         """
         Query a range across multiple time series by filters in reverse direction.
@@ -928,29 +939,29 @@ class TimeSeries(ModuleGroup[AnyStr]):

         :return: A dictionary containing the result of the query.
         """
-        pieces: CommandArgList = [
+        command_arguments: CommandArgList = [
             normalized_timestamp(fromtimestamp),
             normalized_timestamp(totimestamp),
         ]
         if latest:
-            pieces.append(b"LATEST")
+            command_arguments.append(b"LATEST")
         if filter_by_ts:
             _ts: list[int] = list(filter_by_ts)
-            pieces.extend([PrefixToken.FILTER_BY_TS, *_ts])
+            command_arguments.extend([PrefixToken.FILTER_BY_TS, *_ts])
         if min_value is not None and max_value is not None:
-            pieces.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
+            command_arguments.extend([PureToken.FILTER_BY_VALUE, min_value, max_value])
         if withlabels:
-            pieces.append(PureToken.WITHLABELS)
+            command_arguments.append(PureToken.WITHLABELS)
         if selected_labels:
             _labels: list[StringT] = list(selected_labels)
-            pieces.extend([PureToken.SELECTED_LABELS, *_labels])
+            command_arguments.extend([PureToken.SELECTED_LABELS, *_labels])
         if count is not None:
-            pieces.extend([PrefixToken.COUNT, count])
+            command_arguments.extend([PrefixToken.COUNT, count])
         if aggregator or buckettimestamp is not None:
             if align is not None:
-                pieces.extend([PrefixToken.ALIGN, align])
+                command_arguments.extend([PrefixToken.ALIGN, align])
             if aggregator and bucketduration is not None:
-                pieces.extend(
+                command_arguments.extend(
                     [
                         PrefixToken.AGGREGATION,
                         aggregator,
@@ -958,20 +969,19 @@ class TimeSeries(ModuleGroup[AnyStr]):
                     ]
                 )
             if buckettimestamp is not None:
-                pieces.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
+                command_arguments.extend([PureToken.BUCKETTIMESTAMP, buckettimestamp])
             if empty:
-                pieces.append(PureToken.EMPTY)
+                command_arguments.append(PureToken.EMPTY)
         if filters:
             _filters: list[StringT] = list(filters)
-            pieces.extend([PrefixToken.FILTER, *_filters])
+            command_arguments.extend([PrefixToken.FILTER, *_filters])
         if groupby and reducer and reducer:
-            pieces.extend([PureToken.GROUPBY, groupby, b"REDUCE", reducer])
+            command_arguments.extend([PureToken.GROUPBY, groupby, b"REDUCE", reducer])

-        return await self.execute_module_command(
+        return self.client.create_request(
             CommandName.TS_MREVRANGE,
-            *pieces,
-            callback=TimeSeriesMultiCallback[AnyStr](),
-            grouped=groupby is not None,
+            *command_arguments,
+            callback=TimeSeriesMultiCallback[AnyStr](grouped=groupby is not None),
         )

     @module_command(
@@ -981,9 +991,11 @@ class TimeSeries(ModuleGroup[AnyStr]):
         arguments={"latest": {"version_introduced": "1.8.0"}},
         module=MODULE,
         flags={CommandFlag.READONLY},
-        cache_config=CacheConfig(lambda *a, **_: a[0]),
+        cacheable=True,
     )
-    async def get(self, key: KeyT, latest: bool | None = None) -> tuple[int, float] | tuple[()]:
+    def get(
+        self, key: KeyT, latest: bool | None = None
+    ) -> CommandRequest[tuple[int, float] | tuple[()]]:
         """
         Get the sample with the highest timestamp from a given time series.

@@ -995,11 +1007,11 @@ class TimeSeries(ModuleGroup[AnyStr]):
         :return: A tuple of (timestamp, value) of the sample with the highest timestamp,
            or an empty tuple if the time series is empty.
         """
-        pieces: CommandArgList = [key]
+        command_arguments: CommandArgList = [key]
         if latest:
-            pieces.append(b"LATEST")
-        return await self.execute_module_command(
-            CommandName.TS_GET, *pieces, callback=SampleCallback()
+            command_arguments.append(b"LATEST")
+        return self.client.create_request(
+            CommandName.TS_GET, *command_arguments, callback=SampleCallback()
         )

     @mutually_exclusive_parameters("withlabels", "selected_labels")
@@ -1015,13 +1027,13 @@ class TimeSeries(ModuleGroup[AnyStr]):
         ),
         flags={CommandFlag.READONLY},
     )
-    async def mget(
+    def mget(
         self,
         filters: Parameters[StringT],
         withlabels: bool | None = None,
         selected_labels: Parameters[StringT] | None = None,
         latest: bool | None = None,
-    ) -> dict[AnyStr, tuple[dict[AnyStr, AnyStr], tuple[int, float] | tuple[()]]]:
+    ) -> CommandRequest[dict[AnyStr, tuple[dict[AnyStr, AnyStr], tuple[int, float] | tuple[()]]]]:
         """
         Get the sample with the highest timestamp from each time series matching a specific filter.

@@ -1044,17 +1056,19 @@ class TimeSeries(ModuleGroup[AnyStr]):
            the time series key name as the key and a tuple containing the label-value pairs and a
            single timestamp-value pair as the value.
         """
-        pieces: CommandArgList = []
+        command_arguments: CommandArgList = []
         if latest:
-            pieces.append(b"LATEST")
+            command_arguments.append(b"LATEST")
         if withlabels:
-            pieces.append(PureToken.WITHLABELS)
+            command_arguments.append(PureToken.WITHLABELS)
         if selected_labels:
             _labels: list[StringT] = list(selected_labels)
-            pieces.extend([b"SELECTED_LABELS", *_labels])
-        pieces.extend([PrefixToken.FILTER, *filters])
-        return await self.execute_module_command(
-            CommandName.TS_MGET, *pieces, callback=TimeSeriesCallback[AnyStr]()
+            command_arguments.extend([b"SELECTED_LABELS", *_labels])
+        command_arguments.extend([PrefixToken.FILTER, *filters])
+        return self.client.create_request(
+            CommandName.TS_MGET,
+            *command_arguments,
+            callback=TimeSeriesCallback[AnyStr](),
         )

     @module_command(
@@ -1063,7 +1077,9 @@ class TimeSeries(ModuleGroup[AnyStr]):
         version_introduced="1.0.0",
         module=MODULE,
     )
-    async def info(self, key: KeyT, debug: bool | None = None) -> dict[AnyStr, ResponseType]:
+    def info(
+        self, key: KeyT, debug: bool | None = None
+    ) -> CommandRequest[dict[AnyStr, ResponseType]]:
         """
         Return information and statistics for a time series.

@@ -1071,11 +1087,13 @@ class TimeSeries(ModuleGroup[AnyStr]):
         :param debug: Optional flag to get a more detailed information about the chunks.
         :return: Dictionary with information about the time series (name-value pairs).
         """
-        pieces: CommandArgList = [key]
+        command_arguments: CommandArgList = [key]
         if debug:
-            pieces.append(b"DEBUG")
-        return await self.execute_module_command(
-            CommandName.TS_INFO, *pieces, callback=TimeSeriesInfoCallback[AnyStr]()
+            command_arguments.append(b"DEBUG")
+        return self.client.create_request(
+            CommandName.TS_INFO,
+            *command_arguments,
+            callback=TimeSeriesInfoCallback[AnyStr](),
         )

     @module_command(
@@ -1089,7 +1107,7 @@ class TimeSeries(ModuleGroup[AnyStr]):
         ),
         flags={CommandFlag.READONLY},
     )
-    async def queryindex(self, filters: Parameters[StringT]) -> set[AnyStr]:
+    def queryindex(self, filters: Parameters[StringT]) -> CommandRequest[set[AnyStr]]:
         """
         Get all time series keys matching a filter list.

@@ -1112,8 +1130,10 @@ class TimeSeries(ModuleGroup[AnyStr]):
            series matches the filter. An error is returned on invalid filter expression.

         """
-        pieces: CommandArgList = [*filters]
+        command_arguments: CommandArgList = [*filters]

-        return await self.execute_module_command(
-            CommandName.TS_QUERYINDEX, *pieces, callback=SetCallback[AnyStr]()
+        return self.client.create_request(
+            CommandName.TS_QUERYINDEX,
+            *command_arguments,
+            callback=SetCallback[AnyStr](),
         )
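
Usage note: the hunks above are from coredis/modules/timeseries.py. In 5.0.0rc1 the TimeSeries module commands are no longer coroutines that execute and decode the reply internally; each builds a CommandRequest via self.client.create_request(...), the cache_config=CacheConfig(...) decorator argument is replaced by cacheable=True, and some label mappings move from ValueT to RedisValueT. The sketch below is only a rough illustration of how calling code might look against 5.0.0rc1. It assumes a CommandRequest[T] is awaitable and resolves to T, and that the TimeSeries group is reachable as client.timeseries; neither is shown in this diff, and the connection settings are placeholders.

import asyncio

from coredis import Redis


async def main() -> None:
    # Placeholder connection settings.
    client = Redis(host="localhost", port=6379)

    # 4.24.0: create() was `async def ... -> bool` and executed immediately when awaited.
    # 5.0.0rc1: create() is annotated as returning CommandRequest[bool]; assuming that
    # request object is awaitable, the call site can stay the same.
    created = await client.timeseries.create("sensor:1", labels={"room": "kitchen"})

    # TS.ADD is annotated as CommandRequest[int] in 5.0.0rc1.
    sample_ts = await client.timeseries.add("sensor:1", "*", 21.5)
    print(created, sample_ts)


asyncio.run(main())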