taskiq-redis 0.5.6__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- taskiq_redis/__init__.py +10 -0
- taskiq_redis/redis_backend.py +335 -11
- taskiq_redis/redis_broker.py +14 -3
- taskiq_redis/redis_sentinel_broker.py +132 -0
- taskiq_redis/schedule_source.py +128 -13
- taskiq_redis-1.0.1.dist-info/LICENSE +21 -0
- {taskiq_redis-0.5.6.dist-info → taskiq_redis-1.0.1.dist-info}/METADATA +2 -2
- taskiq_redis-1.0.1.dist-info/RECORD +12 -0
- taskiq_redis/serializer.py +0 -16
- taskiq_redis-0.5.6.dist-info/RECORD +0 -11
- {taskiq_redis-0.5.6.dist-info → taskiq_redis-1.0.1.dist-info}/WHEEL +0 -0
taskiq_redis/__init__.py
CHANGED
@@ -2,20 +2,30 @@
 from taskiq_redis.redis_backend import (
     RedisAsyncClusterResultBackend,
     RedisAsyncResultBackend,
+    RedisAsyncSentinelResultBackend,
 )
 from taskiq_redis.redis_broker import ListQueueBroker, PubSubBroker
 from taskiq_redis.redis_cluster_broker import ListQueueClusterBroker
+from taskiq_redis.redis_sentinel_broker import (
+    ListQueueSentinelBroker,
+    PubSubSentinelBroker,
+)
 from taskiq_redis.schedule_source import (
     RedisClusterScheduleSource,
     RedisScheduleSource,
+    RedisSentinelScheduleSource,
 )
 
 __all__ = [
     "RedisAsyncClusterResultBackend",
     "RedisAsyncResultBackend",
+    "RedisAsyncSentinelResultBackend",
     "ListQueueBroker",
     "PubSubBroker",
     "ListQueueClusterBroker",
+    "ListQueueSentinelBroker",
+    "PubSubSentinelBroker",
     "RedisScheduleSource",
     "RedisClusterScheduleSource",
+    "RedisSentinelScheduleSource",
 ]
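Taken together, the `__init__.py` changes surface the new Sentinel-aware classes at the package root. A minimal wiring sketch (the sentinel address and master name are placeholders, and `with_result_backend` is taskiq's usual broker-chaining helper):

```python
from taskiq_redis import (
    ListQueueSentinelBroker,
    RedisAsyncSentinelResultBackend,
)

# Placeholder addresses: point these at your real Sentinel nodes.
SENTINELS = [("localhost", 26379)]

result_backend: RedisAsyncSentinelResultBackend[str] = RedisAsyncSentinelResultBackend(
    sentinels=SENTINELS,
    master_name="mymaster",
)

broker = ListQueueSentinelBroker(
    sentinels=SENTINELS,
    master_name="mymaster",
).with_result_backend(result_backend)
```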
taskiq_redis/redis_backend.py
CHANGED
@@ -1,10 +1,26 @@
-import pickle
-from typing import Any, Dict, Optional, TypeVar, Union
+import sys
+from contextlib import asynccontextmanager
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncIterator,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+)
 
-from redis.asyncio import BlockingConnectionPool, Redis
+from redis.asyncio import BlockingConnectionPool, Redis, Sentinel
 from redis.asyncio.cluster import RedisCluster
+from redis.asyncio.connection import Connection
 from taskiq import AsyncResultBackend
 from taskiq.abc.result_backend import TaskiqResult
+from taskiq.abc.serializer import TaskiqSerializer
+from taskiq.compat import model_dump, model_validate
+from taskiq.depends.progress_tracker import TaskProgress
+from taskiq.serializers import PickleSerializer
 
 from taskiq_redis.exceptions import (
     DuplicateExpireTimeSelectedError,
@@ -12,8 +28,22 @@ from taskiq_redis.exceptions import (
     ResultIsMissingError,
 )
 
+if sys.version_info >= (3, 10):
+    from typing import TypeAlias
+else:
+    from typing_extensions import TypeAlias
+
+if TYPE_CHECKING:
+    _Redis: TypeAlias = Redis[bytes]
+    _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]
+else:
+    _Redis: TypeAlias = Redis
+    _BlockingConnectionPool: TypeAlias = BlockingConnectionPool
+
 _ReturnType = TypeVar("_ReturnType")
 
+PROGRESS_KEY_SUFFIX = "__progress"
+
 
 class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
     """Async result based on redis."""
@@ -25,6 +55,7 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         result_ex_time: Optional[int] = None,
         result_px_time: Optional[int] = None,
         max_connection_pool_size: Optional[int] = None,
+        serializer: Optional[TaskiqSerializer] = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -42,11 +73,12 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
             and result_px_time are equal zero.
         """
-        self.redis_pool = BlockingConnectionPool.from_url(
+        self.redis_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url(
             url=redis_url,
             max_connections=max_connection_pool_size,
             **connection_kwargs,
         )
+        self.serializer = serializer or PickleSerializer()
         self.keep_results = keep_results
         self.result_ex_time = result_ex_time
         self.result_px_time = result_px_time
@@ -86,9 +118,9 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         :param task_id: ID of the task.
         :param result: TaskiqResult instance.
         """
-        redis_set_params: Dict[str, Union[str, bytes, int]] = {
+        redis_set_params: Dict[str, Union[str, int, bytes]] = {
             "name": task_id,
-            "value": pickle.dumps(result),
+            "value": self.serializer.dumpb(model_dump(result)),
         }
         if self.result_ex_time:
             redis_set_params["ex"] = self.result_ex_time
@@ -135,8 +167,9 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         if result_value is None:
             raise ResultIsMissingError
 
-        taskiq_result = pickle.loads(
-            result_value,
+        taskiq_result = model_validate(
+            TaskiqResult[_ReturnType],
+            self.serializer.loadb(result_value),
         )
 
         if not with_logs:
@@ -144,6 +177,55 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
 
         return taskiq_result
 
+    async def set_progress(
+        self,
+        task_id: str,
+        progress: TaskProgress[_ReturnType],
+    ) -> None:
+        """
+        Sets task progress in redis.
+
+        Dumps TaskProgress instance into the bytes and writes
+        it to redis with a standard suffix on the task_id as the key
+
+        :param task_id: ID of the task.
+        :param result: task's TaskProgress instance.
+        """
+        redis_set_params: Dict[str, Union[str, int, bytes]] = {
+            "name": task_id + PROGRESS_KEY_SUFFIX,
+            "value": self.serializer.dumpb(model_dump(progress)),
+        }
+        if self.result_ex_time:
+            redis_set_params["ex"] = self.result_ex_time
+        elif self.result_px_time:
+            redis_set_params["px"] = self.result_px_time
+
+        async with Redis(connection_pool=self.redis_pool) as redis:
+            await redis.set(**redis_set_params)  # type: ignore
+
+    async def get_progress(
+        self,
+        task_id: str,
+    ) -> Union[TaskProgress[_ReturnType], None]:
+        """
+        Gets progress results from the task.
+
+        :param task_id: task's id.
+        :return: task's TaskProgress instance.
+        """
+        async with Redis(connection_pool=self.redis_pool) as redis:
+            result_value = await redis.get(
+                name=task_id + PROGRESS_KEY_SUFFIX,
+            )
+
+        if result_value is None:
+            return None
+
+        return model_validate(
+            TaskProgress[_ReturnType],
+            self.serializer.loadb(result_value),
+        )
+
 
 class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
     """Async result backend based on redis cluster."""
@@ -154,6 +236,7 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         keep_results: bool = True,
         result_ex_time: Optional[int] = None,
         result_px_time: Optional[int] = None,
+        serializer: Optional[TaskiqSerializer] = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -174,6 +257,7 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
             redis_url,
             **connection_kwargs,
         )
+        self.serializer = serializer or PickleSerializer()
         self.keep_results = keep_results
         self.result_ex_time = result_ex_time
         self.result_px_time = result_px_time
@@ -215,7 +299,7 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         """
         redis_set_params: Dict[str, Union[str, bytes, int]] = {
             "name": task_id,
-            "value": pickle.dumps(result),
+            "value": self.serializer.dumpb(model_dump(result)),
         }
         if self.result_ex_time:
             redis_set_params["ex"] = self.result_ex_time
@@ -259,11 +343,251 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         if result_value is None:
             raise ResultIsMissingError
 
-        taskiq_result: TaskiqResult[_ReturnType] = pickle.loads(
-            result_value,
+        taskiq_result: TaskiqResult[_ReturnType] = model_validate(
+            TaskiqResult[_ReturnType],
+            self.serializer.loadb(result_value),
         )
 
         if not with_logs:
             taskiq_result.log = None
 
         return taskiq_result
+
+    async def set_progress(
+        self,
+        task_id: str,
+        progress: TaskProgress[_ReturnType],
+    ) -> None:
+        """
+        Sets task progress in redis.
+
+        Dumps TaskProgress instance into the bytes and writes
+        it to redis with a standard suffix on the task_id as the key
+
+        :param task_id: ID of the task.
+        :param result: task's TaskProgress instance.
+        """
+        redis_set_params: Dict[str, Union[str, int, bytes]] = {
+            "name": task_id + PROGRESS_KEY_SUFFIX,
+            "value": self.serializer.dumpb(model_dump(progress)),
+        }
+        if self.result_ex_time:
+            redis_set_params["ex"] = self.result_ex_time
+        elif self.result_px_time:
+            redis_set_params["px"] = self.result_px_time
+
+        await self.redis.set(**redis_set_params)  # type: ignore
+
+    async def get_progress(
+        self,
+        task_id: str,
+    ) -> Union[TaskProgress[_ReturnType], None]:
+        """
+        Gets progress results from the task.
+
+        :param task_id: task's id.
+        :return: task's TaskProgress instance.
+        """
+        result_value = await self.redis.get(  # type: ignore[attr-defined]
+            name=task_id + PROGRESS_KEY_SUFFIX,
+        )
+
+        if result_value is None:
+            return None
+
+        return model_validate(
+            TaskProgress[_ReturnType],
+            self.serializer.loadb(result_value),
+        )
+
+
+class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):
+    """Async result based on redis sentinel."""
+
+    def __init__(
+        self,
+        sentinels: List[Tuple[str, int]],
+        master_name: str,
+        keep_results: bool = True,
+        result_ex_time: Optional[int] = None,
+        result_px_time: Optional[int] = None,
+        min_other_sentinels: int = 0,
+        sentinel_kwargs: Optional[Any] = None,
+        serializer: Optional[TaskiqSerializer] = None,
+        **connection_kwargs: Any,
+    ) -> None:
+        """
+        Constructs a new result backend.
+
+        :param sentinels: list of sentinel host and ports pairs.
+        :param master_name: sentinel master name.
+        :param keep_results: flag to not remove results from Redis after reading.
+        :param result_ex_time: expire time in seconds for result.
+        :param result_px_time: expire time in milliseconds for result.
+        :param max_connection_pool_size: maximum number of connections in pool.
+        :param connection_kwargs: additional arguments for redis BlockingConnectionPool.
+
+        :raises DuplicateExpireTimeSelectedError: if result_ex_time
+            and result_px_time are selected.
+        :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
+            and result_px_time are equal zero.
+        """
+        self.sentinel = Sentinel(
+            sentinels=sentinels,
+            min_other_sentinels=min_other_sentinels,
+            sentinel_kwargs=sentinel_kwargs,
+            **connection_kwargs,
+        )
+        self.master_name = master_name
+        self.serializer = serializer or PickleSerializer()
+        self.keep_results = keep_results
+        self.result_ex_time = result_ex_time
+        self.result_px_time = result_px_time
+
+        unavailable_conditions = any(
+            (
+                self.result_ex_time is not None and self.result_ex_time <= 0,
+                self.result_px_time is not None and self.result_px_time <= 0,
+            ),
+        )
+        if unavailable_conditions:
+            raise ExpireTimeMustBeMoreThanZeroError(
+                "You must select one expire time param and it must be more than zero.",
+            )
+
+        if self.result_ex_time and self.result_px_time:
+            raise DuplicateExpireTimeSelectedError(
+                "Choose either result_ex_time or result_px_time.",
+            )
+
+    @asynccontextmanager
+    async def _acquire_master_conn(self) -> AsyncIterator[_Redis]:
+        async with self.sentinel.master_for(self.master_name) as redis_conn:
+            yield redis_conn
+
+    async def set_result(
+        self,
+        task_id: str,
+        result: TaskiqResult[_ReturnType],
+    ) -> None:
+        """
+        Sets task result in redis.
+
+        Dumps TaskiqResult instance into the bytes and writes
+        it to redis.
+
+        :param task_id: ID of the task.
+        :param result: TaskiqResult instance.
+        """
+        redis_set_params: Dict[str, Union[str, bytes, int]] = {
+            "name": task_id,
+            "value": self.serializer.dumpb(model_dump(result)),
+        }
+        if self.result_ex_time:
+            redis_set_params["ex"] = self.result_ex_time
+        elif self.result_px_time:
+            redis_set_params["px"] = self.result_px_time
+
+        async with self._acquire_master_conn() as redis:
+            await redis.set(**redis_set_params)  # type: ignore
+
+    async def is_result_ready(self, task_id: str) -> bool:
+        """
+        Returns whether the result is ready.
+
+        :param task_id: ID of the task.
+
+        :returns: True if the result is ready else False.
+        """
+        async with self._acquire_master_conn() as redis:
+            return bool(await redis.exists(task_id))
+
+    async def get_result(
+        self,
+        task_id: str,
+        with_logs: bool = False,
+    ) -> TaskiqResult[_ReturnType]:
+        """
+        Gets result from the task.
+
+        :param task_id: task's id.
+        :param with_logs: if True it will download task's logs.
+        :raises ResultIsMissingError: if there is no result when trying to get it.
+        :return: task's return value.
+        """
+        async with self._acquire_master_conn() as redis:
+            if self.keep_results:
+                result_value = await redis.get(
+                    name=task_id,
+                )
+            else:
+                result_value = await redis.getdel(
+                    name=task_id,
+                )
+
+        if result_value is None:
+            raise ResultIsMissingError
+
+        taskiq_result = model_validate(
+            TaskiqResult[_ReturnType],
+            self.serializer.loadb(result_value),
+        )
+
+        if not with_logs:
+            taskiq_result.log = None
+
+        return taskiq_result
+
+    async def set_progress(
+        self,
+        task_id: str,
+        progress: TaskProgress[_ReturnType],
+    ) -> None:
+        """
+        Sets task progress in redis.
+
+        Dumps TaskProgress instance into the bytes and writes
+        it to redis with a standard suffix on the task_id as the key
+
+        :param task_id: ID of the task.
+        :param result: task's TaskProgress instance.
+        """
+        redis_set_params: Dict[str, Union[str, int, bytes]] = {
+            "name": task_id + PROGRESS_KEY_SUFFIX,
+            "value": self.serializer.dumpb(model_dump(progress)),
+        }
+        if self.result_ex_time:
+            redis_set_params["ex"] = self.result_ex_time
+        elif self.result_px_time:
+            redis_set_params["px"] = self.result_px_time
+
+        async with self._acquire_master_conn() as redis:
+            await redis.set(**redis_set_params)  # type: ignore
+
+    async def get_progress(
+        self,
+        task_id: str,
+    ) -> Union[TaskProgress[_ReturnType], None]:
+        """
+        Gets progress results from the task.
+
+        :param task_id: task's id.
+        :return: task's TaskProgress instance.
+        """
+        async with self._acquire_master_conn() as redis:
+            result_value = await redis.get(
+                name=task_id + PROGRESS_KEY_SUFFIX,
+            )
+
+        if result_value is None:
+            return None
+
+        return model_validate(
+            TaskProgress[_ReturnType],
+            self.serializer.loadb(result_value),
+        )
+
+    async def shutdown(self) -> None:
+        """Shutdown sentinel connections."""
+        for sentinel in self.sentinel.sentinels:
+            await sentinel.aclose()  # type: ignore[attr-defined]
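Beyond Sentinel support, every backend now takes an optional `serializer` (defaulting to taskiq's `PickleSerializer`) and stores task progress under a second key, the task id plus `"__progress"`. A hedged sketch of polling progress through the plain backend; the `state` and `meta` attributes are assumed from taskiq's `TaskProgress` model:

```python
import asyncio

from taskiq_redis import RedisAsyncResultBackend


async def main() -> None:
    backend: RedisAsyncResultBackend[int] = RedisAsyncResultBackend(
        redis_url="redis://localhost:6379",  # placeholder URL
    )
    # Reads the "my-task-id__progress" key; returns None until a worker
    # has called set_progress for this task id.
    progress = await backend.get_progress("my-task-id")
    if progress is not None:
        print(progress.state, progress.meta)  # assumed TaskProgress fields


asyncio.run(main())
```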
taskiq_redis/redis_broker.py
CHANGED
@@ -1,7 +1,8 @@
+import sys
 from logging import getLogger
-from typing import Any, AsyncGenerator, Callable, Optional, TypeVar
+from typing import TYPE_CHECKING, Any, AsyncGenerator, Callable, Optional, TypeVar
 
-from redis.asyncio import BlockingConnectionPool, Redis
+from redis.asyncio import BlockingConnectionPool, Connection, Redis
 from taskiq.abc.broker import AsyncBroker
 from taskiq.abc.result_backend import AsyncResultBackend
 from taskiq.message import BrokerMessage
@@ -10,6 +11,16 @@ _T = TypeVar("_T")
 
 logger = getLogger("taskiq.redis_broker")
 
+if sys.version_info >= (3, 10):
+    from typing import TypeAlias
+else:
+    from typing_extensions import TypeAlias
+
+if TYPE_CHECKING:
+    _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]
+else:
+    _BlockingConnectionPool: TypeAlias = BlockingConnectionPool
+
 
 class BaseRedisBroker(AsyncBroker):
     """Base broker that works with Redis."""
@@ -40,7 +51,7 @@ class BaseRedisBroker(AsyncBroker):
             task_id_generator=task_id_generator,
         )
 
-        self.connection_pool: BlockingConnectionPool = BlockingConnectionPool.from_url(
+        self.connection_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url(
             url=url,
             max_connections=max_connection_pool_size,
             **connection_kwargs,

taskiq_redis/redis_sentinel_broker.py
ADDED
@@ -0,0 +1,132 @@
+import sys
+from contextlib import asynccontextmanager
+from logging import getLogger
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncGenerator,
+    AsyncIterator,
+    Callable,
+    List,
+    Optional,
+    Tuple,
+    TypeVar,
+)
+
+from redis.asyncio import Redis, Sentinel
+from taskiq import AsyncResultBackend, BrokerMessage
+from taskiq.abc.broker import AsyncBroker
+
+if sys.version_info >= (3, 10):
+    from typing import TypeAlias
+else:
+    from typing_extensions import TypeAlias
+
+if TYPE_CHECKING:
+    _Redis: TypeAlias = Redis[bytes]
+else:
+    _Redis: TypeAlias = Redis
+
+_T = TypeVar("_T")
+
+logger = getLogger("taskiq.redis_sentinel_broker")
+
+
+class BaseSentinelBroker(AsyncBroker):
+    """Base broker that works with Sentinel."""
+
+    def __init__(
+        self,
+        sentinels: List[Tuple[str, int]],
+        master_name: str,
+        result_backend: Optional[AsyncResultBackend[_T]] = None,
+        task_id_generator: Optional[Callable[[], str]] = None,
+        queue_name: str = "taskiq",
+        min_other_sentinels: int = 0,
+        sentinel_kwargs: Optional[Any] = None,
+        **connection_kwargs: Any,
+    ) -> None:
+        super().__init__(
+            result_backend=result_backend,
+            task_id_generator=task_id_generator,
+        )
+
+        self.sentinel = Sentinel(
+            sentinels=sentinels,
+            min_other_sentinels=min_other_sentinels,
+            sentinel_kwargs=sentinel_kwargs,
+            **connection_kwargs,
+        )
+        self.master_name = master_name
+        self.queue_name = queue_name
+
+    @asynccontextmanager
+    async def _acquire_master_conn(self) -> AsyncIterator[_Redis]:
+        async with self.sentinel.master_for(self.master_name) as redis_conn:
+            yield redis_conn
+
+
+class PubSubSentinelBroker(BaseSentinelBroker):
+    """Broker that works with Sentinel and broadcasts tasks to all workers."""
+
+    async def kick(self, message: BrokerMessage) -> None:
+        """
+        Publish message over PUBSUB channel.
+
+        :param message: message to send.
+        """
+        queue_name = message.labels.get("queue_name") or self.queue_name
+        async with self._acquire_master_conn() as redis_conn:
+            await redis_conn.publish(queue_name, message.message)
+
+    async def listen(self) -> AsyncGenerator[bytes, None]:
+        """
+        Listen redis queue for new messages.
+
+        This function listens to the pubsub channel
+        and yields all messages with proper types.
+
+        :yields: broker messages.
+        """
+        async with self._acquire_master_conn() as redis_conn:
+            redis_pubsub_channel = redis_conn.pubsub()
+            await redis_pubsub_channel.subscribe(self.queue_name)
+            async for message in redis_pubsub_channel.listen():
+                if not message:
+                    continue
+                if message["type"] != "message":
+                    logger.debug("Received non-message from redis: %s", message)
+                    continue
+                yield message["data"]
+
+
+class ListQueueSentinelBroker(BaseSentinelBroker):
+    """Broker that works with Sentinel and distributes tasks between workers."""
+
+    async def kick(self, message: BrokerMessage) -> None:
+        """
+        Put a message in a list.
+
+        This method appends a message to the list of all messages.
+
+        :param message: message to append.
+        """
+        queue_name = message.labels.get("queue_name") or self.queue_name
+        async with self._acquire_master_conn() as redis_conn:
+            await redis_conn.lpush(queue_name, message.message)
+
+    async def listen(self) -> AsyncGenerator[bytes, None]:
+        """
+        Listen redis queue for new messages.
+
+        This function listens to the queue
+        and yields new messages if they have BrokerMessage type.
+
+        :yields: broker messages.
+        """
+        redis_brpop_data_position = 1
+        async with self._acquire_master_conn() as redis_conn:
+            while True:
+                yield (await redis_conn.brpop(self.queue_name))[
+                    redis_brpop_data_position
+                ]
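Both Sentinel brokers resolve the destination queue per message: a `queue_name` label on the task wins, otherwise the broker's default is used. A sketch, assuming taskiq's convention that extra keyword arguments to `@broker.task(...)` become message labels:

```python
from taskiq_redis import ListQueueSentinelBroker

broker = ListQueueSentinelBroker(
    sentinels=[("localhost", 26379)],  # placeholder address
    master_name="mymaster",
    queue_name="taskiq",  # default list key
)


@broker.task(queue_name="urgent")  # label consulted by kick()
async def urgent_job() -> None:
    ...


@broker.task  # no label: pushed onto the default "taskiq" list
async def normal_job() -> None:
    ...
```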
taskiq_redis/schedule_source.py
CHANGED
@@ -1,12 +1,31 @@
-from typing import Any, List, Optional
+import sys
+from contextlib import asynccontextmanager
+from typing import TYPE_CHECKING, Any, AsyncIterator, List, Optional, Tuple
 
-from redis.asyncio import BlockingConnectionPool, Redis, RedisCluster
+from redis.asyncio import (
+    BlockingConnectionPool,
+    Connection,
+    Redis,
+    RedisCluster,
+    Sentinel,
+)
 from taskiq import ScheduleSource
 from taskiq.abc.serializer import TaskiqSerializer
 from taskiq.compat import model_dump, model_validate
 from taskiq.scheduler.scheduled_task import ScheduledTask
+from taskiq.serializers import PickleSerializer
 
-from taskiq_redis.serializer import PickleSerializer
+if sys.version_info >= (3, 10):
+    from typing import TypeAlias
+else:
+    from typing_extensions import TypeAlias
+
+if TYPE_CHECKING:
+    _Redis: TypeAlias = Redis[bytes]
+    _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]
+else:
+    _Redis: TypeAlias = Redis
+    _BlockingConnectionPool: TypeAlias = BlockingConnectionPool
 
 
 class RedisScheduleSource(ScheduleSource):
@@ -35,7 +54,7 @@ class RedisScheduleSource(ScheduleSource):
         **connection_kwargs: Any,
     ) -> None:
         self.prefix = prefix
-        self.connection_pool: BlockingConnectionPool = BlockingConnectionPool.from_url(
+        self.connection_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url(
             url=url,
             max_connections=max_connection_pool_size,
             **connection_kwargs,
@@ -117,7 +136,6 @@ class RedisClusterScheduleSource(ScheduleSource):
         self,
         url: str,
         prefix: str = "schedule",
-        buffer_size: int = 50,
         serializer: Optional[TaskiqSerializer] = None,
         **connection_kwargs: Any,
     ) -> None:
@@ -126,7 +144,6 @@ class RedisClusterScheduleSource(ScheduleSource):
             url,
             **connection_kwargs,
         )
-        self.buffer_size = buffer_size
         if serializer is None:
             serializer = PickleSerializer()
         self.serializer = serializer
@@ -156,14 +173,107 @@ class RedisClusterScheduleSource(ScheduleSource):
         :return: list of schedules.
         """
         schedules = []
-        buffer = []
         async for key in self.redis.scan_iter(f"{self.prefix}:*"):  # type: ignore[attr-defined]
-            buffer.append(key)
-            if len(buffer) >= self.buffer_size:
-                schedules.extend(await self.redis.mget(buffer))
-                buffer = []
-        if buffer:
-            schedules.extend(await self.redis.mget(buffer))
+            raw_schedule = await self.redis.get(key)  # type: ignore[attr-defined]
+            parsed_schedule = model_validate(
+                ScheduledTask,
+                self.serializer.loadb(raw_schedule),
+            )
+            schedules.append(parsed_schedule)
+        return schedules
+
+    async def post_send(self, task: ScheduledTask) -> None:
+        """Delete a task after it's completed."""
+        if task.time is not None:
+            await self.delete_schedule(task.schedule_id)
+
+    async def shutdown(self) -> None:
+        """Shut down the schedule source."""
+        await self.redis.aclose()  # type: ignore[attr-defined]
+
+
+class RedisSentinelScheduleSource(ScheduleSource):
+    """
+    Source of schedules for redis cluster.
+
+    This class allows you to store schedules in redis.
+    Also it supports dynamic schedules.
+
+    :param sentinels: list of sentinel host and ports pairs.
+    :param master_name: sentinel master name.
+    :param prefix: prefix for redis schedule keys.
+    :param buffer_size: buffer size for redis scan.
+        This is how many keys will be fetched at once.
+    :param max_connection_pool_size: maximum number of connections in pool.
+    :param serializer: serializer for data.
+    :param connection_kwargs: additional arguments for RedisCluster.
+    """
+
+    def __init__(
+        self,
+        sentinels: List[Tuple[str, int]],
+        master_name: str,
+        prefix: str = "schedule",
+        buffer_size: int = 50,
+        serializer: Optional[TaskiqSerializer] = None,
+        min_other_sentinels: int = 0,
+        sentinel_kwargs: Optional[Any] = None,
+        **connection_kwargs: Any,
+    ) -> None:
+        self.prefix = prefix
+        self.sentinel = Sentinel(
+            sentinels=sentinels,
+            min_other_sentinels=min_other_sentinels,
+            sentinel_kwargs=sentinel_kwargs,
+            **connection_kwargs,
+        )
+        self.master_name = master_name
+        self.buffer_size = buffer_size
+        if serializer is None:
+            serializer = PickleSerializer()
+        self.serializer = serializer
+
+    @asynccontextmanager
+    async def _acquire_master_conn(self) -> AsyncIterator[_Redis]:
+        async with self.sentinel.master_for(self.master_name) as redis_conn:
+            yield redis_conn
+
+    async def delete_schedule(self, schedule_id: str) -> None:
+        """Remove schedule by id."""
+        async with self._acquire_master_conn() as redis:
+            await redis.delete(f"{self.prefix}:{schedule_id}")
+
+    async def add_schedule(self, schedule: ScheduledTask) -> None:
+        """
+        Add schedule to redis.
+
+        :param schedule: schedule to add.
+        :param schedule_id: schedule id.
+        """
+        async with self._acquire_master_conn() as redis:
+            await redis.set(
+                f"{self.prefix}:{schedule.schedule_id}",
+                self.serializer.dumpb(model_dump(schedule)),
+            )
+
+    async def get_schedules(self) -> List[ScheduledTask]:
+        """
+        Get all schedules from redis.
+
+        This method is used by scheduler to get all schedules.
+
+        :return: list of schedules.
+        """
+        schedules = []
+        async with self._acquire_master_conn() as redis:
+            buffer = []
+            async for key in redis.scan_iter(f"{self.prefix}:*"):
+                buffer.append(key)
+                if len(buffer) >= self.buffer_size:
+                    schedules.extend(await redis.mget(buffer))
+                    buffer = []
+            if buffer:
+                schedules.extend(await redis.mget(buffer))
         return [
             model_validate(ScheduledTask, self.serializer.loadb(schedule))
             for schedule in schedules
@@ -174,3 +284,8 @@ class RedisClusterScheduleSource(ScheduleSource):
         """Delete a task after it's completed."""
         if task.time is not None:
             await self.delete_schedule(task.schedule_id)
+
+    async def shutdown(self) -> None:
+        """Shut down the schedule source."""
+        for sentinel in self.sentinel.sentinels:
+            await sentinel.aclose()  # type: ignore[attr-defined]
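A sketch of plugging the new source into taskiq's scheduler: schedules live under `schedule:<schedule_id>` keys and are read back in `buffer_size`-sized batches via SCAN plus MGET (the `TaskiqScheduler` wiring follows taskiq's documented pattern):

```python
from taskiq import TaskiqScheduler

from taskiq_redis import ListQueueSentinelBroker, RedisSentinelScheduleSource

SENTINELS = [("localhost", 26379)]  # placeholder address

broker = ListQueueSentinelBroker(sentinels=SENTINELS, master_name="mymaster")
source = RedisSentinelScheduleSource(
    sentinels=SENTINELS,
    master_name="mymaster",
    buffer_size=50,  # keys fetched per MGET batch
)

scheduler = TaskiqScheduler(broker=broker, sources=[source])
```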
taskiq_redis-1.0.1.dist-info/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022-2024 Pavel Kirilin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
{taskiq_redis-0.5.6.dist-info → taskiq_redis-1.0.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: taskiq-redis
-Version: 0.5.6
+Version: 1.0.1
 Summary: Redis integration for taskiq
 Home-page: https://github.com/taskiq-python/taskiq-redis
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
@@ -16,7 +16,7 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.8
 Requires-Dist: redis (>=5,<6)
-Requires-Dist: taskiq (>=0.
+Requires-Dist: taskiq (>=0.11.1,<1)
 Project-URL: Repository, https://github.com/taskiq-python/taskiq-redis
 Description-Content-Type: text/markdown
 
taskiq_redis-1.0.1.dist-info/RECORD
ADDED
@@ -0,0 +1,12 @@
+taskiq_redis/__init__.py,sha256=UEW3rQXt4jinMnAKJlpXQhyPDh6SU2in0bPgzfIo3y4,911
+taskiq_redis/exceptions.py,sha256=eS4bfZVAjyMsnFs3IF74uYwO1KZOlrYxhxgPqD49ztU,561
+taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+taskiq_redis/redis_backend.py,sha256=mGU3rXJ727X-qYqSfaderTFVGz93NpCyHCf5PDFTjGk,19543
+taskiq_redis/redis_broker.py,sha256=c43ytdc-NJ_Zf-eQOkSBFZE1-r6DLbkL6ROLgvZ4HMA,4566
+taskiq_redis/redis_cluster_broker.py,sha256=CgPKkoEHZ1moNM-VNmzPQdjjNOrhiVUCNV-7FrUgqTo,2121
+taskiq_redis/redis_sentinel_broker.py,sha256=5MxUFIX7qRyDT7IHebLUhxAmmUwk1_b2sxjpSXRcjlo,4114
+taskiq_redis/schedule_source.py,sha256=bk96UBg8op-Xqg_PVETgyDb92cDaY69EAjpP8GvYSnY,10068
+taskiq_redis-1.0.1.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
+taskiq_redis-1.0.1.dist-info/METADATA,sha256=gCDmIUvQcF4p9lhq3sANxC-McTV3LuwzveAi5qqZdOs,4030
+taskiq_redis-1.0.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+taskiq_redis-1.0.1.dist-info/RECORD,,
taskiq_redis/serializer.py
DELETED
@@ -1,16 +0,0 @@
-import pickle
-from typing import Any
-
-from taskiq.abc.serializer import TaskiqSerializer
-
-
-class PickleSerializer(TaskiqSerializer):
-    """Serializer that uses pickle."""
-
-    def dumpb(self, value: Any) -> bytes:
-        """Dumps value to bytes."""
-        return pickle.dumps(value)
-
-    def loadb(self, value: bytes) -> Any:
-        """Loads value from bytes."""
-        return pickle.loads(value)  # noqa: S301
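The bundled serializer was removed because the identical class now ships with taskiq itself (it is what the backends above default to); migrating callers only change the import path:

```python
# taskiq-redis 0.5.6
# from taskiq_redis.serializer import PickleSerializer

# taskiq-redis 1.0.1: the class is provided by taskiq
from taskiq.serializers import PickleSerializer
```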
taskiq_redis-0.5.6.dist-info/RECORD
DELETED
@@ -1,11 +0,0 @@
-taskiq_redis/__init__.py,sha256=fMdXYxulcaKur66UUlmqAQf_q24jT5UHDYsMYP6J4fw,602
-taskiq_redis/exceptions.py,sha256=eS4bfZVAjyMsnFs3IF74uYwO1KZOlrYxhxgPqD49ztU,561
-taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-taskiq_redis/redis_backend.py,sha256=BwzFWXLHqpQEhZ675dvt2ueOfB7xjJOAGTSHZSyoR7A,8914
-taskiq_redis/redis_broker.py,sha256=b5oOKXP-uuqffGnNhUsT4HgTPmBiBdAfpUOd5V0VfFc,4254
-taskiq_redis/redis_cluster_broker.py,sha256=CgPKkoEHZ1moNM-VNmzPQdjjNOrhiVUCNV-7FrUgqTo,2121
-taskiq_redis/schedule_source.py,sha256=uznI6wbrdSbD-hIAF3xDGTVXD99SnfWYAuYsTQUIL8E,6202
-taskiq_redis/serializer.py,sha256=x-1ExYoD_EnDiM53lyvI99MdTpNj_pORMIaCL07-6nU,416
-taskiq_redis-0.5.6.dist-info/METADATA,sha256=zM6LiFFui-OFZqAVJnQ-qdh-U7_wKrTBnHYNMJuar4M,4030
-taskiq_redis-0.5.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-taskiq_redis-0.5.6.dist-info/RECORD,,
{taskiq_redis-0.5.6.dist-info → taskiq_redis-1.0.1.dist-info}/WHEEL
UNCHANGED
File renamed with its dist-info directory; contents identical between versions.