taskiq-redis 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- taskiq_redis/exceptions.py +10 -0
- taskiq_redis/redis_backend.py +57 -29
- {taskiq_redis-0.2.0.dist-info → taskiq_redis-0.2.1.dist-info}/METADATA +17 -2
- taskiq_redis-0.2.1.dist-info/RECORD +7 -0
- {taskiq_redis-0.2.0.dist-info → taskiq_redis-0.2.1.dist-info}/WHEEL +1 -1
- taskiq_redis-0.2.0.dist-info/RECORD +0 -6
taskiq_redis/exceptions.py
ADDED
@@ -0,0 +1,10 @@
+class TaskIQRedisError(Exception):
+    """Base error for all taskiq-redis exceptions."""
+
+
+class DuplicateExpireTimeSelectedError(TaskIQRedisError):
+    """Error if two lifetimes are selected."""
+
+
+class ExpireTimeMustBeMoreThanZeroError(TaskIQRedisError):
+    """Error if two lifetimes are less or equal zero."""
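
The two new error classes share the `TaskIQRedisError` base, so a misconfigured backend can be trapped with a single except clause. A minimal sketch of that pattern, assuming only the classes shown in this diff (the Redis URL is a placeholder; no connection is opened at construction time):

```python
# Hedged sketch: both 0.2.1 exceptions derive from TaskIQRedisError, so any
# configuration mistake can be caught generically. The URL is a placeholder.
from taskiq_redis.exceptions import TaskIQRedisError
from taskiq_redis.redis_backend import RedisAsyncResultBackend

try:
    backend = RedisAsyncResultBackend(
        redis_url="redis://localhost:6379",
        result_ex_time=60,
        result_px_time=60_000,  # selecting both lifetimes is rejected
    )
except TaskIQRedisError as exc:
    print(f"Invalid result backend configuration: {exc}")
```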
taskiq_redis/redis_backend.py
CHANGED
@@ -1,29 +1,63 @@
 import pickle
-from typing import TypeVar
+from typing import Dict, Optional, TypeVar, Union
 
 from redis.asyncio import ConnectionPool, Redis
 from taskiq import AsyncResultBackend
 from taskiq.abc.result_backend import TaskiqResult
 
+from taskiq_redis.exceptions import (
+    DuplicateExpireTimeSelectedError,
+    ExpireTimeMustBeMoreThanZeroError,
+)
+
 _ReturnType = TypeVar("_ReturnType")
 
 
 class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
     """Async result based on redis."""
 
-    def __init__(
+    def __init__(
+        self,
+        redis_url: str,
+        keep_results: bool = True,
+        result_ex_time: Optional[int] = None,
+        result_px_time: Optional[int] = None,
+    ):
         """
         Constructs a new result backend.
 
         :param redis_url: url to redis.
         :param keep_results: flag to not remove results from Redis after reading.
+        :param result_ex_time: expire time in seconds for result.
+        :param result_px_time: expire time in milliseconds for result.
+
+        :raises DuplicateExpireTimeSelectedError: if result_ex_time
+            and result_px_time are selected.
+        :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
+            and result_px_time are equal zero.
         """
         self.redis_pool = ConnectionPool.from_url(redis_url)
         self.keep_results = keep_results
+        self.result_ex_time = result_ex_time
+        self.result_px_time = result_px_time
+
+        if self.result_ex_time == 0 or self.result_px_time == 0:
+            raise ExpireTimeMustBeMoreThanZeroError(
+                "You must select one expire time param and it must be more than zero.",
+            )
+
+        if self.result_ex_time and self.result_px_time:
+            raise DuplicateExpireTimeSelectedError(
+                "Choose either result_ex_time or result_px_time.",
+            )
+
+        if not self.result_ex_time and not self.result_px_time:
+            self.result_ex_time = 60
 
     async def shutdown(self) -> None:
         """Closes redis connection."""
         await self.redis_pool.disconnect()
+        await super().shutdown()
 
     async def set_result(
         self,
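
The new constructor keywords drive the three validation branches added above: a zero lifetime raises `ExpireTimeMustBeMoreThanZeroError`, supplying both lifetimes raises `DuplicateExpireTimeSelectedError`, and omitting both falls back to a 60-second `result_ex_time`. A small sketch of those branches, assuming only the code in this hunk (placeholder URL; no Redis connection is needed at construction time):

```python
# Sketch of the 0.2.1 constructor behaviour; the URL is a placeholder.
from taskiq_redis.exceptions import (
    DuplicateExpireTimeSelectedError,
    ExpireTimeMustBeMoreThanZeroError,
)
from taskiq_redis.redis_backend import RedisAsyncResultBackend

REDIS_URL = "redis://localhost:6379"

# Neither lifetime given: the backend falls back to a 60-second EX time.
backend = RedisAsyncResultBackend(redis_url=REDIS_URL)
assert backend.result_ex_time == 60

# A zero lifetime is rejected outright.
try:
    RedisAsyncResultBackend(redis_url=REDIS_URL, result_px_time=0)
except ExpireTimeMustBeMoreThanZeroError:
    pass

# Selecting both lifetimes at once is rejected as well.
try:
    RedisAsyncResultBackend(
        redis_url=REDIS_URL,
        result_ex_time=10,
        result_px_time=10_000,
    )
except DuplicateExpireTimeSelectedError:
    pass
```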
@@ -39,19 +73,17 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         :param task_id: ID of the task.
         :param result: TaskiqResult instance.
         """
-
-
-
-
-
-
-
+        redis_set_params: Dict[str, Union[str, bytes, int]] = {
+            "name": task_id,
+            "value": pickle.dumps(result),
+        }
+        if self.result_ex_time:
+            redis_set_params["ex"] = self.result_ex_time
+        elif self.result_px_time:
+            redis_set_params["px"] = self.result_px_time
 
         async with Redis(connection_pool=self.redis_pool) as redis:
-            await redis.
-                task_id,
-                mapping=result_dict,
-            )
+            await redis.set(**redis_set_params)
 
     async def is_result_ready(self, task_id: str) -> bool:
         """
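
With this change `set_result` stores the whole pickled `TaskiqResult` under the task ID in one Redis `SET`, attaching `ex` or `px` when a lifetime was configured, instead of the previous hash-based layout. Roughly the same wire-level operation written against a plain `redis.asyncio` client is sketched below; it assumes a Redis server is reachable at the placeholder URL, and `store_result_like_021` is only an illustrative helper:

```python
# Hedged sketch of the new storage shape: one pickled value per task key,
# bounded by EX/PX. Requires a running Redis at the placeholder URL.
import asyncio
import pickle

from redis.asyncio import Redis


async def store_result_like_021(task_id: str, result: object) -> None:
    async with Redis.from_url("redis://localhost:6379") as redis:
        # The whole result object is pickled into a single string key;
        # EX (or PX) controls how long Redis keeps it.
        await redis.set(name=task_id, value=pickle.dumps(result), ex=60)


asyncio.run(store_result_like_021("demo-task-id", {"return_value": 42}))
```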
@@ -76,23 +108,19 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         :param with_logs: if True it will download task's logs.
         :return: task's return value.
         """
-        fields = list(TaskiqResult.__fields__.keys())
-
-        if not with_logs:
-            fields.remove("log")
-
         async with Redis(connection_pool=self.redis_pool) as redis:
-
-
-
-
+            if self.keep_results:
+                result_value = await redis.get(
+                    name=task_id,
+                )
+            else:
+                result_value = await redis.getdel(
+                    name=task_id,
+                )
 
-
-            await redis.delete(task_id)
+        taskiq_result: TaskiqResult[_ReturnType] = pickle.loads(result_value)
 
-
-
-            for result_value, result_key in zip(result_values, fields)
-        }
+        if not with_logs:
+            taskiq_result.log = None
 
-        return
+        return taskiq_result
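
On the read side, `keep_results` now selects between `GET` (the result stays in Redis) and `GETDEL` (the result is consumed on read; `GETDEL` needs Redis 6.2+), and the pickled payload is decoded back into a `TaskiqResult` whose log is dropped unless `with_logs=True`. An end-to-end sketch of the round trip follows; it needs a running Redis, the URL and task ID are placeholders, and the `TaskiqResult` constructor fields are assumed from the model referenced in this diff:

```python
# Hedged round-trip sketch for the 0.2.1 read path. Requires a running Redis
# (6.2+ for GETDEL); TaskiqResult field names are an assumption.
import asyncio

from taskiq.abc.result_backend import TaskiqResult
from taskiq_redis.redis_backend import RedisAsyncResultBackend


async def main() -> None:
    backend = RedisAsyncResultBackend(
        redis_url="redis://localhost:6379",
        keep_results=False,  # False -> GETDEL, so the key is consumed on read
    )
    result = TaskiqResult(is_err=False, return_value=42, execution_time=0.1)
    await backend.set_result("example-task-id", result)

    fetched = await backend.get_result("example-task-id", with_logs=False)
    print(fetched.return_value)  # 42; fetched.log is None without with_logs
    await backend.shutdown()


asyncio.run(main())
```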
{taskiq_redis-0.2.0.dist-info → taskiq_redis-0.2.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: taskiq-redis
-Version: 0.2.0
+Version: 0.2.1
 Summary: Redis integration for taskiq
 Home-page: https://github.com/taskiq-python/taskiq-redis
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
@@ -98,4 +98,19 @@ Brokers parameters:
 RedisAsyncResultBackend parameters:
 * `redis_url` - url to redis.
 * `keep_results` - flag to not remove results from Redis after reading.
-
+* `result_ex_time` - expire time in seconds (by default - 1 minute)
+* `result_px_time` - expire time in milliseconds (by default - not specified)
+> IMPORTANT: You must specify either `result_ex_time` or `result_px_time`.
+>```python
+># First variant
+>redis_async_result = RedisAsyncResultBackend(
+>    redis_url="redis://localhost:6379",
+>    result_ex_time=1000,
+>)
+>
+># Second variant
+>redis_async_result = RedisAsyncResultBackend(
+>    redis_url="redis://localhost:6379",
+>    result_px_time=1000000,
+>)
+>```
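
The two README variants configure roughly the same lifetime, since `result_ex_time=1000` seconds equals `result_px_time=1_000_000` milliseconds. A quick way to confirm the TTL that actually lands in Redis, using only APIs shown in this diff plus the standard `redis.ttl` call (needs a running Redis; names are placeholders and the `TaskiqResult` fields are assumed):

```python
# Hedged TTL check for the README example. Requires a running Redis.
import asyncio

from redis.asyncio import Redis
from taskiq.abc.result_backend import TaskiqResult
from taskiq_redis.redis_backend import RedisAsyncResultBackend


async def main() -> None:
    backend = RedisAsyncResultBackend(
        redis_url="redis://localhost:6379",
        result_ex_time=1000,
    )
    await backend.set_result(
        "ttl-demo-task",
        TaskiqResult(is_err=False, return_value=1, execution_time=0.0),
    )
    async with Redis(connection_pool=backend.redis_pool) as redis:
        # Remaining lifetime in seconds, set by the EX option in set_result.
        print(await redis.ttl("ttl-demo-task"))  # about 1000
    await backend.shutdown()


asyncio.run(main())
```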
taskiq_redis-0.2.1.dist-info/RECORD
ADDED
@@ -0,0 +1,7 @@
+taskiq_redis/__init__.py,sha256=7APdvz7YtRB2VpB0PvCvgIF_f4cPTXG5XyIBvdmfEVE,212
+taskiq_redis/exceptions.py,sha256=lqvMepJWafxn-oJBum4gI6JKSImxxLSf8esFtn5iR98,314
+taskiq_redis/redis_backend.py,sha256=k_ohDG8vW8MWk1vUQUKZKkylYLSYN5GBroAPrPrtmdc,4053
+taskiq_redis/redis_broker.py,sha256=l8bwfh2qKzqukJwAYY7wJ0aReMd7Ihme_1eUsxR4TPU,4279
+taskiq_redis-0.2.1.dist-info/METADATA,sha256=0gxtd5YkrQ5rUtvJm15Lldjz3dtidtMyrAYSJA9oPlI,3642
+taskiq_redis-0.2.1.dist-info/WHEEL,sha256=7Z8_27uaHI_UZAc4Uox4PpBhQ9Y5_modZXWMxtUi4NU,88
+taskiq_redis-0.2.1.dist-info/RECORD,,
taskiq_redis-0.2.0.dist-info/RECORD
DELETED
@@ -1,6 +0,0 @@
-taskiq_redis/__init__.py,sha256=7APdvz7YtRB2VpB0PvCvgIF_f4cPTXG5XyIBvdmfEVE,212
-taskiq_redis/redis_backend.py,sha256=N6XH_XWUQr2VBoNTe1ZbwgxHh8c4yWW5fF0mkBsXQ1c,2979
-taskiq_redis/redis_broker.py,sha256=l8bwfh2qKzqukJwAYY7wJ0aReMd7Ihme_1eUsxR4TPU,4279
-taskiq_redis-0.2.0.dist-info/METADATA,sha256=SvT_vjNmL4trJ7Md84uo75FRcq9bMYDEscXGxha7SSg,3130
-taskiq_redis-0.2.0.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
-taskiq_redis-0.2.0.dist-info/RECORD,,