taskiq-redis 0.2.0__tar.gz → 0.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {taskiq_redis-0.2.0 → taskiq_redis-0.2.1}/PKG-INFO +17 -2
- {taskiq_redis-0.2.0 → taskiq_redis-0.2.1}/README.md +16 -0
- {taskiq_redis-0.2.0 → taskiq_redis-0.2.1}/pyproject.toml +1 -1
- taskiq_redis-0.2.1/taskiq_redis/exceptions.py +10 -0
- taskiq_redis-0.2.1/taskiq_redis/redis_backend.py +126 -0
- taskiq_redis-0.2.0/taskiq_redis/redis_backend.py +0 -98
- {taskiq_redis-0.2.0 → taskiq_redis-0.2.1}/taskiq_redis/__init__.py +0 -0
- {taskiq_redis-0.2.0 → taskiq_redis-0.2.1}/taskiq_redis/redis_broker.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: taskiq-redis
|
|
3
|
-
Version: 0.2.
|
|
3
|
+
Version: 0.2.1
|
|
4
4
|
Summary: Redis integration for taskiq
|
|
5
5
|
Home-page: https://github.com/taskiq-python/taskiq-redis
|
|
6
6
|
Keywords: taskiq,tasks,distributed,async,redis,result_backend
|
|
@@ -98,4 +98,19 @@ Brokers parameters:
|
|
|
98
98
|
RedisAsyncResultBackend parameters:
|
|
99
99
|
* `redis_url` - url to redis.
|
|
100
100
|
* `keep_results` - flag to not remove results from Redis after reading.
|
|
101
|
-
|
|
101
|
+
* `result_ex_time` - expire time in seconds (by default - 1 minute)
|
|
102
|
+
* `result_px_time` - expire time in milliseconds (by default - not specified)
|
|
103
|
+
> IMPORTANT: You can specify either `result_ex_time` or `result_px_time`, but not both; if neither is given, results expire after 1 minute.
|
|
104
|
+
>```python
|
|
105
|
+
># First variant
|
|
106
|
+
>redis_async_result = RedisAsyncResultBackend(
|
|
107
|
+
> redis_url="redis://localhost:6379",
|
|
108
|
+
> result_ex_time=1000,
|
|
109
|
+
>)
|
|
110
|
+
>
|
|
111
|
+
># Second variant
|
|
112
|
+
>redis_async_result = RedisAsyncResultBackend(
|
|
113
|
+
> redis_url="redis://localhost:6379",
|
|
114
|
+
> result_px_time=1000000,
|
|
115
|
+
>)
|
|
116
|
+
>```
|
|
@@ -71,3 +71,19 @@ Brokers parameters:
|
|
|
71
71
|
RedisAsyncResultBackend parameters:
|
|
72
72
|
* `redis_url` - url to redis.
|
|
73
73
|
* `keep_results` - flag to not remove results from Redis after reading.
|
|
74
|
+
* `result_ex_time` - expire time in seconds (by default - 1 minute)
|
|
75
|
+
* `result_px_time` - expire time in milliseconds (by default - not specified)
|
|
76
|
+
> IMPORTANT: You can specify either `result_ex_time` or `result_px_time`, but not both; if neither is given, results expire after 1 minute.
|
|
77
|
+
>```python
|
|
78
|
+
># First variant
|
|
79
|
+
>redis_async_result = RedisAsyncResultBackend(
|
|
80
|
+
> redis_url="redis://localhost:6379",
|
|
81
|
+
> result_ex_time=1000,
|
|
82
|
+
>)
|
|
83
|
+
>
|
|
84
|
+
># Second variant
|
|
85
|
+
>redis_async_result = RedisAsyncResultBackend(
|
|
86
|
+
> redis_url="redis://localhost:6379",
|
|
87
|
+
> result_px_time=1000000,
|
|
88
|
+
>)
|
|
89
|
+
>```
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
class TaskIQRedisError(Exception):
    """Base error for all taskiq-redis exceptions."""


class DuplicateExpireTimeSelectedError(TaskIQRedisError):
    """Raised when both result_ex_time and result_px_time are selected."""


class ExpireTimeMustBeMoreThanZeroError(TaskIQRedisError):
    """Raised when a selected expire time is not a positive number."""
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import pickle
|
|
2
|
+
from typing import Dict, Optional, TypeVar, Union
|
|
3
|
+
|
|
4
|
+
from redis.asyncio import ConnectionPool, Redis
|
|
5
|
+
from taskiq import AsyncResultBackend
|
|
6
|
+
from taskiq.abc.result_backend import TaskiqResult
|
|
7
|
+
|
|
8
|
+
from taskiq_redis.exceptions import (
|
|
9
|
+
DuplicateExpireTimeSelectedError,
|
|
10
|
+
ExpireTimeMustBeMoreThanZeroError,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
_ReturnType = TypeVar("_ReturnType")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
    """Async result backend that stores pickled task results in Redis."""

    def __init__(
        self,
        redis_url: str,
        keep_results: bool = True,
        result_ex_time: Optional[int] = None,
        result_px_time: Optional[int] = None,
    ):
        """
        Constructs a new result backend.

        :param redis_url: url to redis.
        :param keep_results: flag to not remove results from Redis after reading.
        :param result_ex_time: expire time in seconds for result.
        :param result_px_time: expire time in milliseconds for result.

        :raises DuplicateExpireTimeSelectedError: if result_ex_time
            and result_px_time are both selected.
        :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
            or result_px_time is less than or equal to zero.
        """
        self.redis_pool = ConnectionPool.from_url(redis_url)
        self.keep_results = keep_results
        self.result_ex_time = result_ex_time
        self.result_px_time = result_px_time

        # Reject non-positive lifetimes up front. The previous check
        # (`== 0`) let negative values through __init__ only to fail
        # later inside redis.set().
        for expire_time in (self.result_ex_time, self.result_px_time):
            if expire_time is not None and expire_time <= 0:
                raise ExpireTimeMustBeMoreThanZeroError(
                    "You must select one expire time param and it must be more than zero.",
                )

        if self.result_ex_time and self.result_px_time:
            raise DuplicateExpireTimeSelectedError(
                "Choose either result_ex_time or result_px_time.",
            )

        # If neither lifetime was chosen, results expire after one minute.
        if not self.result_ex_time and not self.result_px_time:
            self.result_ex_time = 60

    async def shutdown(self) -> None:
        """Closes redis connection."""
        await self.redis_pool.disconnect()
        await super().shutdown()

    async def set_result(
        self,
        task_id: str,
        result: TaskiqResult[_ReturnType],
    ) -> None:
        """
        Sets task result in redis.

        Dumps TaskiqResult instance into the bytes and writes
        it to redis with the configured expiration (ex in seconds
        or px in milliseconds).

        :param task_id: ID of the task.
        :param result: TaskiqResult instance.
        """
        redis_set_params: Dict[str, Union[str, bytes, int]] = {
            "name": task_id,
            "value": pickle.dumps(result),
        }
        # Exactly one of these can be set (enforced in __init__).
        if self.result_ex_time:
            redis_set_params["ex"] = self.result_ex_time
        elif self.result_px_time:
            redis_set_params["px"] = self.result_px_time

        async with Redis(connection_pool=self.redis_pool) as redis:
            await redis.set(**redis_set_params)

    async def is_result_ready(self, task_id: str) -> bool:
        """
        Returns whether the result is ready.

        :param task_id: ID of the task.

        :returns: True if the result is ready else False.
        """
        async with Redis(connection_pool=self.redis_pool) as redis:
            return bool(await redis.exists(task_id))

    async def get_result(  # noqa: WPS210
        self,
        task_id: str,
        with_logs: bool = False,
    ) -> TaskiqResult[_ReturnType]:
        """
        Gets result from the task.

        :param task_id: task's id.
        :param with_logs: if True it will download task's logs.
        :return: task's return value.
        """
        async with Redis(connection_pool=self.redis_pool) as redis:
            if self.keep_results:
                result_value = await redis.get(
                    name=task_id,
                )
            else:
                # GETDEL atomically reads and removes the key.
                result_value = await redis.getdel(
                    name=task_id,
                )

        # NOTE(review): if the key is missing (never set or expired),
        # result_value is None and pickle.loads raises TypeError here —
        # callers should check is_result_ready first.
        # Results are unpickled; only store results from trusted workers,
        # since pickle can execute arbitrary code on load.
        taskiq_result: TaskiqResult[_ReturnType] = pickle.loads(result_value)

        if not with_logs:
            taskiq_result.log = None

        return taskiq_result
|
|
@@ -1,98 +0,0 @@
|
|
|
1
|
-
import pickle
|
|
2
|
-
from typing import TypeVar
|
|
3
|
-
|
|
4
|
-
from redis.asyncio import ConnectionPool, Redis
|
|
5
|
-
from taskiq import AsyncResultBackend
|
|
6
|
-
from taskiq.abc.result_backend import TaskiqResult
|
|
7
|
-
|
|
8
|
-
_ReturnType = TypeVar("_ReturnType")
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
    """Result backend that keeps task results in a Redis hash."""

    def __init__(self, redis_url: str, keep_results: bool = True):
        """
        Constructs a new result backend.

        :param redis_url: url to redis.
        :param keep_results: flag to not remove results from Redis after reading.
        """
        self.redis_pool = ConnectionPool.from_url(redis_url)
        self.keep_results = keep_results

    async def shutdown(self) -> None:
        """Closes redis connection."""
        await self.redis_pool.disconnect()

    async def set_result(
        self,
        task_id: str,
        result: TaskiqResult[_ReturnType],
    ) -> None:
        """
        Sets task result in redis.

        Each field of the TaskiqResult is pickled on its own and the
        whole mapping is written as a Redis hash under the task id.

        :param task_id: ID of the task.
        :param result: TaskiqResult instance.
        """
        payload = {
            field: pickle.dumps(value)
            for field, value in result.dict(exclude={"return_value"}).items()
        }
        # The raw return_value is pickled straight from the result object.
        # This trick preserves the original returned value and helps when
        # you return not serializable classes.
        payload["return_value"] = pickle.dumps(result.return_value)

        async with Redis(connection_pool=self.redis_pool) as redis:
            await redis.hset(
                task_id,
                mapping=payload,
            )

    async def is_result_ready(self, task_id: str) -> bool:
        """
        Returns whether the result is ready.

        :param task_id: ID of the task.

        :returns: True if the result is ready else False.
        """
        async with Redis(connection_pool=self.redis_pool) as redis:
            key_count = await redis.exists(task_id)
        return bool(key_count)

    async def get_result(  # noqa: WPS210
        self,
        task_id: str,
        with_logs: bool = False,
    ) -> TaskiqResult[_ReturnType]:
        """
        Gets result from the task.

        :param task_id: task's id.
        :param with_logs: if True it will download task's logs.
        :return: task's return value.
        """
        wanted_fields = list(TaskiqResult.__fields__.keys())
        if not with_logs:
            wanted_fields.remove("log")

        async with Redis(connection_pool=self.redis_pool) as redis:
            raw_values = await redis.hmget(
                name=task_id,
                keys=wanted_fields,
            )

            if not self.keep_results:
                await redis.delete(task_id)

            unpickled = {
                field: pickle.loads(raw)
                for raw, field in zip(raw_values, wanted_fields)
            }

        return TaskiqResult(**unpickled)
|
|
File without changes
|
|
File without changes
|