taskiq-redis 0.1.0__tar.gz → 0.2.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- PKG-INFO
+++ PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: taskiq-redis
-Version: 0.1.0
+Version: 0.2.1
 Summary: Redis integration for taskiq
 Home-page: https://github.com/taskiq-python/taskiq-redis
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
@@ -43,18 +43,19 @@ pip install taskiq-redis
 # Usage
 
 Let's see an example with the redis broker and redis async result:
+
 ```python
 import asyncio
 
-from taskiq_redis.redis_broker import RedisBroker
+from taskiq_redis.redis_broker import ListQueueBroker
 from taskiq_redis.redis_backend import RedisAsyncResultBackend
 
-
 redis_async_result = RedisAsyncResultBackend(
     redis_url="redis://localhost:6379",
 )
 
-broker = RedisBroker(
+# Or you can use PubSubBroker if you need broadcasting
+broker = ListQueueBroker(
     url="redis://localhost:6379",
     result_backend=redis_async_result,
 )
@@ -75,9 +76,17 @@ async def main():
 asyncio.run(main())
 ```
 
-## RedisBroker configuration
+## PubSubBroker and ListQueueBroker configuration
+
+We have two brokers with similar interfaces, but with different logic.
+The PubSubBroker uses redis' pubsub mechanism and is very powerful,
+but it executes every task on all workers, because PUBSUB broadcasts messages
+to all subscribers.
 
-RedisBroker parameters:
+If you want your messages to be processed only once, please use ListQueueBroker.
+It uses redis' [LPUSH](https://redis.io/commands/lpush/) and [BRPOP](https://redis.io/commands/brpop/) commands to deal with messages.
+
+Broker parameters:
 * `url` - url to redis.
 * `task_id_generator` - custom task_id generator.
 * `result_backend` - custom result backend.
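The broadcast semantics described above make `PubSubBroker` a fit for work that genuinely should run on every worker (cache invalidation, config reloads) rather than ordinary task distribution. A minimal sketch, assuming a local Redis and the constructor shown in this diff; `reload_config` is a hypothetical task:

```python
from taskiq_redis.redis_broker import PubSubBroker

# PUBSUB delivers every message to every subscribed worker,
# so every worker executes this task.
broker = PubSubBroker(
    url="redis://localhost:6379",
)


@broker.task
async def reload_config() -> None:
    """Hypothetical task: each worker re-reads its own settings."""
```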
@@ -89,4 +98,19 @@ RedisBroker parameters:
 RedisAsyncResultBackend parameters:
 * `redis_url` - url to redis.
 * `keep_results` - flag to not remove results from Redis after reading.
-
+* `result_ex_time` - expire time in seconds (by default - 1 minute)
+* `result_px_time` - expire time in milliseconds (by default - not specified)
+> IMPORTANT: You must specify either `result_ex_time` or `result_px_time`.
+>```python
+># First variant
+>redis_async_result = RedisAsyncResultBackend(
+>    redis_url="redis://localhost:6379",
+>    result_ex_time=1000,
+>)
+>
+># Second variant
+>redis_async_result = RedisAsyncResultBackend(
+>    redis_url="redis://localhost:6379",
+>    result_px_time=1000000,
+>)
+>```
--- README.md
+++ README.md
@@ -16,18 +16,19 @@ pip install taskiq-redis
 # Usage
 
 Let's see an example with the redis broker and redis async result:
+
 ```python
 import asyncio
 
-from taskiq_redis.redis_broker import RedisBroker
+from taskiq_redis.redis_broker import ListQueueBroker
 from taskiq_redis.redis_backend import RedisAsyncResultBackend
 
-
 redis_async_result = RedisAsyncResultBackend(
     redis_url="redis://localhost:6379",
 )
 
-broker = RedisBroker(
+# Or you can use PubSubBroker if you need broadcasting
+broker = ListQueueBroker(
    url="redis://localhost:6379",
     result_backend=redis_async_result,
 )
@@ -48,9 +49,17 @@ async def main():
 asyncio.run(main())
 ```
 
-## RedisBroker configuration
+## PubSubBroker and ListQueueBroker configuration
+
+We have two brokers with similar interfaces, but with different logic.
+The PubSubBroker uses redis' pubsub mechanism and is very powerful,
+but it executes every task on all workers, because PUBSUB broadcasts messages
+to all subscribers.
+
+If you want your messages to be processed only once, please use ListQueueBroker.
+It uses redis' [LPUSH](https://redis.io/commands/lpush/) and [BRPOP](https://redis.io/commands/brpop/) commands to deal with messages.
 
-RedisBroker parameters:
+Broker parameters:
 * `url` - url to redis.
 * `task_id_generator` - custom task_id generator.
 * `result_backend` - custom result backend.
@@ -62,3 +71,19 @@ RedisBroker parameters:
 RedisAsyncResultBackend parameters:
 * `redis_url` - url to redis.
 * `keep_results` - flag to not remove results from Redis after reading.
+* `result_ex_time` - expire time in seconds (by default - 1 minute)
+* `result_px_time` - expire time in milliseconds (by default - not specified)
+> IMPORTANT: You must specify either `result_ex_time` or `result_px_time`.
+>```python
+># First variant
+>redis_async_result = RedisAsyncResultBackend(
+>    redis_url="redis://localhost:6379",
+>    result_ex_time=1000,
+>)
+>
+># Second variant
+>redis_async_result = RedisAsyncResultBackend(
+>    redis_url="redis://localhost:6379",
+>    result_px_time=1000000,
+>)
+>```
--- pyproject.toml
+++ pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "taskiq-redis"
-version = "0.1.0"
+version = "0.2.1"
 description = "Redis integration for taskiq"
 authors = ["taskiq-team <taskiq@norely.com>"]
 readme = "README.md"
--- /dev/null
+++ taskiq_redis/__init__.py
@@ -0,0 +1,5 @@
+"""Package for redis integration."""
+from taskiq_redis.redis_backend import RedisAsyncResultBackend
+from taskiq_redis.redis_broker import ListQueueBroker
+
+__all__ = ["RedisAsyncResultBackend", "ListQueueBroker"]
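Note that the new `__init__.py` re-exports only `ListQueueBroker`; per this diff, `PubSubBroker` has to be imported from its submodule:

```python
from taskiq_redis import ListQueueBroker, RedisAsyncResultBackend
from taskiq_redis.redis_broker import PubSubBroker
```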
--- /dev/null
+++ taskiq_redis/exceptions.py
@@ -0,0 +1,10 @@
+class TaskIQRedisError(Exception):
+    """Base error for all taskiq-redis exceptions."""
+
+
+class DuplicateExpireTimeSelectedError(TaskIQRedisError):
+    """Error if two lifetimes are selected."""
+
+
+class ExpireTimeMustBeMoreThanZeroError(TaskIQRedisError):
+    """Error if a lifetime is less than or equal to zero."""
--- /dev/null
+++ taskiq_redis/redis_backend.py
@@ -0,0 +1,126 @@
+import pickle
+from typing import Dict, Optional, TypeVar, Union
+
+from redis.asyncio import ConnectionPool, Redis
+from taskiq import AsyncResultBackend
+from taskiq.abc.result_backend import TaskiqResult
+
+from taskiq_redis.exceptions import (
+    DuplicateExpireTimeSelectedError,
+    ExpireTimeMustBeMoreThanZeroError,
+)
+
+_ReturnType = TypeVar("_ReturnType")
+
+
+class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
+    """Async result based on redis."""
+
+    def __init__(
+        self,
+        redis_url: str,
+        keep_results: bool = True,
+        result_ex_time: Optional[int] = None,
+        result_px_time: Optional[int] = None,
+    ):
+        """
+        Constructs a new result backend.
+
+        :param redis_url: url to redis.
+        :param keep_results: flag to not remove results from Redis after reading.
+        :param result_ex_time: expire time in seconds for result.
+        :param result_px_time: expire time in milliseconds for result.
+
+        :raises DuplicateExpireTimeSelectedError: if result_ex_time
+            and result_px_time are selected.
+        :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
+            and result_px_time are equal zero.
+        """
+        self.redis_pool = ConnectionPool.from_url(redis_url)
+        self.keep_results = keep_results
+        self.result_ex_time = result_ex_time
+        self.result_px_time = result_px_time
+
+        if self.result_ex_time == 0 or self.result_px_time == 0:
+            raise ExpireTimeMustBeMoreThanZeroError(
+                "You must select one expire time param and it must be more than zero.",
+            )
+
+        if self.result_ex_time and self.result_px_time:
+            raise DuplicateExpireTimeSelectedError(
+                "Choose either result_ex_time or result_px_time.",
+            )
+
+        if not self.result_ex_time and not self.result_px_time:
+            self.result_ex_time = 60
+
+    async def shutdown(self) -> None:
+        """Closes redis connection."""
+        await self.redis_pool.disconnect()
+        await super().shutdown()
+
+    async def set_result(
+        self,
+        task_id: str,
+        result: TaskiqResult[_ReturnType],
+    ) -> None:
+        """
+        Sets task result in redis.
+
+        Dumps TaskiqResult instance into the bytes and writes
+        it to redis.
+
+        :param task_id: ID of the task.
+        :param result: TaskiqResult instance.
+        """
+        redis_set_params: Dict[str, Union[str, bytes, int]] = {
+            "name": task_id,
+            "value": pickle.dumps(result),
+        }
+        if self.result_ex_time:
+            redis_set_params["ex"] = self.result_ex_time
+        elif self.result_px_time:
+            redis_set_params["px"] = self.result_px_time
+
+        async with Redis(connection_pool=self.redis_pool) as redis:
+            await redis.set(**redis_set_params)
+
+    async def is_result_ready(self, task_id: str) -> bool:
+        """
+        Returns whether the result is ready.
+
+        :param task_id: ID of the task.
+
+        :returns: True if the result is ready else False.
+        """
+        async with Redis(connection_pool=self.redis_pool) as redis:
+            return bool(await redis.exists(task_id))
+
+    async def get_result(  # noqa: WPS210
+        self,
+        task_id: str,
+        with_logs: bool = False,
+    ) -> TaskiqResult[_ReturnType]:
+        """
+        Gets result from the task.
+
+        :param task_id: task's id.
+        :param with_logs: if True it will download task's logs.
+        :return: task's return value.
+        """
+        async with Redis(connection_pool=self.redis_pool) as redis:
+            if self.keep_results:
+                result_value = await redis.get(
+                    name=task_id,
+                )
+            else:
+                result_value = await redis.getdel(
+                    name=task_id,
+                )
+
+        taskiq_result: TaskiqResult[_ReturnType] = pickle.loads(result_value)
+
+        if not with_logs:
+            taskiq_result.log = None
+
+        return taskiq_result
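The key design change from 0.1.0 is storing the whole `TaskiqResult` as one pickled string instead of a Redis hash of per-field pickles; that is what lets `SET` carry the `ex`/`px` expiry and lets `GETDEL` (Redis >= 6.2) read and delete atomically when `keep_results=False`. In raw redis-py terms the storage contract reduces to roughly this sketch (the dict stands in for a pickled `TaskiqResult`; assumes a local Redis):

```python
import asyncio
import pickle

from redis.asyncio import Redis


async def main() -> None:
    redis = Redis.from_url("redis://localhost:6379")
    payload = pickle.dumps({"return_value": 3, "is_err": False})

    # set_result: one key, one value, optional expiry.
    await redis.set("task-id", payload, ex=60)

    # is_result_ready: just EXISTS.
    print(bool(await redis.exists("task-id")))  # True

    # get_result with keep_results=False: atomic read-and-delete.
    raw = await redis.getdel("task-id")
    print(pickle.loads(raw))  # {'return_value': 3, 'is_err': False}

    await redis.close()


asyncio.run(main())
```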
--- taskiq_redis/redis_broker.py
+++ taskiq_redis/redis_broker.py
@@ -1,4 +1,5 @@
 import pickle
+from abc import abstractmethod
 from logging import getLogger
 from typing import Any, AsyncGenerator, Callable, Optional, TypeVar
 
@@ -12,8 +13,8 @@ _T = TypeVar("_T")  # noqa: WPS111
 logger = getLogger("taskiq.redis_broker")
 
 
-class RedisBroker(AsyncBroker):
-    """Broker that works with Redis."""
+class BaseRedisBroker(AsyncBroker):
+    """Base broker that works with Redis."""
 
     def __init__(
         self,
@@ -44,31 +45,12 @@ class RedisBroker(AsyncBroker):
             max_connections=max_connection_pool_size,
             **connection_kwargs,
         )
-
-        self.redis_pubsub_channel = queue_name
+        self.queue_name = queue_name
 
     async def shutdown(self) -> None:
         """Closes redis connection pool."""
         await self.connection_pool.disconnect()
 
-    async def kick(self, message: BrokerMessage) -> None:
-        """
-        Sends a message to the redis broker list.
-
-        This function constructs message for redis
-        and sends it.
-
-        The message is pickled dict object with message,
-        task_id, task_name and labels.
-
-        :param message: message to send.
-        """
-        async with Redis(connection_pool=self.connection_pool) as redis_conn:
-            await redis_conn.publish(
-                self.redis_pubsub_channel,
-                pickle.dumps(message),
-            )
-
     async def listen(self) -> AsyncGenerator[BrokerMessage, None]:
         """
         Listen redis queue for new messages.
@@ -78,24 +60,60 @@ class RedisBroker(AsyncBroker):
 
         :yields: broker messages.
         """
+        async for message in self._listen_to_raw_messages():
+            try:
+                redis_message = pickle.loads(message)
+                if isinstance(redis_message, BrokerMessage):
+                    yield redis_message
+            except (
+                TypeError,
+                AttributeError,
+                pickle.UnpicklingError,
+            ) as exc:
+                logger.debug(
+                    "Cannot read broker message %s",
+                    exc,
+                    exc_info=True,
+                )
+
+    @abstractmethod
+    async def _listen_to_raw_messages(self) -> AsyncGenerator[bytes, None]:
+        """
+        Generator for reading raw data from Redis.
+
+        :yields: raw data.
+        """
+        yield  # type: ignore
+
+
+class PubSubBroker(BaseRedisBroker):
+    """Broker that works with Redis and broadcasts tasks to all workers."""
+
+    async def kick(self, message: BrokerMessage) -> None:  # noqa: D102
+        async with Redis(connection_pool=self.connection_pool) as redis_conn:
+            await redis_conn.publish(self.queue_name, pickle.dumps(message))
+
+    async def _listen_to_raw_messages(self) -> AsyncGenerator[bytes, None]:
         async with Redis(connection_pool=self.connection_pool) as redis_conn:
             redis_pubsub_channel = redis_conn.pubsub()
-            await redis_pubsub_channel.subscribe(self.redis_pubsub_channel)
+            await redis_pubsub_channel.subscribe(self.queue_name)
             async for message in redis_pubsub_channel.listen():
-                if message:
-                    try:
-                        redis_message = pickle.loads(
-                            message["data"],
-                        )
-                        if isinstance(redis_message, BrokerMessage):
-                            yield redis_message
-                    except (
-                        TypeError,
-                        AttributeError,
-                        pickle.UnpicklingError,
-                    ) as exc:
-                        logger.debug(
-                            "Cannot read broker message %s",
-                            exc,
-                            exc_info=True,
-                        )
+                if not message:
+                    continue
+                yield message["data"]
+
+
+class ListQueueBroker(BaseRedisBroker):
+    """Broker that works with Redis and distributes tasks between workers."""
+
+    async def kick(self, message: BrokerMessage) -> None:  # noqa: D102
+        async with Redis(connection_pool=self.connection_pool) as redis_conn:
+            await redis_conn.lpush(self.queue_name, pickle.dumps(message))
+
+    async def _listen_to_raw_messages(self) -> AsyncGenerator[bytes, None]:
+        redis_brpop_data_position = 1
+        async with Redis(connection_pool=self.connection_pool) as redis_conn:
+            while True:  # noqa: WPS457
+                yield (await redis_conn.brpop(self.queue_name))[
+                    redis_brpop_data_position
+                ]
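One detail worth calling out: `brpop` blocks until a message arrives and returns a `(queue_name, payload)` pair, which is why `ListQueueBroker` indexes position 1. A standalone sketch of that shape (assumes a local Redis):

```python
import asyncio
import pickle

from redis.asyncio import Redis


async def main() -> None:
    redis = Redis.from_url("redis://localhost:6379")
    await redis.lpush("my_queue", pickle.dumps({"task_name": "add"}))

    # BRPOP returns (queue_name, payload) as bytes; the broker keeps
    # only the payload, i.e. index 1.
    queue, payload = await redis.brpop("my_queue")
    print(queue, pickle.loads(payload))

    await redis.close()


asyncio.run(main())
```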
--- taskiq_redis/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Package for redis integration."""
-from taskiq_redis.redis_backend import RedisAsyncResultBackend
-from taskiq_redis.redis_broker import RedisBroker
-
-__all__ = ["RedisAsyncResultBackend", "RedisBroker"]
--- taskiq_redis/redis_backend.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import pickle
-from typing import TypeVar
-
-from redis.asyncio import ConnectionPool, Redis
-from taskiq import AsyncResultBackend
-from taskiq.abc.result_backend import TaskiqResult
-
-_ReturnType = TypeVar("_ReturnType")
-
-
-class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
-    """Async result based on redis."""
-
-    def __init__(self, redis_url: str, keep_results: bool = True):
-        """
-        Constructs a new result backend.
-
-        :param redis_url: url to redis.
-        :param keep_results: flag to not remove results from Redis after reading.
-        """
-        self.redis_pool = ConnectionPool.from_url(redis_url)
-        self.keep_results = keep_results
-
-    async def shutdown(self) -> None:
-        """Closes redis connection."""
-        await self.redis_pool.disconnect()
-
-    async def set_result(
-        self,
-        task_id: str,
-        result: TaskiqResult[_ReturnType],
-    ) -> None:
-        """
-        Sets task result in redis.
-
-        Dumps TaskiqResult instance into the bytes and writes
-        it to redis.
-
-        :param task_id: ID of the task.
-        :param result: TaskiqResult instance.
-        """
-        result_dict = result.dict(exclude={"return_value"})
-
-        for result_key, result_value in result_dict.items():
-            result_dict[result_key] = pickle.dumps(result_value)
-        # This trick will preserve original returned value.
-        # It helps when you return not serializable classes.
-        result_dict["return_value"] = pickle.dumps(result.return_value)
-
-        async with Redis(connection_pool=self.redis_pool) as redis:
-            await redis.hset(
-                task_id,
-                mapping=result_dict,
-            )
-
-    async def is_result_ready(self, task_id: str) -> bool:
-        """
-        Returns whether the result is ready.
-
-        :param task_id: ID of the task.
-
-        :returns: True if the result is ready else False.
-        """
-        async with Redis(connection_pool=self.redis_pool) as redis:
-            return bool(await redis.exists(task_id))
-
-    async def get_result(  # noqa: WPS210
-        self,
-        task_id: str,
-        with_logs: bool = False,
-    ) -> TaskiqResult[_ReturnType]:
-        """
-        Gets result from the task.
-
-        :param task_id: task's id.
-        :param with_logs: if True it will download task's logs.
-        :return: task's return value.
-        """
-        fields = list(TaskiqResult.__fields__.keys())
-
-        if not with_logs:
-            fields.remove("log")
-
-        async with Redis(connection_pool=self.redis_pool) as redis:
-            result_values = await redis.hmget(
-                name=task_id,
-                keys=fields,
-            )
-
-            if not self.keep_results:
-                await redis.delete(task_id)
-
-        result = {
-            result_key: pickle.loads(result_value)
-            for result_value, result_key in zip(result_values, fields)
-        }
-
-        return TaskiqResult(**result)