taskiq-redis 0.5.5__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
1
1
  import pickle
2
- from typing import Dict, Optional, TypeVar, Union
2
+ from typing import Any, Dict, Optional, TypeVar, Union
3
3
 
4
- from redis.asyncio import ConnectionPool, Redis
4
+ from redis.asyncio import BlockingConnectionPool, Redis
5
5
  from redis.asyncio.cluster import RedisCluster
6
6
  from taskiq import AsyncResultBackend
7
7
  from taskiq.abc.result_backend import TaskiqResult
@@ -24,6 +24,8 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
24
24
  keep_results: bool = True,
25
25
  result_ex_time: Optional[int] = None,
26
26
  result_px_time: Optional[int] = None,
27
+ max_connection_pool_size: Optional[int] = None,
28
+ **connection_kwargs: Any,
27
29
  ) -> None:
28
30
  """
29
31
  Constructs a new result backend.
@@ -32,13 +34,19 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
32
34
  :param keep_results: flag to not remove results from Redis after reading.
33
35
  :param result_ex_time: expire time in seconds for result.
34
36
  :param result_px_time: expire time in milliseconds for result.
37
+ :param max_connection_pool_size: maximum number of connections in pool.
38
+ :param connection_kwargs: additional arguments for redis BlockingConnectionPool.
35
39
 
36
40
  :raises DuplicateExpireTimeSelectedError: if result_ex_time
37
41
  and result_px_time are selected.
38
42
  :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
39
43
  and result_px_time are equal zero.
40
44
  """
41
- self.redis_pool = ConnectionPool.from_url(redis_url)
45
+ self.redis_pool = BlockingConnectionPool.from_url(
46
+ url=redis_url,
47
+ max_connections=max_connection_pool_size,
48
+ **connection_kwargs,
49
+ )
42
50
  self.keep_results = keep_results
43
51
  self.result_ex_time = result_ex_time
44
52
  self.result_px_time = result_px_time
@@ -146,6 +154,7 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
146
154
  keep_results: bool = True,
147
155
  result_ex_time: Optional[int] = None,
148
156
  result_px_time: Optional[int] = None,
157
+ **connection_kwargs: Any,
149
158
  ) -> None:
150
159
  """
151
160
  Constructs a new result backend.
@@ -154,13 +163,17 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
154
163
  :param keep_results: flag to not remove results from Redis after reading.
155
164
  :param result_ex_time: expire time in seconds for result.
156
165
  :param result_px_time: expire time in milliseconds for result.
166
+ :param connection_kwargs: additional arguments for RedisCluster.
157
167
 
158
168
  :raises DuplicateExpireTimeSelectedError: if result_ex_time
159
169
  and result_px_time are selected.
160
170
  :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
161
171
  and result_px_time are equal zero.
162
172
  """
163
- self.redis: RedisCluster[bytes] = RedisCluster.from_url(redis_url)
173
+ self.redis: RedisCluster[bytes] = RedisCluster.from_url(
174
+ redis_url,
175
+ **connection_kwargs,
176
+ )
164
177
  self.keep_results = keep_results
165
178
  self.result_ex_time = result_ex_time
166
179
  self.result_px_time = result_px_time
@@ -1,7 +1,7 @@
1
1
  from logging import getLogger
2
2
  from typing import Any, AsyncGenerator, Callable, Optional, TypeVar
3
3
 
4
- from redis.asyncio import ConnectionPool, Redis
4
+ from redis.asyncio import BlockingConnectionPool, ConnectionPool, Redis
5
5
  from taskiq.abc.broker import AsyncBroker
6
6
  from taskiq.abc.result_backend import AsyncResultBackend
7
7
  from taskiq.message import BrokerMessage
@@ -31,14 +31,16 @@ class BaseRedisBroker(AsyncBroker):
31
31
  :param result_backend: custom result backend.
32
32
  :param queue_name: name for a list in redis.
33
33
  :param max_connection_pool_size: maximum number of connections in pool.
34
- :param connection_kwargs: additional arguments for aio-redis ConnectionPool.
34
+ Each worker opens its own connection. Therefore this value has to be
35
+ at least number of workers + 1.
36
+ :param connection_kwargs: additional arguments for redis BlockingConnectionPool.
35
37
  """
36
38
  super().__init__(
37
39
  result_backend=result_backend,
38
40
  task_id_generator=task_id_generator,
39
41
  )
40
42
 
41
- self.connection_pool: ConnectionPool = ConnectionPool.from_url(
43
+ self.connection_pool: ConnectionPool = BlockingConnectionPool.from_url(
42
44
  url=url,
43
45
  max_connections=max_connection_pool_size,
44
46
  **connection_kwargs,
@@ -60,8 +62,9 @@ class PubSubBroker(BaseRedisBroker):
60
62
 
61
63
  :param message: message to send.
62
64
  """
65
+ queue_name = message.labels.get("queue_name") or self.queue_name
63
66
  async with Redis(connection_pool=self.connection_pool) as redis_conn:
64
- await redis_conn.publish(self.queue_name, message.message)
67
+ await redis_conn.publish(queue_name, message.message)
65
68
 
66
69
  async def listen(self) -> AsyncGenerator[bytes, None]:
67
70
  """
@@ -95,8 +98,9 @@ class ListQueueBroker(BaseRedisBroker):
95
98
 
96
99
  :param message: message to append.
97
100
  """
101
+ queue_name = message.labels.get("queue_name") or self.queue_name
98
102
  async with Redis(connection_pool=self.connection_pool) as redis_conn:
99
- await redis_conn.lpush(self.queue_name, message.message)
103
+ await redis_conn.lpush(queue_name, message.message)
100
104
 
101
105
  async def listen(self) -> AsyncGenerator[bytes, None]:
102
106
  """
@@ -1,6 +1,6 @@
1
1
  from typing import Any, List, Optional
2
2
 
3
- from redis.asyncio import ConnectionPool, Redis, RedisCluster
3
+ from redis.asyncio import BlockingConnectionPool, ConnectionPool, Redis, RedisCluster
4
4
  from taskiq import ScheduleSource
5
5
  from taskiq.abc.serializer import TaskiqSerializer
6
6
  from taskiq.compat import model_dump, model_validate
@@ -22,7 +22,7 @@ class RedisScheduleSource(ScheduleSource):
22
22
  This is how many keys will be fetched at once.
23
23
  :param max_connection_pool_size: maximum number of connections in pool.
24
24
  :param serializer: serializer for data.
25
- :param connection_kwargs: additional arguments for aio-redis ConnectionPool.
25
+ :param connection_kwargs: additional arguments for redis BlockingConnectionPool.
26
26
  """
27
27
 
28
28
  def __init__(
@@ -35,7 +35,7 @@ class RedisScheduleSource(ScheduleSource):
35
35
  **connection_kwargs: Any,
36
36
  ) -> None:
37
37
  self.prefix = prefix
38
- self.connection_pool: ConnectionPool = ConnectionPool.from_url(
38
+ self.connection_pool: ConnectionPool = BlockingConnectionPool.from_url(
39
39
  url=url,
40
40
  max_connections=max_connection_pool_size,
41
41
  **connection_kwargs,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: taskiq-redis
3
- Version: 0.5.5
3
+ Version: 0.5.6
4
4
  Summary: Redis integration for taskiq
5
5
  Home-page: https://github.com/taskiq-python/taskiq-redis
6
6
  Keywords: taskiq,tasks,distributed,async,redis,result_backend
@@ -93,6 +93,9 @@ Brokers parameters:
93
93
  * `result_backend` - custom result backend.
94
94
  * `queue_name` - name of the pub/sub channel in redis.
95
95
  * `max_connection_pool_size` - maximum number of connections in pool.
96
+ * Any other keyword arguments are passed to `redis.asyncio.BlockingConnectionPool`.
97
+ Notably, you can use `timeout` to set custom timeout in seconds for reconnects
98
+ (or set it to `None` to try reconnects indefinitely).
96
99
 
97
100
  ## RedisAsyncResultBackend configuration
98
101
 
@@ -101,6 +104,9 @@ RedisAsyncResultBackend parameters:
101
104
  * `keep_results` - flag to not remove results from Redis after reading.
102
105
  * `result_ex_time` - expire time in seconds (by default - not specified)
103
106
  * `result_px_time` - expire time in milliseconds (by default - not specified)
107
+ * Any other keyword arguments are passed to `redis.asyncio.BlockingConnectionPool`.
108
+ Notably, you can use `timeout` to set custom timeout in seconds for reconnects
109
+ (or set it to `None` to try reconnects indefinitely).
104
110
  > IMPORTANT: **It is highly recommended to use expire time in RedisAsyncResultBackend**
105
111
  > If you want to add expiration, either `result_ex_time` or `result_px_time` must be set.
106
112
  >```python
@@ -0,0 +1,11 @@
1
+ taskiq_redis/__init__.py,sha256=fMdXYxulcaKur66UUlmqAQf_q24jT5UHDYsMYP6J4fw,602
2
+ taskiq_redis/exceptions.py,sha256=eS4bfZVAjyMsnFs3IF74uYwO1KZOlrYxhxgPqD49ztU,561
3
+ taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ taskiq_redis/redis_backend.py,sha256=BwzFWXLHqpQEhZ675dvt2ueOfB7xjJOAGTSHZSyoR7A,8914
5
+ taskiq_redis/redis_broker.py,sha256=b5oOKXP-uuqffGnNhUsT4HgTPmBiBdAfpUOd5V0VfFc,4254
6
+ taskiq_redis/redis_cluster_broker.py,sha256=CgPKkoEHZ1moNM-VNmzPQdjjNOrhiVUCNV-7FrUgqTo,2121
7
+ taskiq_redis/schedule_source.py,sha256=uznI6wbrdSbD-hIAF3xDGTVXD99SnfWYAuYsTQUIL8E,6202
8
+ taskiq_redis/serializer.py,sha256=x-1ExYoD_EnDiM53lyvI99MdTpNj_pORMIaCL07-6nU,416
9
+ taskiq_redis-0.5.6.dist-info/METADATA,sha256=zM6LiFFui-OFZqAVJnQ-qdh-U7_wKrTBnHYNMJuar4M,4030
10
+ taskiq_redis-0.5.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
11
+ taskiq_redis-0.5.6.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.8.1
2
+ Generator: poetry-core 1.9.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -1,11 +0,0 @@
1
- taskiq_redis/__init__.py,sha256=fMdXYxulcaKur66UUlmqAQf_q24jT5UHDYsMYP6J4fw,602
2
- taskiq_redis/exceptions.py,sha256=eS4bfZVAjyMsnFs3IF74uYwO1KZOlrYxhxgPqD49ztU,561
3
- taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- taskiq_redis/redis_backend.py,sha256=Q_pJ1Bz-NpTyFT68UswBvsNYWkLtnSPF1x8QbOnbWI0,8357
5
- taskiq_redis/redis_broker.py,sha256=qQLWWvY-NacVXkgDGVCe2fyWYPkjZiOnggB0hpPStqw,3957
6
- taskiq_redis/redis_cluster_broker.py,sha256=CgPKkoEHZ1moNM-VNmzPQdjjNOrhiVUCNV-7FrUgqTo,2121
7
- taskiq_redis/schedule_source.py,sha256=kFJP4418JwhqzhOMoP98EIaLNYGFvQWLKtlNUU0EvsY,6166
8
- taskiq_redis/serializer.py,sha256=x-1ExYoD_EnDiM53lyvI99MdTpNj_pORMIaCL07-6nU,416
9
- taskiq_redis-0.5.5.dist-info/METADATA,sha256=5GSOGw4oUTeMojhu9IpIz6ZeEkqbIFrdqcMUbIaQbv4,3588
10
- taskiq_redis-0.5.5.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
11
- taskiq_redis-0.5.5.dist-info/RECORD,,