taskiq-redis 1.0.4__py3-none-any.whl → 1.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,7 +13,7 @@ logger = getLogger("taskiq.redis_schedule_source")
13
13
 
14
14
 
15
15
  class ListRedisScheduleSource(ScheduleSource):
16
- """Schecule source based on arrays."""
16
+ """Schedule source based on arrays."""
17
17
 
18
18
  def __init__(
19
19
  self,
@@ -21,13 +21,13 @@ class ListRedisScheduleSource(ScheduleSource):
21
21
  prefix: str = "schedule",
22
22
  max_connection_pool_size: Optional[int] = None,
23
23
  serializer: Optional[TaskiqSerializer] = None,
24
- bufffer_size: int = 50,
24
+ buffer_size: int = 50,
25
25
  skip_past_schedules: bool = False,
26
26
  **connection_kwargs: Any,
27
27
  ) -> None:
28
28
  super().__init__()
29
29
  self._prefix = prefix
30
- self._buffer_size = bufffer_size
30
+ self._buffer_size = buffer_size
31
31
  self._connection_pool = BlockingConnectionPool.from_url(
32
32
  url=url,
33
33
  max_connections=max_connection_pool_size,
@@ -185,11 +185,11 @@ class ListRedisScheduleSource(ScheduleSource):
185
185
  async with Redis(connection_pool=self._connection_pool) as redis:
186
186
  buffer = []
187
187
  crons = await redis.lrange(self._get_cron_key(), 0, -1) # type: ignore
188
- logger.debug("Got cron scheduleds: %s", crons)
188
+ logger.debug("Got %d cron schedules", len(crons))
189
189
  if crons:
190
190
  buffer.extend(crons)
191
191
  timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1)) # type: ignore
192
- logger.debug("Got timed scheduleds: %s", crons)
192
+ logger.debug("Got %d timed schedules", len(timed))
193
193
  if timed:
194
194
  buffer.extend(timed)
195
195
  while buffer:
@@ -164,7 +164,10 @@ class RedisStreamBroker(BaseRedisBroker):
164
164
  consumer_name: Optional[str] = None,
165
165
  consumer_id: str = "$",
166
166
  mkstream: bool = True,
167
- xread_block: int = 10000,
167
+ xread_block: int = 2000,
168
+ maxlen: Optional[int] = None,
169
+ idle_timeout: int = 600000, # 10 minutes
170
+ unacknowledged_batch_size: int = 100,
168
171
  additional_streams: Optional[Dict[str, str]] = None,
169
172
  **connection_kwargs: Any,
170
173
  ) -> None:
@@ -184,8 +187,12 @@ class RedisStreamBroker(BaseRedisBroker):
184
187
  :param mkstream: create stream if it does not exist.
185
188
  :param xread_block: block time in ms for xreadgroup.
186
189
  Better to set it to a bigger value, to avoid unnecessary calls.
190
+ :param maxlen: sets the maximum length of the stream
191
trims the oldest entries of the stream each time a new element is added
187
192
  :param additional_streams: additional streams to read from.
188
193
  Each key is a stream name, value is a consumer id.
194
+ :param idle_timeout: time in ms to wait before redelivering a message.
195
+ :param unacknowledged_batch_size: number of unacknowledged messages to fetch.
189
196
  """
190
197
  super().__init__(
191
198
  url,
@@ -200,7 +207,10 @@ class RedisStreamBroker(BaseRedisBroker):
200
207
  self.consumer_id = consumer_id
201
208
  self.mkstream = mkstream
202
209
  self.block = xread_block
210
+ self.maxlen = maxlen
203
211
  self.additional_streams = additional_streams or {}
212
+ self.idle_timeout = idle_timeout
213
+ self.unacknowledged_batch_size = unacknowledged_batch_size
204
214
 
205
215
  async def _declare_consumer_group(self) -> None:
206
216
  """
@@ -235,7 +245,11 @@ class RedisStreamBroker(BaseRedisBroker):
235
245
  :param message: message to append.
236
246
  """
237
247
  async with Redis(connection_pool=self.connection_pool) as redis_conn:
238
- await redis_conn.xadd(self.queue_name, {b"data": message.message})
248
+ await redis_conn.xadd(
249
+ self.queue_name,
250
+ {b"data": message.message},
251
+ maxlen=self.maxlen,
252
+ )
239
253
 
240
254
  def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
241
255
  async def _ack() -> None:
@@ -252,6 +266,7 @@ class RedisStreamBroker(BaseRedisBroker):
252
266
  """Listen to incoming messages."""
253
267
  async with Redis(connection_pool=self.connection_pool) as redis_conn:
254
268
  while True:
269
+ logger.debug("Starting fetching new messages")
255
270
  fetched = await redis_conn.xreadgroup(
256
271
  self.consumer_group_name,
257
272
  self.consumer_name,
@@ -269,3 +284,29 @@ class RedisStreamBroker(BaseRedisBroker):
269
284
  data=msg[b"data"],
270
285
  ack=self._ack_generator(msg_id),
271
286
  )
287
+ logger.debug("Starting fetching unacknowledged messages")
288
+ for stream in [self.queue_name, *self.additional_streams.keys()]:
289
+ lock = redis_conn.lock(
290
+ f"autoclaim:{self.consumer_group_name}:{stream}",
291
+ )
292
+ if await lock.locked():
293
+ continue
294
+ async with lock:
295
+ pending = await redis_conn.xautoclaim(
296
+ name=stream,
297
+ groupname=self.consumer_group_name,
298
+ consumername=self.consumer_name,
299
+ min_idle_time=self.idle_timeout,
300
+ count=self.unacknowledged_batch_size,
301
+ )
302
+ logger.debug(
303
+ "Found %d pending messages in stream %s",
304
+ len(pending),
305
+ stream,
306
+ )
307
+ for msg_id, msg in pending[1]:
308
+ logger.debug("Received message: %s", msg)
309
+ yield AckableMessage(
310
+ data=msg[b"data"],
311
+ ack=self._ack_generator(msg_id),
312
+ )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: taskiq-redis
3
- Version: 1.0.4
3
+ Version: 1.0.6
4
4
  Summary: Redis integration for taskiq
5
5
  Keywords: taskiq,tasks,distributed,async,redis,result_backend
6
6
  Author: taskiq-team
@@ -1,13 +1,13 @@
1
1
  taskiq_redis/__init__.py,sha256=Sl4m9rKxweU1t0m289Qtf0qm4xSSkkFHoOfKq6qaz6g,1192
2
2
  taskiq_redis/exceptions.py,sha256=7buBJ7CRVWd5WqVqSjtHO8cVL7QzZg-DOM3nB87t-Sk,738
3
- taskiq_redis/list_schedule_source.py,sha256=guWql2hs2WT35vZtrsW1W9-TvaHsX5Lq_CyRjrl0tGA,9458
3
+ taskiq_redis/list_schedule_source.py,sha256=w7lSJ1-n889PXOwXiA_Jp6Wj4RsZpuXwFCVhOIc5KAw,9462
4
4
  taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
5
  taskiq_redis/redis_backend.py,sha256=MLBaUN3Zx-DLvm1r-lgPU82_WZq9kc6oTxYI8LQjd6k,19882
6
- taskiq_redis/redis_broker.py,sha256=ZLn7LAHj8Sh_oyW5hMgD7PZPQfUdXNPKdqhBcr9Okmg,9775
6
+ taskiq_redis/redis_broker.py,sha256=EZqWrxD-xtMXyebmwBvliDtYeWIjqdYkm-RrDShYLQw,11829
7
7
  taskiq_redis/redis_cluster_broker.py,sha256=FuWl5fP7Fwr9FbytErmhcUGjRCdPexDK2Co2u6kpDlo,6591
8
8
  taskiq_redis/redis_sentinel_broker.py,sha256=wHnbG3xuD_ruhhwp4AXo91NNjq8v2iufUZ0i_HbBRVQ,9073
9
9
  taskiq_redis/schedule_source.py,sha256=hqpcs2D8W90KUDHREKblisnhGCE9dbVOtKtuJcOTGZw,9915
10
- taskiq_redis-1.0.4.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
11
- taskiq_redis-1.0.4.dist-info/METADATA,sha256=sv_06NsLK3SODn9rj404w-mGKpnIWrI5iGLZEPyaBj8,6573
12
- taskiq_redis-1.0.4.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
13
- taskiq_redis-1.0.4.dist-info/RECORD,,
10
+ taskiq_redis-1.0.6.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
11
+ taskiq_redis-1.0.6.dist-info/METADATA,sha256=MdlYxz3CQ6s8qqsLO-8KqGFHpppikkMycEJAkK5G2-E,6573
12
+ taskiq_redis-1.0.6.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
13
+ taskiq_redis-1.0.6.dist-info/RECORD,,