taskiq-redis 1.0.5__py3-none-any.whl → 1.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -25,6 +25,17 @@ class ListRedisScheduleSource(ScheduleSource):
25
25
  skip_past_schedules: bool = False,
26
26
  **connection_kwargs: Any,
27
27
  ) -> None:
28
+ """
29
+ Create a new schedule source.
30
+
31
+ :param url: Redis URL
32
+ :param prefix: Prefix for all the keys
33
+ :param max_connection_pool_size: Maximum size of the connection pool
34
+ :param serializer: Serializer to use for the schedules
35
+ :param buffer_size: Buffer size for getting schedules
36
+ :param skip_past_schedules: Skip schedules that are in the past.
37
+ :param connection_kwargs: Additional connection kwargs
38
+ """
28
39
  super().__init__()
29
40
  self._prefix = prefix
30
41
  self._buffer_size = buffer_size
@@ -179,7 +190,7 @@ class ListRedisScheduleSource(ScheduleSource):
179
190
  current_time = datetime.datetime.now(datetime.timezone.utc)
180
191
  timed: list[bytes] = []
181
192
  # Fetch schedules whose time is already in the past (unless explicitly skipped)
182
- if self._is_first_run and not self._skip_past_schedules:
193
+ if not self._skip_past_schedules:
183
194
  timed = await self._get_previous_time_schedules()
184
195
  self._is_first_run = False
185
196
  async with Redis(connection_pool=self._connection_pool) as redis:
@@ -164,8 +164,10 @@ class RedisStreamBroker(BaseRedisBroker):
164
164
  consumer_name: Optional[str] = None,
165
165
  consumer_id: str = "$",
166
166
  mkstream: bool = True,
167
- xread_block: int = 10000,
167
+ xread_block: int = 2000,
168
168
  maxlen: Optional[int] = None,
169
+ idle_timeout: int = 600000, # 10 minutes
170
+ unacknowledged_batch_size: int = 100,
169
171
  additional_streams: Optional[Dict[str, str]] = None,
170
172
  **connection_kwargs: Any,
171
173
  ) -> None:
@@ -189,6 +191,8 @@ class RedisStreamBroker(BaseRedisBroker):
189
191
  trims (the old values of) the stream each time a new element is added
190
192
  :param additional_streams: additional streams to read from.
191
193
  Each key is a stream name, value is a consumer id.
194
+ :param idle_timeout: time in ms a message may stay pending before being redelivered.
195
+ :param unacknowledged_batch_size: number of unacknowledged messages to fetch.
192
196
  """
193
197
  super().__init__(
194
198
  url,
@@ -205,6 +209,8 @@ class RedisStreamBroker(BaseRedisBroker):
205
209
  self.block = xread_block
206
210
  self.maxlen = maxlen
207
211
  self.additional_streams = additional_streams or {}
212
+ self.idle_timeout = idle_timeout
213
+ self.unacknowledged_batch_size = unacknowledged_batch_size
208
214
 
209
215
  async def _declare_consumer_group(self) -> None:
210
216
  """
@@ -260,6 +266,7 @@ class RedisStreamBroker(BaseRedisBroker):
260
266
  """Listen to incoming messages."""
261
267
  async with Redis(connection_pool=self.connection_pool) as redis_conn:
262
268
  while True:
269
+ logger.debug("Starting fetching new messages")
263
270
  fetched = await redis_conn.xreadgroup(
264
271
  self.consumer_group_name,
265
272
  self.consumer_name,
@@ -277,3 +284,29 @@ class RedisStreamBroker(BaseRedisBroker):
277
284
  data=msg[b"data"],
278
285
  ack=self._ack_generator(msg_id),
279
286
  )
287
+ logger.debug("Starting fetching unacknowledged messages")
288
+ for stream in [self.queue_name, *self.additional_streams.keys()]:
289
+ lock = redis_conn.lock(
290
+ f"autoclaim:{self.consumer_group_name}:{stream}",
291
+ )
292
+ if await lock.locked():
293
+ continue
294
+ async with lock:
295
+ pending = await redis_conn.xautoclaim(
296
+ name=stream,
297
+ groupname=self.consumer_group_name,
298
+ consumername=self.consumer_name,
299
+ min_idle_time=self.idle_timeout,
300
+ count=self.unacknowledged_batch_size,
301
+ )
302
+ logger.debug(
303
+ "Found %d pending messages in stream %s",
304
+ len(pending),
305
+ stream,
306
+ )
307
+ for msg_id, msg in pending[1]:
308
+ logger.debug("Received message: %s", msg)
309
+ yield AckableMessage(
310
+ data=msg[b"data"],
311
+ ack=self._ack_generator(msg_id),
312
+ )
@@ -1,4 +1,5 @@
1
1
  import sys
2
+ import warnings
2
3
  from contextlib import asynccontextmanager
3
4
  from typing import TYPE_CHECKING, Any, AsyncIterator, List, Optional, Tuple
4
5
 
@@ -53,6 +54,12 @@ class RedisScheduleSource(ScheduleSource):
53
54
  serializer: Optional[TaskiqSerializer] = None,
54
55
  **connection_kwargs: Any,
55
56
  ) -> None:
57
+ warnings.warn(
58
+ "RedisScheduleSource is deprecated. "
59
+ "Please switch to ListRedisScheduleSource",
60
+ DeprecationWarning,
61
+ stacklevel=2,
62
+ )
56
63
  self.prefix = prefix
57
64
  self.connection_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url(
58
65
  url=url,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: taskiq-redis
3
- Version: 1.0.5
3
+ Version: 1.0.7
4
4
  Summary: Redis integration for taskiq
5
5
  Keywords: taskiq,tasks,distributed,async,redis,result_backend
6
6
  Author: taskiq-team
@@ -1,13 +1,13 @@
1
1
  taskiq_redis/__init__.py,sha256=Sl4m9rKxweU1t0m289Qtf0qm4xSSkkFHoOfKq6qaz6g,1192
2
2
  taskiq_redis/exceptions.py,sha256=7buBJ7CRVWd5WqVqSjtHO8cVL7QzZg-DOM3nB87t-Sk,738
3
- taskiq_redis/list_schedule_source.py,sha256=w7lSJ1-n889PXOwXiA_Jp6Wj4RsZpuXwFCVhOIc5KAw,9462
3
+ taskiq_redis/list_schedule_source.py,sha256=NlHqtvwsYmWpAXLz_0BFcdSIPJCq7ch_r27b0QVmcGE,9917
4
4
  taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
5
  taskiq_redis/redis_backend.py,sha256=MLBaUN3Zx-DLvm1r-lgPU82_WZq9kc6oTxYI8LQjd6k,19882
6
- taskiq_redis/redis_broker.py,sha256=ep31kUxXx4XhGKjrzVjdNBmA6wLbTQoy7-DlKuwtLz4,10068
6
+ taskiq_redis/redis_broker.py,sha256=EZqWrxD-xtMXyebmwBvliDtYeWIjqdYkm-RrDShYLQw,11829
7
7
  taskiq_redis/redis_cluster_broker.py,sha256=FuWl5fP7Fwr9FbytErmhcUGjRCdPexDK2Co2u6kpDlo,6591
8
8
  taskiq_redis/redis_sentinel_broker.py,sha256=wHnbG3xuD_ruhhwp4AXo91NNjq8v2iufUZ0i_HbBRVQ,9073
9
- taskiq_redis/schedule_source.py,sha256=hqpcs2D8W90KUDHREKblisnhGCE9dbVOtKtuJcOTGZw,9915
10
- taskiq_redis-1.0.5.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
11
- taskiq_redis-1.0.5.dist-info/METADATA,sha256=uV2AJOyyE-f1_5A1LsvGpbf74zSueiJaNNRJ6kIqpvw,6573
12
- taskiq_redis-1.0.5.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
13
- taskiq_redis-1.0.5.dist-info/RECORD,,
9
+ taskiq_redis/schedule_source.py,sha256=mDYlAlAuzIzMICpJiQ1AwWOF-9_OHVGJWXA45Gm2Trg,10128
10
+ taskiq_redis-1.0.7.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
11
+ taskiq_redis-1.0.7.dist-info/METADATA,sha256=7oouvMMNze9lzj8XcvGQY2DonXiT7PLkv7ErtNN1i0s,6573
12
+ taskiq_redis-1.0.7.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
13
+ taskiq_redis-1.0.7.dist-info/RECORD,,