taskiq-redis 1.0.5__tar.gz → 1.0.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: taskiq-redis
-Version: 1.0.5
+Version: 1.0.6
 Summary: Redis integration for taskiq
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
 Author: taskiq-team
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "taskiq-redis"
-version = "1.0.5"
+version = "1.0.6"
 description = "Redis integration for taskiq"
 authors = ["taskiq-team <taskiq@norely.com>"]
 readme = "README.md"
@@ -164,8 +164,10 @@ class RedisStreamBroker(BaseRedisBroker):
         consumer_name: Optional[str] = None,
         consumer_id: str = "$",
         mkstream: bool = True,
-        xread_block: int = 10000,
+        xread_block: int = 2000,
         maxlen: Optional[int] = None,
+        idle_timeout: int = 600000,  # 10 minutes
+        unacknowledged_batch_size: int = 100,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:
@@ -189,6 +191,8 @@ class RedisStreamBroker(BaseRedisBroker):
             trims (the old values of) the stream each time a new element is added
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
+        :param redeliver_timeout: time in ms to wait before redelivering a message.
+        :param unacknowledged_batch_size: number of unacknowledged messages to fetch.
         """
         super().__init__(
             url,
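For context, here is a minimal usage sketch of the new constructor knobs. It is not part of the diff; the import path and the redis:// URL are assumptions, and a reachable local Redis instance is assumed as well.

# Illustrative only: values mirror the new defaults shown in the diff above.
from taskiq_redis import RedisStreamBroker

broker = RedisStreamBroker(
    url="redis://localhost:6379",
    xread_block=2000,                # block up to 2 seconds waiting for new entries
    idle_timeout=600_000,            # reclaim messages left unacknowledged for 10 minutes
    unacknowledged_batch_size=100,   # claim at most 100 pending messages per pass
)

A smaller xread_block makes the listen loop cycle more often, which lets the redelivery pass added further below run more frequently.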
@@ -205,6 +209,8 @@ class RedisStreamBroker(BaseRedisBroker):
         self.block = xread_block
         self.maxlen = maxlen
         self.additional_streams = additional_streams or {}
+        self.idle_timeout = idle_timeout
+        self.unacknowledged_batch_size = unacknowledged_batch_size
 
     async def _declare_consumer_group(self) -> None:
         """
@@ -260,6 +266,7 @@ class RedisStreamBroker(BaseRedisBroker):
         """Listen to incoming messages."""
         async with Redis(connection_pool=self.connection_pool) as redis_conn:
             while True:
+                logger.debug("Starting fetching new messages")
                 fetched = await redis_conn.xreadgroup(
                     self.consumer_group_name,
                     self.consumer_name,
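The xread_block value stored as self.block above controls how long this XREADGROUP call waits for new entries. A standalone sketch of the equivalent raw read, with illustrative stream, group, and consumer names, assuming the consumer group already exists:

import asyncio
from redis.asyncio import Redis

async def read_new_messages() -> None:
    async with Redis.from_url("redis://localhost:6379") as redis_conn:
        fetched = await redis_conn.xreadgroup(
            "taskiq_group",            # illustrative group name
            "worker-1",                # illustrative consumer name
            {"taskiq_stream": ">"},    # ">" = entries never delivered to this group
            block=2000,                # return after 2 s even if nothing arrived
        )
        for _stream, messages in fetched:
            for msg_id, msg in messages:
                print(msg_id, msg[b"data"])

asyncio.run(read_new_messages())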
@@ -277,3 +284,29 @@ class RedisStreamBroker(BaseRedisBroker):
                         data=msg[b"data"],
                         ack=self._ack_generator(msg_id),
                     )
+                logger.debug("Starting fetching unacknowledged messages")
+                for stream in [self.queue_name, *self.additional_streams.keys()]:
+                    lock = redis_conn.lock(
+                        f"autoclaim:{self.consumer_group_name}:{stream}",
+                    )
+                    if await lock.locked():
+                        continue
+                    async with lock:
+                        pending = await redis_conn.xautoclaim(
+                            name=stream,
+                            groupname=self.consumer_group_name,
+                            consumername=self.consumer_name,
+                            min_idle_time=self.idle_timeout,
+                            count=self.unacknowledged_batch_size,
+                        )
+                        logger.debug(
+                            "Found %d pending messages in stream %s",
+                            len(pending),
+                            stream,
+                        )
+                        for msg_id, msg in pending[1]:
+                            logger.debug("Received message: %s", msg)
+                            yield AckableMessage(
+                                data=msg[b"data"],
+                                ack=self._ack_generator(msg_id),
+                            )
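The block above is the new redelivery pass: entries that were delivered but not acknowledged within idle_timeout are claimed back with XAUTOCLAIM and yielded again, with a per-stream Redis lock so only one worker runs the pass at a time. A standalone sketch of the same pattern, with illustrative names and assuming the consumer group already exists:

import asyncio
from redis.asyncio import Redis

async def reclaim_stale_messages() -> None:
    async with Redis.from_url("redis://localhost:6379") as redis_conn:
        # redis-py returns (next_cursor, claimed_entries, ...) here, which is why
        # the broker code above iterates pending[1].
        pending = await redis_conn.xautoclaim(
            name="taskiq_stream",
            groupname="taskiq_group",
            consumername="worker-1",
            min_idle_time=600_000,   # only entries idle for at least 10 minutes
            count=100,               # cap the batch, like unacknowledged_batch_size
        )
        for msg_id, msg in pending[1]:
            # re-process msg[b"data"], then acknowledge so it leaves the pending list
            await redis_conn.xack("taskiq_stream", "taskiq_group", msg_id)

asyncio.run(reclaim_stale_messages())

In the broker itself, acknowledgement is instead handled by the ack callback attached to each yielded AckableMessage.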