taskiq-redis 1.0.6__tar.gz → 1.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: taskiq-redis
-Version: 1.0.6
+Version: 1.0.8
 Summary: Redis integration for taskiq
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
 Author: taskiq-team
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "taskiq-redis"
-version = "1.0.6"
+version = "1.0.8"
 description = "Redis integration for taskiq"
 authors = ["taskiq-team <taskiq@norely.com>"]
 readme = "README.md"
@@ -25,6 +25,17 @@ class ListRedisScheduleSource(ScheduleSource):
         skip_past_schedules: bool = False,
         **connection_kwargs: Any,
     ) -> None:
+        """
+        Create a new schedule source.
+
+        :param url: Redis URL
+        :param prefix: Prefix for all the keys
+        :param max_connection_pool_size: Maximum size of the connection pool
+        :param serializer: Serializer to use for the schedules
+        :param buffer_size: Buffer size for getting schedules
+        :param skip_past_schedules: Skip schedules that are in the past.
+        :param connection_kwargs: Additional connection kwargs
+        """
         super().__init__()
         self._prefix = prefix
         self._buffer_size = buffer_size
@@ -179,7 +190,7 @@ class ListRedisScheduleSource(ScheduleSource):
         current_time = datetime.datetime.now(datetime.timezone.utc)
         timed: list[bytes] = []
         # Only during first run, we need to get previous time schedules
-        if self._is_first_run and not self._skip_past_schedules:
+        if not self._skip_past_schedules:
             timed = await self._get_previous_time_schedules()
             self._is_first_run = False
         async with Redis(connection_pool=self._connection_pool) as redis:
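This hunk widens the catch-up behavior: past-due schedules are now collected on every fetch, not only on the scheduler's first run, unless skip_past_schedules is set. A short sketch of opting out, with a placeholder URL:

# Sketch: never replay schedules whose time has already passed.
from taskiq_redis import ListRedisScheduleSource

source = ListRedisScheduleSource(
    "redis://localhost:6379",
    skip_past_schedules=True,  # skip schedules that are in the past
)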
@@ -168,6 +168,7 @@ class RedisStreamBroker(BaseRedisBroker):
         maxlen: Optional[int] = None,
         idle_timeout: int = 600000,  # 10 minutes
         unacknowledged_batch_size: int = 100,
+        xread_count: Optional[int] = 100,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:
@@ -189,6 +190,7 @@ class RedisStreamBroker(BaseRedisBroker):
             Better to set it to a bigger value, to avoid unnecessary calls.
         :param maxlen: sets the maximum length of the stream
             trims (the old values of) the stream each time a new element is added
+        :param xread_count: number of messages to fetch from the stream at once.
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
         :param redeliver_timeout: time in ms to wait before redelivering a message.
@@ -211,6 +213,7 @@ class RedisStreamBroker(BaseRedisBroker):
         self.additional_streams = additional_streams or {}
         self.idle_timeout = idle_timeout
         self.unacknowledged_batch_size = unacknowledged_batch_size
+        self.count = xread_count

     async def _declare_consumer_group(self) -> None:
         """
@@ -276,6 +279,7 @@ class RedisStreamBroker(BaseRedisBroker):
                 },
                 block=self.block,
                 noack=False,
+                count=self.count,
             )
             for _, msg_list in fetched:
                 for msg_id, msg in msg_list:
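Taken together, these hunks thread the new xread_count argument from the constructor through to the consumer-group read, capping how many messages a single XREADGROUP call returns. A usage sketch; the URL and the count value are placeholders:

# Sketch: cap how many messages one XREADGROUP call pulls from the stream.
from taskiq_redis import RedisStreamBroker

broker = RedisStreamBroker(
    url="redis://localhost:6379",
    xread_count=10,  # fetch at most 10 messages per read; the default is 100
)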
@@ -1,4 +1,5 @@
 import sys
+import warnings
 from contextlib import asynccontextmanager
 from typing import TYPE_CHECKING, Any, AsyncIterator, List, Optional, Tuple
 
@@ -53,6 +54,12 @@ class RedisScheduleSource(ScheduleSource):
         serializer: Optional[TaskiqSerializer] = None,
         **connection_kwargs: Any,
     ) -> None:
+        warnings.warn(
+            "RedisScheduleSource is deprecated. "
+            "Please switch to ListRedisScheduleSource",
+            DeprecationWarning,
+            stacklevel=2,
+        )
         self.prefix = prefix
         self.connection_pool: _BlockingConnectionPool = BlockingConnectionPool.from_url(
             url=url,
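Constructing RedisScheduleSource now emits a DeprecationWarning pointing at ListRedisScheduleSource. A migration sketch; the URL is a placeholder and the import paths are assumed from the package's public exports:

# Before (now emits a DeprecationWarning at construction time):
# from taskiq_redis import RedisScheduleSource
# source = RedisScheduleSource("redis://localhost:6379")

# After: the list-based source is the suggested replacement.
from taskiq_redis import ListRedisScheduleSource

source = ListRedisScheduleSource("redis://localhost:6379")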