taskiq-redis 1.0.4__tar.gz → 1.0.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/PKG-INFO +1 -1
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/pyproject.toml +1 -1
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/list_schedule_source.py +5 -5
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/redis_broker.py +9 -1
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/LICENSE +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/README.md +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/__init__.py +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/exceptions.py +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/py.typed +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/redis_backend.py +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/redis_cluster_broker.py +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/redis_sentinel_broker.py +0 -0
- {taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/schedule_source.py +0 -0
{taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/list_schedule_source.py

@@ -13,7 +13,7 @@ logger = getLogger("taskiq.redis_schedule_source")
 
 
 class ListRedisScheduleSource(ScheduleSource):
-    """
+    """Schedule source based on arrays."""
 
     def __init__(
         self,
@@ -21,13 +21,13 @@ class ListRedisScheduleSource(ScheduleSource):
         prefix: str = "schedule",
         max_connection_pool_size: Optional[int] = None,
         serializer: Optional[TaskiqSerializer] = None,
-
+        buffer_size: int = 50,
         skip_past_schedules: bool = False,
         **connection_kwargs: Any,
     ) -> None:
         super().__init__()
         self._prefix = prefix
-        self._buffer_size =
+        self._buffer_size = buffer_size
         self._connection_pool = BlockingConnectionPool.from_url(
             url=url,
             max_connections=max_connection_pool_size,
@@ -185,11 +185,11 @@ class ListRedisScheduleSource(ScheduleSource):
         async with Redis(connection_pool=self._connection_pool) as redis:
             buffer = []
             crons = await redis.lrange(self._get_cron_key(), 0, -1)  # type: ignore
-            logger.debug("Got cron
+            logger.debug("Got %d cron schedules", len(crons))
             if crons:
                 buffer.extend(crons)
             timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))  # type: ignore
-            logger.debug("Got timed
+            logger.debug("Got %d timed schedules", len(timed))
             if timed:
                 buffer.extend(timed)
             while buffer:
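A minimal usage sketch for the new buffer_size argument, assuming a local Redis URL. The class, module path, and parameter names come from the diff above; the exact batching semantics of buffer_size are an assumption based on the changed code, and the chosen values are illustrative only.

    # Hypothetical example: construct the schedule source with the
    # buffer_size knob added in 1.0.5 (default 50, per the diff above).
    from taskiq_redis.list_schedule_source import ListRedisScheduleSource

    source = ListRedisScheduleSource(
        "redis://localhost:6379/0",  # assumed local Redis instance
        prefix="schedule",           # default shown in the diff
        buffer_size=100,             # presumably how many schedules are drained per batch
    )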
{taskiq_redis-1.0.4 → taskiq_redis-1.0.5}/taskiq_redis/redis_broker.py

@@ -165,6 +165,7 @@ class RedisStreamBroker(BaseRedisBroker):
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
+        maxlen: Optional[int] = None,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:
@@ -184,6 +185,8 @@ class RedisStreamBroker(BaseRedisBroker):
         :param mkstream: create stream if it does not exist.
         :param xread_block: block time in ms for xreadgroup.
             Better to set it to a bigger value, to avoid unnecessary calls.
+        :param maxlen: sets the maximum length of the stream
+            trims (the old values of) the stream each time a new element is added
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
         """
@@ -200,6 +203,7 @@ class RedisStreamBroker(BaseRedisBroker):
         self.consumer_id = consumer_id
         self.mkstream = mkstream
         self.block = xread_block
+        self.maxlen = maxlen
         self.additional_streams = additional_streams or {}
 
     async def _declare_consumer_group(self) -> None:
@@ -235,7 +239,11 @@ class RedisStreamBroker(BaseRedisBroker):
         :param message: message to append.
         """
         async with Redis(connection_pool=self.connection_pool) as redis_conn:
-            await redis_conn.xadd(self.queue_name, {b"data": message.message})
+            await redis_conn.xadd(
+                self.queue_name,
+                {b"data": message.message},
+                maxlen=self.maxlen,
+            )
 
     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None: