taskiq-redis 1.0.8__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their public registries. It is provided for informational purposes only.
- taskiq_redis/list_schedule_source.py +7 -7
- taskiq_redis/redis_backend.py +2 -2
- taskiq_redis/redis_broker.py +8 -3
- taskiq_redis/redis_cluster_broker.py +14 -1
- taskiq_redis/redis_sentinel_broker.py +14 -1
- {taskiq_redis-1.0.8.dist-info → taskiq_redis-1.1.0.dist-info}/METADATA +2 -2
- taskiq_redis-1.1.0.dist-info/RECORD +13 -0
- {taskiq_redis-1.0.8.dist-info → taskiq_redis-1.1.0.dist-info}/WHEEL +1 -1
- taskiq_redis-1.0.8.dist-info/RECORD +0 -13
- {taskiq_redis-1.0.8.dist-info → taskiq_redis-1.1.0.dist-info}/LICENSE +0 -0
taskiq_redis/list_schedule_source.py
CHANGED

@@ -130,7 +130,7 @@ class ListRedisScheduleSource(ScheduleSource):
                 if key_time and key_time <= minute_before:
                     time_keys.append(key.decode())
             for key in time_keys:
-                schedules.extend(await redis.lrange(key, 0, -1))
+                schedules.extend(await redis.lrange(key, 0, -1))

         return schedules

@@ -146,10 +146,10 @@ class ListRedisScheduleSource(ScheduleSource):
             )
             # We need to remove the schedule from the cron or time list.
             if schedule.cron is not None:
-                await redis.lrem(self._get_cron_key(), 0, schedule_id)
+                await redis.lrem(self._get_cron_key(), 0, schedule_id)
             elif schedule.time is not None:
                 time_key = self._get_time_key(schedule.time)
-                await redis.lrem(time_key, 0, schedule_id)
+                await redis.lrem(time_key, 0, schedule_id)

     async def add_schedule(self, schedule: "ScheduledTask") -> None:
         """Add a schedule to the source."""

@@ -163,9 +163,9 @@ class ListRedisScheduleSource(ScheduleSource):
             # This is an optimization, so we can get all the schedules
             # for the current time much faster.
             if schedule.cron is not None:
-                await redis.rpush(self._get_cron_key(), schedule.schedule_id)
+                await redis.rpush(self._get_cron_key(), schedule.schedule_id)
             elif schedule.time is not None:
-                await redis.rpush(
+                await redis.rpush(
                     self._get_time_key(schedule.time),
                     schedule.schedule_id,
                 )

@@ -195,11 +195,11 @@ class ListRedisScheduleSource(ScheduleSource):
             self._is_first_run = False
         async with Redis(connection_pool=self._connection_pool) as redis:
             buffer = []
-            crons = await redis.lrange(self._get_cron_key(), 0, -1)
+            crons = await redis.lrange(self._get_cron_key(), 0, -1)
             logger.debug("Got %d cron schedules", len(crons))
             if crons:
                 buffer.extend(crons)
-            timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))
+            timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))
             logger.debug("Got %d timed schedules", len(timed))
             if timed:
                 buffer.extend(timed)
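On disk this schedule source is just Redis lists: schedule ids are RPUSHed onto one list for cron schedules and onto per-time lists for timed ones, read back with a single LRANGE, and removed with LREM. Below is a minimal sketch of that layout, assuming illustrative key names (the real ones come from `_get_cron_key`/`_get_time_key`) and minute-level time buckets:

```python
# Hypothetical sketch of the list layout the hunks above operate on.
# Key names are illustrative, not the library's actual key format.
import asyncio
import datetime as dt

from redis.asyncio import Redis


def time_key(prefix: str, when: dt.datetime) -> str:
    # One list per minute, so fetching "everything due now" is one LRANGE.
    return f"{prefix}:time:{when.strftime('%Y-%m-%d %H:%M')}"


async def main() -> None:
    redis = Redis()
    now = dt.datetime.now(dt.timezone.utc)

    # add_schedule: RPUSH the schedule id onto the right list.
    await redis.rpush("schedules:cron", "cron-schedule-1")
    await redis.rpush(time_key("schedules", now), "timed-schedule-1")

    # reading schedules: LRANGE the whole list in one round trip.
    due = await redis.lrange(time_key("schedules", now), 0, -1)
    print(due)  # [b'timed-schedule-1']

    # delete_schedule: LREM with count=0 removes every matching element.
    await redis.lrem("schedules:cron", 0, "cron-schedule-1")
    await redis.aclose()


asyncio.run(main())
```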
taskiq_redis/redis_backend.py
CHANGED

@@ -131,7 +131,7 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
             redis_set_params["px"] = self.result_px_time

         async with Redis(connection_pool=self.redis_pool) as redis:
-            await redis.set(**redis_set_params)
+            await redis.set(**redis_set_params)

     async def is_result_ready(self, task_id: str) -> bool:
         """

@@ -205,7 +205,7 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
             redis_set_params["px"] = self.result_px_time

         async with Redis(connection_pool=self.redis_pool) as redis:
-            await redis.set(**redis_set_params)
+            await redis.set(**redis_set_params)

     async def get_progress(
         self,
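Both hunks store a result with a single SET whose keyword arguments are assembled beforehand, so the millisecond expiry (`px`) is only included when configured. A small standalone sketch of the same pattern, with a placeholder key and value:

```python
# Mirrors the redis_set_params pattern above: build kwargs, then one SET.
# Key and value are placeholders; the real backend stores a serialized
# result under a task_id-derived key.
import asyncio
from typing import Any, Dict, Optional

from redis.asyncio import Redis


async def save_result(
    redis: Redis,
    task_id: str,
    value: bytes,
    result_px_time: Optional[int] = None,
) -> None:
    set_params: Dict[str, Any] = {"name": task_id, "value": value}
    if result_px_time is not None:
        set_params["px"] = result_px_time  # px = expiry in milliseconds
    await redis.set(**set_params)


async def main() -> None:
    redis = Redis()
    await save_result(redis, "task-1", b"serialized-result", result_px_time=60_000)
    print(await redis.pttl("task-1"))  # remaining lifetime in ms, <= 60000
    await redis.aclose()


asyncio.run(main())
```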
taskiq_redis/redis_broker.py
CHANGED

@@ -122,7 +122,7 @@ class ListQueueBroker(BaseRedisBroker):
         """
         queue_name = message.labels.get("queue_name") or self.queue_name
         async with Redis(connection_pool=self.connection_pool) as redis_conn:
-            await redis_conn.lpush(queue_name, message.message)
+            await redis_conn.lpush(queue_name, message.message)

     async def listen(self) -> AsyncGenerator[bytes, None]:
         """

@@ -137,7 +137,7 @@ class ListQueueBroker(BaseRedisBroker):
         while True:
             try:
                 async with Redis(connection_pool=self.connection_pool) as redis_conn:
-                    yield (await redis_conn.brpop(self.queue_name))[
+                    yield (await redis_conn.brpop(self.queue_name))[
                         redis_brpop_data_position
                     ]
             except ConnectionError as exc:
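These two ListQueueBroker hunks show the queue mechanics: kicking LPUSHes the serialized message, and listening blocks on BRPOP, which returns a `(queue_name, payload)` tuple, hence the `redis_brpop_data_position` index. A toy round trip outside taskiq, using redis-py directly:

```python
# Toy illustration (not taskiq's API) of the LPUSH/BRPOP queue above.
import asyncio

from redis.asyncio import Redis

REDIS_BRPOP_DATA_POSITION = 1  # BRPOP returns (key, value); we want the value


async def main() -> None:
    redis = Redis()
    await redis.lpush("taskiq", b"message-1")

    # timeout=0 (the default) blocks forever; here a message is already queued.
    popped = await redis.brpop("taskiq")
    print(popped[REDIS_BRPOP_DATA_POSITION])  # b'message-1'
    await redis.aclose()


asyncio.run(main())
```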
@@ -166,6 +166,7 @@ class RedisStreamBroker(BaseRedisBroker):
         mkstream: bool = True,
         xread_block: int = 2000,
         maxlen: Optional[int] = None,
+        approximate: bool = True,
         idle_timeout: int = 600000,  # 10 minutes
         unacknowledged_batch_size: int = 100,
         xread_count: Optional[int] = 100,

@@ -190,6 +191,8 @@ class RedisStreamBroker(BaseRedisBroker):
             Better to set it to a bigger value, to avoid unnecessary calls.
         :param maxlen: sets the maximum length of the stream
             trims (the old values of) the stream each time a new element is added
+        :param approximate: decides wether to trim the stream immediately (False) or
+            later on (True)
         :param xread_count: number of messages to fetch from the stream at once.
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.

@@ -210,6 +213,7 @@ class RedisStreamBroker(BaseRedisBroker):
         self.mkstream = mkstream
         self.block = xread_block
         self.maxlen = maxlen
+        self.approximate = approximate
         self.additional_streams = additional_streams or {}
         self.idle_timeout = idle_timeout
         self.unacknowledged_batch_size = unacknowledged_batch_size

@@ -252,6 +256,7 @@ class RedisStreamBroker(BaseRedisBroker):
             self.queue_name,
             {b"data": message.message},
             maxlen=self.maxlen,
+            approximate=self.approximate,
         )

     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:

@@ -275,7 +280,7 @@ class RedisStreamBroker(BaseRedisBroker):
             self.consumer_name,
             {
                 self.queue_name: ">",
-                **self.additional_streams,
+                **self.additional_streams,
             },
             block=self.block,
             noack=False,
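The new `approximate` flag is forwarded verbatim to redis-py's `xadd`. With `maxlen` set, `approximate=True` issues `XADD ... MAXLEN ~ n`, letting Redis trim lazily in whole macro-node chunks (cheap, but the stream can briefly exceed `maxlen`), while `approximate=False` issues an exact `MAXLEN n` trim on every add. A standalone sketch with an arbitrary stream name:

```python
# Sketch of what the maxlen/approximate pass-through does at the
# redis-py level. Stream name and sizes are arbitrary.
import asyncio

from redis.asyncio import Redis


async def main() -> None:
    redis = Redis()
    for _ in range(2000):
        # approximate=True renders as "XADD taskiq MAXLEN ~ 1000 ...":
        # Redis trims lazily, whole macro nodes at a time.
        # approximate=False would render as exact "MAXLEN 1000" and trim
        # to exactly 1000 entries on every XADD, which costs more.
        await redis.xadd(
            "taskiq",
            {b"data": b"payload"},
            maxlen=1000,
            approximate=True,
        )

    # With approximate trimming the stream may briefly exceed maxlen.
    print(await redis.xlen("taskiq"))  # roughly 1000, possibly a bit more
    await redis.aclose()


asyncio.run(main())
```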
taskiq_redis/redis_cluster_broker.py
CHANGED

@@ -92,6 +92,8 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
+        maxlen: Optional[int] = None,
+        approximate: bool = True,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:

@@ -111,6 +113,10 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         :param mkstream: create stream if it does not exist.
         :param xread_block: block time in ms for xreadgroup.
             Better to set it to a bigger value, to avoid unnecessary calls.
+        :param maxlen: sets the maximum length of the stream
+            trims (the old values of) the stream each time a new element is added
+        :param approximate: decides wether to trim the stream immediately (False) or
+            later on (True)
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
         """

@@ -125,6 +131,8 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         self.consumer_id = consumer_id
         self.mkstream = mkstream
         self.block = xread_block
+        self.maxlen = maxlen
+        self.approximate = approximate
         self.additional_streams = additional_streams or {}

     async def _declare_consumer_group(self) -> None:

@@ -154,7 +162,12 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):

         :param message: message to append.
         """
-        await self.redis.xadd(self.queue_name, {b"data": message.message})
+        await self.redis.xadd(
+            self.queue_name,
+            {b"data": message.message},
+            maxlen=self.maxlen,
+            approximate=self.approximate,
+        )

     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
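The cluster broker gains both knobs at once, since its `xadd` previously did no trimming at all. A hedged usage sketch follows; the import path, URL, and the assumption that the first positional argument is the connection URL are illustrative, not taken from this diff:

```python
# Assumed import path and constructor shape; only the maxlen/approximate
# parameters are confirmed by the diff above.
from taskiq_redis import RedisStreamClusterBroker

broker = RedisStreamClusterBroker(
    "redis://localhost:7000",  # placeholder cluster URL
    maxlen=10_000,             # keep roughly the last 10k messages
    approximate=True,          # trim with "MAXLEN ~": lazy and cheap
)
```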
taskiq_redis/redis_sentinel_broker.py
CHANGED

@@ -157,6 +157,8 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
+        maxlen: Optional[int] = None,
+        approximate: bool = True,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:

@@ -176,6 +178,10 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         :param mkstream: create stream if it does not exist.
         :param xread_block: block time in ms for xreadgroup.
             Better to set it to a bigger value, to avoid unnecessary calls.
+        :param maxlen: sets the maximum length of the stream
+            trims (the old values of) the stream each time a new element is added
+        :param approximate: decides wether to trim the stream immediately (False) or
+            later on (True)
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
         """

@@ -193,6 +199,8 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         self.consumer_id = consumer_id
         self.mkstream = mkstream
         self.block = xread_block
+        self.maxlen = maxlen
+        self.approximate = approximate
         self.additional_streams = additional_streams or {}

     async def _declare_consumer_group(self) -> None:

@@ -223,7 +231,12 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         :param message: message to append.
         """
         async with self._acquire_master_conn() as redis_conn:
-            await redis_conn.xadd(self.queue_name, {b"data": message.message})
+            await redis_conn.xadd(
+                self.queue_name,
+                {b"data": message.message},
+                maxlen=self.maxlen,
+                approximate=self.approximate,
+            )

     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
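The sentinel broker mirrors the cluster change, with the `xadd` issued on the acquired master connection. A hedged construction sketch; the `sentinels`/`master_name` parameters and the import path are assumptions, and the addresses are placeholders:

```python
# Assumed import path and constructor parameters; only maxlen/approximate
# are confirmed by the diff above.
from taskiq_redis import RedisStreamSentinelBroker

broker = RedisStreamSentinelBroker(
    sentinels=[("localhost", 26379)],  # placeholder sentinel address
    master_name="mymaster",            # placeholder master name
    maxlen=10_000,
    approximate=False,  # exact trimming: the stream never exceeds maxlen
)
```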
{taskiq_redis-1.0.8.dist-info → taskiq_redis-1.1.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: taskiq-redis
-Version: 1.0.8
+Version: 1.1.0
 Summary: Redis integration for taskiq
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
 Author: taskiq-team

@@ -15,7 +15,7 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.8
-Requires-Dist: redis (>=
+Requires-Dist: redis (>=6,<7)
 Requires-Dist: taskiq (>=0.11.12,<1)
 Project-URL: Homepage, https://github.com/taskiq-python/taskiq-redis
 Project-URL: Repository, https://github.com/taskiq-python/taskiq-redis
taskiq_redis-1.1.0.dist-info/RECORD
ADDED

@@ -0,0 +1,13 @@
+taskiq_redis/__init__.py,sha256=Sl4m9rKxweU1t0m289Qtf0qm4xSSkkFHoOfKq6qaz6g,1192
+taskiq_redis/exceptions.py,sha256=7buBJ7CRVWd5WqVqSjtHO8cVL7QzZg-DOM3nB87t-Sk,738
+taskiq_redis/list_schedule_source.py,sha256=aMM_LCJrbg2GIb8BTJJEZaFIWxziR68TwGELMTKI1q8,9805
+taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+taskiq_redis/redis_backend.py,sha256=sE4XeGr1fX0IE5FBdCJ9uP6DH7khAKlt8va2SwTv4go,19850
+taskiq_redis/redis_broker.py,sha256=WNiQOrekllNlAIHLknwEGUBNoUGqATCni2vQ3d8a5To,12207
+taskiq_redis/redis_cluster_broker.py,sha256=27Qr4y0GNrcx4h35TbE9pCWiB8oOv9ON2st7EXqh1zY,7096
+taskiq_redis/redis_sentinel_broker.py,sha256=4aBTM2L-iYpAej4yasQJbocH0koV4wZ_US_-COTfkK8,9598
+taskiq_redis/schedule_source.py,sha256=mDYlAlAuzIzMICpJiQ1AwWOF-9_OHVGJWXA45Gm2Trg,10128
+taskiq_redis-1.1.0.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
+taskiq_redis-1.1.0.dist-info/METADATA,sha256=QChMA4jQQZAAHoqcKLNjtd972XHyHNGlOmq9O7fbijw,6573
+taskiq_redis-1.1.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+taskiq_redis-1.1.0.dist-info/RECORD,,
taskiq_redis-1.0.8.dist-info/RECORD
REMOVED

@@ -1,13 +0,0 @@
-taskiq_redis/__init__.py,sha256=Sl4m9rKxweU1t0m289Qtf0qm4xSSkkFHoOfKq6qaz6g,1192
-taskiq_redis/exceptions.py,sha256=7buBJ7CRVWd5WqVqSjtHO8cVL7QzZg-DOM3nB87t-Sk,738
-taskiq_redis/list_schedule_source.py,sha256=NlHqtvwsYmWpAXLz_0BFcdSIPJCq7ch_r27b0QVmcGE,9917
-taskiq_redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-taskiq_redis/redis_backend.py,sha256=MLBaUN3Zx-DLvm1r-lgPU82_WZq9kc6oTxYI8LQjd6k,19882
-taskiq_redis/redis_broker.py,sha256=XoP9ZxcDSwwg5fUV4zK8LGbXBw32X1w_5BzHXaddN-w,12023
-taskiq_redis/redis_cluster_broker.py,sha256=FuWl5fP7Fwr9FbytErmhcUGjRCdPexDK2Co2u6kpDlo,6591
-taskiq_redis/redis_sentinel_broker.py,sha256=wHnbG3xuD_ruhhwp4AXo91NNjq8v2iufUZ0i_HbBRVQ,9073
-taskiq_redis/schedule_source.py,sha256=mDYlAlAuzIzMICpJiQ1AwWOF-9_OHVGJWXA45Gm2Trg,10128
-taskiq_redis-1.0.8.dist-info/LICENSE,sha256=lEHEEE-ZxmuItxYgUMPiFWdRcAITxE8DFMNyAg4eOYE,1075
-taskiq_redis-1.0.8.dist-info/METADATA,sha256=mBylEuqCrG2PWjlBlgDRiT_TPFeI1vqwh1deaVCB-TU,6573
-taskiq_redis-1.0.8.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
-taskiq_redis-1.0.8.dist-info/RECORD,,

{taskiq_redis-1.0.8.dist-info → taskiq_redis-1.1.0.dist-info}/LICENSE
File without changes