taskiq-redis 1.1.0__tar.gz → 1.1.2__tar.gz

This diff shows the published contents of two versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -1,7 +1,8 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: taskiq-redis
-Version: 1.1.0
+Version: 1.1.2
 Summary: Redis integration for taskiq
+License-File: LICENSE
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
 Author: taskiq-team
 Author-email: taskiq@norely.com
@@ -13,6 +14,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.8
 Requires-Dist: redis (>=6,<7)
@@ -213,3 +215,17 @@ scheduler = TaskiqScheduler(broker, [array_source])
 
 During startup the scheduler will try to migrate schedules from an old source to a new one. Please be sure to specify different prefixes to avoid any kind of collision between the two sources.
 
+
+## Dynamic queue names
+
+
+Brokers support dynamic queue names, allowing you to specify a different queue each time you kick a task. This is useful for routing tasks based on runtime conditions such as priority level, tenant isolation, or environment-specific processing.
+
+Simply pass the desired queue name as the message's `queue_name` label when kicking a task to override the broker's default queue configuration.
+
+```python
+@broker.task(queue_name="low_priority")
+async def low_priority_task() -> None:
+    print("I don't mind waiting a little longer")
+```
+
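The README example above sets the `queue_name` label at task definition time. The same label can also be supplied per call through taskiq's kicker; the following is a minimal sketch assuming taskiq's standard `kicker().with_labels()` API (the broker URL and task name are illustrative):

```python
import asyncio

from taskiq_redis import RedisStreamBroker

broker = RedisStreamBroker(url="redis://localhost:6379")


@broker.task
async def my_task() -> None:
    print("routed dynamically")


async def main() -> None:
    # Override the target queue for this single call; kick() falls back to
    # the broker's default queue when the label is absent (see the broker
    # changes further down in this diff).
    await my_task.kicker().with_labels(queue_name="critical").kiq()


asyncio.run(main())
```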
@@ -189,3 +189,17 @@ scheduler = TaskiqScheduler(broker, [array_source])
 ```
 
 During startup the scheduler will try to migrate schedules from an old source to a new one. Please be sure to specify different prefixes to avoid any kind of collision between the two sources.
+
+
+## Dynamic queue names
+
+
+Brokers support dynamic queue names, allowing you to specify a different queue each time you kick a task. This is useful for routing tasks based on runtime conditions such as priority level, tenant isolation, or environment-specific processing.
+
+Simply pass the desired queue name as the message's `queue_name` label when kicking a task to override the broker's default queue configuration.
+
+```python
+@broker.task(queue_name="low_priority")
+async def low_priority_task() -> None:
+    print("I don't mind waiting a little longer")
+```
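One caveat worth noting: for stream brokers a dynamic queue name is a separate Redis stream, so workers must also be configured to consume it. A hedged sketch, assuming the `additional_streams` constructor parameter that the listener code later in this diff iterates over:

```python
from taskiq_redis import RedisStreamBroker

# Workers built from this broker read the default stream plus "low_priority";
# ">" asks the consumer group for messages never delivered to this group.
broker = RedisStreamBroker(
    url="redis://localhost:6379",
    additional_streams={"low_priority": ">"},
)
```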
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "taskiq-redis"
-version = "1.1.0"
+version = "1.1.2"
 description = "Redis integration for taskiq"
 authors = ["taskiq-team <taskiq@norely.com>"]
 readme = "README.md"
@@ -251,19 +251,20 @@ class RedisStreamBroker(BaseRedisBroker):
 
         :param message: message to append.
         """
+        queue_name = message.labels.get("queue_name") or self.queue_name
         async with Redis(connection_pool=self.connection_pool) as redis_conn:
             await redis_conn.xadd(
-                self.queue_name,
+                queue_name,
                 {b"data": message.message},
                 maxlen=self.maxlen,
                 approximate=self.approximate,
             )
 
-    def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
+    def _ack_generator(self, id: str, queue_name: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
             async with Redis(connection_pool=self.connection_pool) as redis_conn:
                 await redis_conn.xack(
-                    self.queue_name,
+                    queue_name,
                     self.consumer_group_name,
                     id,
                 )
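The `_ack_generator` change above threads the originating stream into the acknowledgement callback. A standalone sketch of the closure pattern, with hypothetical names rather than the library's actual class:

```python
from typing import Awaitable, Callable

from redis.asyncio import Redis


def make_ack(
    redis_conn: Redis,
    stream: str,
    group: str,
    msg_id: str,
) -> Callable[[], Awaitable[None]]:
    """Build an ack callback that remembers which stream a message came from."""

    async def _ack() -> None:
        # With dynamic queues a message may live in a stream other than the
        # broker default, so the stream name must be captured per message.
        await redis_conn.xack(stream, group, msg_id)

    return _ack
```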
@@ -286,12 +287,12 @@ class RedisStreamBroker(BaseRedisBroker):
                 noack=False,
                 count=self.count,
             )
-            for _, msg_list in fetched:
+            for stream, msg_list in fetched:
                 for msg_id, msg in msg_list:
                     logger.debug("Received message: %s", msg)
                     yield AckableMessage(
                         data=msg[b"data"],
-                        ack=self._ack_generator(msg_id),
+                        ack=self._ack_generator(id=msg_id, queue_name=stream),
                     )
         logger.debug("Starting fetching unacknowledged messages")
         for stream in [self.queue_name, *self.additional_streams.keys()]:
@@ -310,12 +311,12 @@ class RedisStreamBroker(BaseRedisBroker):
             )
             logger.debug(
                 "Found %d pending messages in stream %s",
-                len(pending),
+                len(pending[1]),
                 stream,
             )
             for msg_id, msg in pending[1]:
                 logger.debug("Received message: %s", msg)
                 yield AckableMessage(
                     data=msg[b"data"],
-                    ack=self._ack_generator(msg_id),
+                    ack=self._ack_generator(id=msg_id, queue_name=stream),
                 )
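The `len(pending)` fix matches the reply shape of the reclaim call (presumably XAUTOCLAIM, given the cursor-plus-messages structure): element 0 is the cursor for the next call and element 1 is the list of reclaimed messages, so `len(pending)` reported the tuple arity rather than the message count. An illustrative reply, assuming redis-py's format:

```python
# Hypothetical reply for a single reclaimed message:
pending = (
    b"0-0",                                     # cursor for the next reclaim call
    [(b"1700000000000-0", {b"data": b"..."})],  # list of (message id, fields) pairs
)
print(len(pending[1]))  # 1, the actual number of pending messages
```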
@@ -55,7 +55,8 @@ class ListQueueClusterBroker(BaseRedisClusterBroker):
 
         :param message: message to append.
         """
-        await self.redis.lpush(self.queue_name, message.message)  # type: ignore
+        queue_name = message.labels.get("queue_name") or self.queue_name
+        await self.redis.lpush(queue_name, message.message)  # type: ignore
 
     async def listen(self) -> AsyncGenerator[bytes, None]:
         """
@@ -162,17 +163,18 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
 
         :param message: message to append.
         """
+        queue_name = message.labels.get("queue_name") or self.queue_name
         await self.redis.xadd(
-            self.queue_name,
+            queue_name,
             {b"data": message.message},
             maxlen=self.maxlen,
             approximate=self.approximate,
         )
 
-    def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
+    def _ack_generator(self, id: str, queue_name: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
             await self.redis.xack(
-                self.queue_name,
+                queue_name,
                 self.consumer_group_name,
                 id,
             )
@@ -192,10 +194,10 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
             block=self.block,
             noack=False,
         )
-        for _, msg_list in fetched:
+        for stream, msg_list in fetched:
             for msg_id, msg in msg_list:
                 logger.debug("Received message: %s", msg)
                 yield AckableMessage(
                     data=msg[b"data"],
-                    ack=self._ack_generator(msg_id),
+                    ack=self._ack_generator(id=msg_id, queue_name=stream),
                 )
@@ -230,19 +230,20 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
 
         :param message: message to append.
         """
+        queue_name = message.labels.get("queue_name") or self.queue_name
         async with self._acquire_master_conn() as redis_conn:
             await redis_conn.xadd(
-                self.queue_name,
+                queue_name,
                 {b"data": message.message},
                 maxlen=self.maxlen,
                 approximate=self.approximate,
             )
 
-    def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
+    def _ack_generator(self, id: str, queue_name: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
             async with self._acquire_master_conn() as redis_conn:
                 await redis_conn.xack(
-                    self.queue_name,
+                    queue_name,
                     self.consumer_group_name,
                     id,
                 )
@@ -263,10 +264,10 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
                 block=self.block,
                 noack=False,
             )
-            for _, msg_list in fetched:
+            for stream, msg_list in fetched:
                 for msg_id, msg in msg_list:
                     logger.debug("Received message: %s", msg)
                     yield AckableMessage(
                         data=msg[b"data"],
-                        ack=self._ack_generator(msg_id),
+                        ack=self._ack_generator(id=msg_id, queue_name=stream),
                     )
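With this release the list, stream, cluster, and sentinel brokers all resolve the queue from the same `queue_name` label. A closing sketch for the sentinel variant; the constructor arguments are assumptions modeled on taskiq-redis's other sentinel brokers:

```python
from taskiq_redis import RedisStreamSentinelBroker

# sentinels/master_name are assumed parameters, mirroring the other
# sentinel brokers in taskiq-redis.
broker = RedisStreamSentinelBroker(
    sentinels=[("localhost", 26379)],
    master_name="mymaster",
)


@broker.task(queue_name="reports")  # resolved by kick(), as shown above
async def nightly_report() -> None:
    print("report generated")
```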