taskiq-redis 1.1.2__tar.gz → 1.2.1__tar.gz

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -1,26 +1,26 @@
  Metadata-Version: 2.4
  Name: taskiq-redis
- Version: 1.1.2
+ Version: 1.2.1
  Summary: Redis integration for taskiq
+ Keywords: async,distributed,redis,result_backend,taskiq,tasks
+ Author: Taskiq team
+ Author-email: Taskiq team <taskiq@no-reply.com>
+ License-Expression: MIT
  License-File: LICENSE
- Keywords: taskiq,tasks,distributed,async,redis,result_backend
- Author: taskiq-team
- Author-email: taskiq@norely.com
- Requires-Python: >=3.9,<4.0
  Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
- Classifier: Programming Language :: Python :: 3.14
- Classifier: Programming Language :: Python :: 3 :: Only
- Classifier: Programming Language :: Python :: 3.8
- Requires-Dist: redis (>=6,<7)
- Requires-Dist: taskiq (>=0.11.12,<1)
- Project-URL: Homepage, https://github.com/taskiq-python/taskiq-redis
- Project-URL: Repository, https://github.com/taskiq-python/taskiq-redis
+ Requires-Dist: redis>=7.0.0,<8
+ Requires-Dist: taskiq>=0.12.0
+ Maintainer: Taskiq team
+ Maintainer-email: Taskiq team <taskiq@no-reply.com>
+ Requires-Python: >=3.10
+ Project-URL: homepage, https://github.com/taskiq-python/taskiq-redis
+ Project-URL: repository, https://github.com/taskiq-python/taskiq-redis
  Description-Content-Type: text/markdown

  # TaskIQ-Redis
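The metadata hunk above raises the floor everywhere: Python 3.9 support is dropped, redis moves from the 6.x to the 7.x series, and taskiq must be at least 0.12.0. A minimal sanity-check sketch for an upgraded environment, using only the standard library:

import sys
from importlib.metadata import version

# Verify the new 1.2.1 constraints (Python >= 3.10, redis 7.x, taskiq >= 0.12).
assert sys.version_info >= (3, 10), "taskiq-redis 1.2.1 dropped Python 3.9"
print("redis:", version("redis"))    # expect a 7.x release, below 8
print("taskiq:", version("taskiq"))  # expect >= 0.12.0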
@@ -228,4 +228,3 @@ Simply pass the desired queue name as message's label when kicking a task to ove
  async def low_priority_task() -> None:
      print("I don't mind waiting a little longer")
  ```
- 
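The README excerpt above ends the per-task queue override example. For context, a minimal sketch of how such a task is kicked onto its own list; the broker URL and queue name are illustrative, and the label name matches what ListQueueBroker reads from the message:

from taskiq_redis import ListQueueBroker

broker = ListQueueBroker("redis://localhost:6379")

@broker.task
async def low_priority_task() -> None:
    print("I don't mind waiting a little longer")

async def kick() -> None:
    # The broker reads message.labels["queue_name"] and falls back to its
    # default queue when the label is absent.
    await low_priority_task.kicker().with_labels(queue_name="low_priority").kiq()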
@@ -0,0 +1,155 @@
+ [project]
+ name = "taskiq-redis"
+ version = "1.2.1"
+ description = "Redis integration for taskiq"
+ readme = "README.md"
+ requires-python = ">=3.10"
+ license = "MIT"
+ license-files = ["LICENSE"]
+ authors = [{ name = "Taskiq team", email = "taskiq@no-reply.com" }]
+ maintainers = [{ name = "Taskiq team", email = "taskiq@no-reply.com" }]
+ keywords = [
+     "async",
+     "distributed",
+     "redis",
+     "result_backend",
+     "taskiq",
+     "tasks",
+ ]
+ classifiers = [
+     "Programming Language :: Python",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3 :: Only",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
+ ]
+ dependencies = [
+     "redis>=7.0.0,<8", # TODO: fix issues in tests with 7.1.0
+     "taskiq>=0.12.0",
+ ]
+
+ [project.urls]
+ homepage = "https://github.com/taskiq-python/taskiq-redis"
+ repository = "https://github.com/taskiq-python/taskiq-redis"
+
+ [dependency-groups]
+ dev = [
+     "pre-commit>=4.5.0",
+     { include-group = "lint" },
+     { include-group = "test" },
+ ]
+ lint = [
+     "black>=25.11.0",
+     "mypy>=1.19.0",
+     "ruff>=0.14.7",
+ ]
+ test = [
+     "fakeredis>=2.32.1",
+     "freezegun>=1.5.5",
+     "pytest>=9.0.1",
+     "pytest-cov>=7.0.0",
+     "pytest-env>=1.2.0",
+     "pytest-xdist>=3.8.0",
+ ]
+
+ [build-system]
+ requires = ["uv_build>=0.9.13,<0.10.0"]
+ build-backend = "uv_build"
+
+ [tool.mypy]
+ strict = true
+ ignore_missing_imports = true
+ allow_subclassing_any = true
+ allow_untyped_calls = true
+ pretty = true
+ show_error_codes = true
+ implicit_reexport = true
+ allow_untyped_decorators = true
+ warn_return_any = false
+
+ [[tool.mypy.overrides]]
+ module = ["redis"]
+ ignore_missing_imports = true
+ ignore_errors = true
+ strict = false
+
+ [tool.pytest.ini_options]
+ filterwarnings = [
+     # about deprecated RedisScheduleSource usage - delete after removing RedisScheduleSource
+     "ignore:RedisScheduleSource is deprecated:DeprecationWarning",
+ ]
+
+ [tool.ruff]
+ # List of enabled rulsets.
+ # See https://docs.astral.sh/ruff/rules/ for more information.
+ lint.select = [
+     "E", # Error
+     "F", # Pyflakes
+     "W", # Pycodestyle
+     "C90", # McCabe complexity
+     "I", # Isort
+     "N", # pep8-naming
+     "D", # Pydocstyle
+     "ANN", # Pytype annotations
+     "S", # Bandit
+     "B", # Bugbear
+     "COM", # Commas
+     "C4", # Comprehensions
+     "ISC", # Implicit string concat
+     "PIE", # Unnecessary code
+     "T20", # Catch prints
+     "PYI", # validate pyi files
+     "Q", # Checks for quotes
+     "RSE", # Checks raise statements
+     "RET", # Checks return statements
+     "SLF", # Self checks
+     "SIM", # Simplificator
+     "PTH", # Pathlib checks
+     "ERA", # Checks for commented out code
+     "PL", # PyLint checks
+     "RUF", # Specific to Ruff checks
+     "FA102", # Future annotations
+     "UP", # Pyupgrade
+ ]
+ lint.ignore = [
+     "D105", # Missing docstring in magic method
+     "D107", # Missing docstring in __init__
+     "D212", # Multi-line docstring summary should start at the first line
+     "D401", # First line should be in imperative mood
+     "D104", # Missing docstring in public package
+     "D100", # Missing docstring in public module
+     "ANN401", # typing.Any are disallowed in `**kwargs
+     "PLR0913", # Too many arguments for function call
+     "D106", # Missing docstring in public nested class
+ ]
+ exclude = [".venv/"]
+ line-length = 88
+
+ [tool.ruff.lint.mccabe]
+ max-complexity = 10
+
+ [tool.ruff.lint.per-file-ignores]
+ "tests/*" = [
+     "S101", # Use of assert detected
+     "S301", # Use of pickle detected
+     "D103", # Missing docstring in public function
+     "SLF001", # Private member accessed
+     "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes
+     "D101", # Missing docstring in public class
+ ]
+
+ [tool.ruff.lint.pydocstyle]
+ convention = "pep257"
+ ignore-decorators = ["typing.overload"]
+
+ [tool.ruff.lint.pylint]
+ allow-magic-value-types = ["int", "str", "float"]
+
+ [tool.ruff.lint.flake8-bugbear]
+ extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"]
+
+ [tool.uv.build-backend]
+ module-root = ""
+ module-name = "taskiq_redis"
@@ -1,6 +1,6 @@
  import datetime
  from logging import getLogger
- from typing import Any, List, Optional
+ from typing import Any

  from redis.asyncio import BlockingConnectionPool, Redis
  from taskiq import ScheduledTask, ScheduleSource
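This import change repeats across every module in the diff: with the Python floor now at 3.10, `Optional[X]`, `List[X]`, and `Dict[K, V]` become `X | None`, `list[X]`, and `dict[K, V]` (PEP 604 and PEP 585), and `TypeAlias` comes straight from `typing` rather than `typing_extensions`. A standalone before/after sketch; the names are illustrative:

from typing import TypeAlias  # in the stdlib since 3.10

Sentinel: TypeAlias = tuple[str, int]  # was Tuple[str, int]

def first_port(sentinels: list[Sentinel]) -> int | None:  # was Optional[int]
    # Builtin generics and PEP 604 unions need no extra typing imports.
    return sentinels[0][1] if sentinels else None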
@@ -19,8 +19,8 @@ class ListRedisScheduleSource(ScheduleSource):
          self,
          url: str,
          prefix: str = "schedule",
-         max_connection_pool_size: Optional[int] = None,
-         serializer: Optional[TaskiqSerializer] = None,
+         max_connection_pool_size: int | None = None,
+         serializer: TaskiqSerializer | None = None,
          buffer_size: int = 50,
          skip_past_schedules: bool = False,
          **connection_kwargs: Any,
@@ -48,7 +48,7 @@ class ListRedisScheduleSource(ScheduleSource):
              serializer = PickleSerializer()
          self._serializer = serializer
          self._is_first_run = True
-         self._previous_schedule_source: Optional[ScheduleSource] = None
+         self._previous_schedule_source: ScheduleSource | None = None
          self._delete_schedules_after_migration: bool = True
          self._skip_past_schedules = skip_past_schedules

@@ -85,11 +85,14 @@ class ListRedisScheduleSource(ScheduleSource):
          """Get the key for a cron-based schedule."""
          return f"{self._prefix}:cron"

+     def _get_interval_key(self) -> str:
+         return f"{self._prefix}:interval"
+
      def _get_data_key(self, schedule_id: str) -> str:
          """Get the key for a schedule data."""
          return f"{self._prefix}:data:{schedule_id}"

-     def _parse_time_key(self, key: str) -> Optional[datetime.datetime]:
+     def _parse_time_key(self, key: str) -> datetime.datetime | None:
          """Get time value from the timed-key."""
          try:
              dt_str = key.split(":", 2)[2]
@@ -130,7 +133,7 @@ class ListRedisScheduleSource(ScheduleSource):
                  if key_time and key_time <= minute_before:
                      time_keys.append(key.decode())
              for key in time_keys:
-                 schedules.extend(await redis.lrange(key, 0, -1))
+                 schedules.extend(await redis.lrange(key, 0, -1))  # type: ignore[misc]

          return schedules

@@ -146,10 +149,12 @@ class ListRedisScheduleSource(ScheduleSource):
              )
              # We need to remove the schedule from the cron or time list.
              if schedule.cron is not None:
-                 await redis.lrem(self._get_cron_key(), 0, schedule_id)
+                 await redis.lrem(self._get_cron_key(), 0, schedule_id)  # type: ignore[misc]
              elif schedule.time is not None:
                  time_key = self._get_time_key(schedule.time)
-                 await redis.lrem(time_key, 0, schedule_id)
+                 await redis.lrem(time_key, 0, schedule_id)  # type: ignore[misc]
+             elif schedule.interval:
+                 await redis.lrem(self._get_interval_key(), 0, schedule_id)  # type: ignore[misc]

      async def add_schedule(self, schedule: "ScheduledTask") -> None:
          """Add a schedule to the source."""
@@ -163,19 +168,24 @@ class ListRedisScheduleSource(ScheduleSource):
              # This is an optimization, so we can get all the schedules
              # for the current time much faster.
              if schedule.cron is not None:
-                 await redis.rpush(self._get_cron_key(), schedule.schedule_id)
+                 await redis.rpush(self._get_cron_key(), schedule.schedule_id)  # type: ignore[misc]
              elif schedule.time is not None:
-                 await redis.rpush(
+                 await redis.rpush(  # type: ignore[misc]
                      self._get_time_key(schedule.time),
                      schedule.schedule_id,
                  )
+             elif schedule.interval:
+                 await redis.rpush(  # type: ignore[misc]
+                     self._get_interval_key(),
+                     schedule.schedule_id,
+                 )

      async def post_send(self, task: ScheduledTask) -> None:
          """Delete a task after it's completed."""
          if task.time is not None:
              await self.delete_schedule(task.schedule_id)

-     async def get_schedules(self) -> List["ScheduledTask"]:
+     async def get_schedules(self) -> list["ScheduledTask"]:
          """
          Get all schedules.

@@ -195,11 +205,15 @@ class ListRedisScheduleSource(ScheduleSource):
              self._is_first_run = False
          async with Redis(connection_pool=self._connection_pool) as redis:
              buffer = []
-             crons = await redis.lrange(self._get_cron_key(), 0, -1)
+             crons = await redis.lrange(self._get_cron_key(), 0, -1)  # type: ignore[misc]
              logger.debug("Got %d cron schedules", len(crons))
              if crons:
                  buffer.extend(crons)
-             timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))
+             intervals = await redis.lrange(self._get_interval_key(), 0, -1)  # type: ignore[misc]
+             logger.debug("Got %d interval schedules", len(intervals))
+             if intervals:
+                 buffer.extend(intervals)
+             timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))  # type: ignore[misc]
              logger.debug("Got %d timed schedules", len(timed))
              if timed:
                  buffer.extend(timed)
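Taken together, the three hunks above add a third schedule kind next to cron and time: interval schedules get their own Redis list under `<prefix>:interval`, and add_schedule, delete_schedule, and get_schedules all consult it. A hedged usage sketch, assuming taskiq >= 0.12's ScheduledTask accepts an `interval` field; the task name, schedule id, and URL are illustrative:

from taskiq import ScheduledTask
from taskiq_redis import ListRedisScheduleSource

source = ListRedisScheduleSource("redis://localhost:6379")

async def register() -> None:
    schedule = ScheduledTask(
        task_name="my_module:my_task",
        labels={},
        args=[],
        kwargs={},
        schedule_id="interval-demo",
        interval=30,  # indexed under "schedule:interval" by add_schedule
    )
    await source.add_schedule(schedule)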
@@ -1,16 +1,6 @@
- import sys
+ from collections.abc import AsyncIterator
  from contextlib import asynccontextmanager
- from typing import (
-     TYPE_CHECKING,
-     Any,
-     AsyncIterator,
-     Dict,
-     List,
-     Optional,
-     Tuple,
-     TypeVar,
-     Union,
- )
+ from typing import TYPE_CHECKING, Any, TypeAlias, TypeVar

  from redis.asyncio import BlockingConnectionPool, Redis, Sentinel
  from redis.asyncio.cluster import RedisCluster
@@ -28,11 +18,6 @@ from taskiq_redis.exceptions import (
      ResultIsMissingError,
  )

- if sys.version_info >= (3, 10):
-     from typing import TypeAlias
- else:
-     from typing_extensions import TypeAlias
-
  if TYPE_CHECKING:
      _Redis: TypeAlias = Redis[bytes]  # type: ignore
      _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]  # type: ignore
@@ -52,11 +37,11 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
          self,
          redis_url: str,
          keep_results: bool = True,
-         result_ex_time: Optional[int] = None,
-         result_px_time: Optional[int] = None,
-         max_connection_pool_size: Optional[int] = None,
-         serializer: Optional[TaskiqSerializer] = None,
-         prefix_str: Optional[str] = None,
+         result_ex_time: int | None = None,
+         result_px_time: int | None = None,
+         max_connection_pool_size: int | None = None,
+         serializer: TaskiqSerializer | None = None,
+         prefix_str: str | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -121,17 +106,15 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
          :param task_id: ID of the task.
          :param result: TaskiqResult instance.
          """
-         redis_set_params: Dict[str, Union[str, int, bytes]] = {
-             "name": self._task_name(task_id),
-             "value": self.serializer.dumpb(model_dump(result)),
-         }
-         if self.result_ex_time:
-             redis_set_params["ex"] = self.result_ex_time
-         elif self.result_px_time:
-             redis_set_params["px"] = self.result_px_time
-
+         name = self._task_name(task_id)
+         value = self.serializer.dumpb(model_dump(result))
          async with Redis(connection_pool=self.redis_pool) as redis:
-             await redis.set(**redis_set_params)
+             if self.result_ex_time:
+                 await redis.set(name=name, value=value, ex=self.result_ex_time)
+             elif self.result_px_time:
+                 await redis.set(name=name, value=value, px=self.result_px_time)
+             else:
+                 await redis.set(name=name, value=value)

      async def is_result_ready(self, task_id: str) -> bool:
          """
@@ -195,22 +178,20 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
          :param task_id: ID of the task.
          :param result: task's TaskProgress instance.
          """
-         redis_set_params: Dict[str, Union[str, int, bytes]] = {
-             "name": self._task_name(task_id) + PROGRESS_KEY_SUFFIX,
-             "value": self.serializer.dumpb(model_dump(progress)),
-         }
-         if self.result_ex_time:
-             redis_set_params["ex"] = self.result_ex_time
-         elif self.result_px_time:
-             redis_set_params["px"] = self.result_px_time
-
+         name = self._task_name(task_id) + PROGRESS_KEY_SUFFIX
+         value = self.serializer.dumpb(model_dump(progress))
          async with Redis(connection_pool=self.redis_pool) as redis:
-             await redis.set(**redis_set_params)
+             if self.result_ex_time:
+                 await redis.set(name=name, value=value, ex=self.result_ex_time)
+             elif self.result_px_time:
+                 await redis.set(name=name, value=value, px=self.result_px_time)
+             else:
+                 await redis.set(name=name, value=value)

      async def get_progress(
          self,
          task_id: str,
-     ) -> Union[TaskProgress[_ReturnType], None]:
+     ) -> TaskProgress[_ReturnType] | None:
          """
          Gets progress results from the task.

@@ -238,10 +219,10 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
          self,
          redis_url: str,
          keep_results: bool = True,
-         result_ex_time: Optional[int] = None,
-         result_px_time: Optional[int] = None,
-         serializer: Optional[TaskiqSerializer] = None,
-         prefix_str: Optional[str] = None,
+         result_ex_time: int | None = None,
+         result_px_time: int | None = None,
+         serializer: TaskiqSerializer | None = None,
+         prefix_str: str | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -258,7 +239,7 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
          :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
              and result_px_time are equal zero.
          """
-         self.redis: "RedisCluster" = RedisCluster.from_url(
+         self.redis: RedisCluster = RedisCluster.from_url(
              redis_url,
              **connection_kwargs,
          )
@@ -296,24 +277,23 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
          result: TaskiqResult[_ReturnType],
      ) -> None:
          """
-         Sets task result in redis.
+         Sets task result in redis cluster.

          Dumps TaskiqResult instance into the bytes and writes
-         it to redis.
+         it to redis cluster.

          :param task_id: ID of the task.
          :param result: TaskiqResult instance.
          """
-         redis_set_params: Dict[str, Union[str, bytes, int]] = {
-             "name": self._task_name(task_id),
-             "value": self.serializer.dumpb(model_dump(result)),
-         }
-         if self.result_ex_time:
-             redis_set_params["ex"] = self.result_ex_time
-         elif self.result_px_time:
-             redis_set_params["px"] = self.result_px_time
-
-         await self.redis.set(**redis_set_params)  # type: ignore
+         name = self._task_name(task_id)
+         value = self.serializer.dumpb(model_dump(result))
+         async with self.redis as redis:
+             if self.result_ex_time:
+                 await redis.set(name=name, value=value, ex=self.result_ex_time)
+             elif self.result_px_time:
+                 await redis.set(name=name, value=value, px=self.result_px_time)
+             else:
+                 await redis.set(name=name, value=value)

      async def is_result_ready(self, task_id: str) -> bool:
          """
@@ -367,29 +347,28 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
          progress: TaskProgress[_ReturnType],
      ) -> None:
          """
-         Sets task progress in redis.
+         Sets task progress in redis cluster.

          Dumps TaskProgress instance into the bytes and writes
-         it to redis with a standard suffix on the task_id as the key
+         it to redis cluster with a standard suffix on the task_id as the key

          :param task_id: ID of the task.
          :param result: task's TaskProgress instance.
          """
-         redis_set_params: Dict[str, Union[str, int, bytes]] = {
-             "name": self._task_name(task_id) + PROGRESS_KEY_SUFFIX,
-             "value": self.serializer.dumpb(model_dump(progress)),
-         }
-         if self.result_ex_time:
-             redis_set_params["ex"] = self.result_ex_time
-         elif self.result_px_time:
-             redis_set_params["px"] = self.result_px_time
-
-         await self.redis.set(**redis_set_params)  # type: ignore
+         name = self._task_name(task_id) + PROGRESS_KEY_SUFFIX
+         value = self.serializer.dumpb(model_dump(progress))
+         async with self.redis as redis:
+             if self.result_ex_time:
+                 await redis.set(name=name, value=value, ex=self.result_ex_time)
+             elif self.result_px_time:
+                 await redis.set(name=name, value=value, px=self.result_px_time)
+             else:
+                 await redis.set(name=name, value=value)

      async def get_progress(
          self,
          task_id: str,
-     ) -> Union[TaskProgress[_ReturnType], None]:
+     ) -> TaskProgress[_ReturnType] | None:
          """
          Gets progress results from the task.

@@ -414,15 +393,15 @@ class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):

      def __init__(
          self,
-         sentinels: List[Tuple[str, int]],
+         sentinels: list[tuple[str, int]],
          master_name: str,
          keep_results: bool = True,
-         result_ex_time: Optional[int] = None,
-         result_px_time: Optional[int] = None,
+         result_ex_time: int | None = None,
+         result_px_time: int | None = None,
          min_other_sentinels: int = 0,
-         sentinel_kwargs: Optional[Any] = None,
-         serializer: Optional[TaskiqSerializer] = None,
-         prefix_str: Optional[str] = None,
+         sentinel_kwargs: Any | None = None,
+         serializer: TaskiqSerializer | None = None,
+         prefix_str: str | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -490,17 +469,15 @@ class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):
          :param task_id: ID of the task.
          :param result: TaskiqResult instance.
          """
-         redis_set_params: Dict[str, Union[str, bytes, int]] = {
-             "name": self._task_name(task_id),
-             "value": self.serializer.dumpb(model_dump(result)),
-         }
-         if self.result_ex_time:
-             redis_set_params["ex"] = self.result_ex_time
-         elif self.result_px_time:
-             redis_set_params["px"] = self.result_px_time
-
+         name = self._task_name(task_id)
+         value = self.serializer.dumpb(model_dump(result))
          async with self._acquire_master_conn() as redis:
-             await redis.set(**redis_set_params)  # type: ignore
+             if self.result_ex_time:
+                 await redis.set(name=name, value=value, ex=self.result_ex_time)
+             elif self.result_px_time:
+                 await redis.set(name=name, value=value, px=self.result_px_time)
+             else:
+                 await redis.set(name=name, value=value)

      async def is_result_ready(self, task_id: str) -> bool:
          """
@@ -559,27 +536,25 @@ class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):
          Sets task progress in redis.

          Dumps TaskProgress instance into the bytes and writes
-         it to redis with a standard suffix on the task_id as the key
+         it to redis via sentinel with a standard suffix on the task_id as the key

          :param task_id: ID of the task.
          :param result: task's TaskProgress instance.
          """
-         redis_set_params: Dict[str, Union[str, int, bytes]] = {
-             "name": self._task_name(task_id) + PROGRESS_KEY_SUFFIX,
-             "value": self.serializer.dumpb(model_dump(progress)),
-         }
-         if self.result_ex_time:
-             redis_set_params["ex"] = self.result_ex_time
-         elif self.result_px_time:
-             redis_set_params["px"] = self.result_px_time
-
+         name = self._task_name(task_id) + PROGRESS_KEY_SUFFIX
+         value = self.serializer.dumpb(model_dump(progress))
          async with self._acquire_master_conn() as redis:
-             await redis.set(**redis_set_params)  # type: ignore
+             if self.result_ex_time:
+                 await redis.set(name=name, value=value, ex=self.result_ex_time)
+             elif self.result_px_time:
+                 await redis.set(name=name, value=value, px=self.result_px_time)
+             else:
+                 await redis.set(name=name, value=value)

      async def get_progress(
          self,
          task_id: str,
-     ) -> Union[TaskProgress[_ReturnType], None]:
+     ) -> TaskProgress[_ReturnType] | None:
          """
          Gets progress results from the task.

@@ -1,14 +1,10 @@
- import sys
  import uuid
+ from collections.abc import AsyncGenerator, Awaitable, Callable
  from logging import getLogger
  from typing import (
      TYPE_CHECKING,
      Any,
-     AsyncGenerator,
-     Awaitable,
-     Callable,
-     Dict,
-     Optional,
+     TypeAlias,
      TypeVar,
  )

@@ -22,10 +18,6 @@ _T = TypeVar("_T")

  logger = getLogger("taskiq.redis_broker")

- if sys.version_info >= (3, 10):
-     from typing import TypeAlias
- else:
-     from typing_extensions import TypeAlias

  if TYPE_CHECKING:
      _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]  # type: ignore
@@ -39,10 +31,10 @@ class BaseRedisBroker(AsyncBroker):
      def __init__(
          self,
          url: str,
-         task_id_generator: Optional[Callable[[], str]] = None,
-         result_backend: Optional[AsyncResultBackend[_T]] = None,
+         task_id_generator: Callable[[], str] | None = None,
+         result_backend: AsyncResultBackend[_T] | None = None,
          queue_name: str = "taskiq",
-         max_connection_pool_size: Optional[int] = None,
+         max_connection_pool_size: int | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -122,7 +114,7 @@ class ListQueueBroker(BaseRedisBroker):
          """
          queue_name = message.labels.get("queue_name") or self.queue_name
          async with Redis(connection_pool=self.connection_pool) as redis_conn:
-             await redis_conn.lpush(queue_name, message.message)
+             await redis_conn.lpush(queue_name, message.message)  # type: ignore

      async def listen(self) -> AsyncGenerator[bytes, None]:
          """
@@ -137,7 +129,7 @@ class ListQueueBroker(BaseRedisBroker):
          while True:
              try:
                  async with Redis(connection_pool=self.connection_pool) as redis_conn:
-                     yield (await redis_conn.brpop(self.queue_name))[
+                     yield (await redis_conn.brpop(self.queue_name))[  # type: ignore
                          redis_brpop_data_position
                      ]
              except ConnectionError as exc:
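The two `# type: ignore` hunks above cover the broker's core queue operations, and the pairing itself is the classic Redis FIFO: producers LPUSH onto the head of the list while the consumer blocks on BRPOP at the tail. A standalone sketch, with an illustrative queue name:

import asyncio
from redis.asyncio import Redis

async def demo() -> None:
    redis = Redis()
    # List after the push (head -> tail): b"second", b"first".
    await redis.lpush("taskiq", b"first", b"second")
    # BRPOP blocks until an element is available and pops from the tail,
    # so the oldest message (b"first") comes out first.
    queue, oldest = await redis.brpop("taskiq")
    print(queue, oldest)
    await redis.aclose()

asyncio.run(demo())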
@@ -159,18 +151,18 @@ class RedisStreamBroker(BaseRedisBroker):
          self,
          url: str,
          queue_name: str = "taskiq",
-         max_connection_pool_size: Optional[int] = None,
+         max_connection_pool_size: int | None = None,
          consumer_group_name: str = "taskiq",
-         consumer_name: Optional[str] = None,
+         consumer_name: str | None = None,
          consumer_id: str = "$",
          mkstream: bool = True,
          xread_block: int = 2000,
-         maxlen: Optional[int] = None,
+         maxlen: int | None = None,
          approximate: bool = True,
          idle_timeout: int = 600000,  # 10 minutes
          unacknowledged_batch_size: int = 100,
-         xread_count: Optional[int] = 100,
-         additional_streams: Optional[Dict[str, str]] = None,
+         xread_count: int | None = 100,
+         additional_streams: dict[str, str | int] | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -281,7 +273,7 @@ class RedisStreamBroker(BaseRedisBroker):
              self.consumer_name,
              {
                  self.queue_name: ">",
-                 **self.additional_streams,
+                 **self.additional_streams,  # type: ignore[dict-item]
              },
              block=self.block,
              noack=False,
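Note that `additional_streams` now accepts `dict[str, str | int]`, so entry IDs may be given as integers as well as strings. A hedged construction sketch (the URL and stream names are illustrative); the mapping is spread into XREADGROUP as shown above, where the values are read offsets and ">" requests only messages never delivered to the consumer group:

from taskiq_redis import RedisStreamBroker

broker = RedisStreamBroker(
    "redis://localhost:6379",
    queue_name="taskiq",
    # Also consume the "events" stream alongside the main queue.
    additional_streams={"events": ">"},
)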
@@ -1,6 +1,7 @@
  import uuid
+ from collections.abc import AsyncGenerator, Awaitable, Callable
  from logging import getLogger
- from typing import Any, AsyncGenerator, Awaitable, Callable, Dict, Optional
+ from typing import Any

  from redis.asyncio import RedisCluster, ResponseError
  from taskiq import AckableMessage
@@ -30,7 +31,7 @@ class BaseRedisClusterBroker(AsyncBroker):
          """
          super().__init__()

-         self.redis: "RedisCluster[bytes]" = RedisCluster.from_url(  # type: ignore
+         self.redis: RedisCluster[bytes] = RedisCluster.from_url(  # type: ignore
              url=url,
              max_connections=max_connection_pool_size,
              **connection_kwargs,
@@ -89,13 +90,13 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
          queue_name: str = "taskiq",
          max_connection_pool_size: int = 2**31,
          consumer_group_name: str = "taskiq",
-         consumer_name: Optional[str] = None,
+         consumer_name: str | None = None,
          consumer_id: str = "$",
          mkstream: bool = True,
          xread_block: int = 10000,
-         maxlen: Optional[int] = None,
+         maxlen: int | None = None,
          approximate: bool = True,
-         additional_streams: Optional[Dict[str, str]] = None,
+         additional_streams: dict[str, str] | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -1,18 +1,11 @@
- import sys
  import uuid
+ from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable
  from contextlib import asynccontextmanager
  from logging import getLogger
  from typing import (
      TYPE_CHECKING,
      Any,
-     AsyncGenerator,
-     AsyncIterator,
-     Awaitable,
-     Callable,
-     Dict,
-     List,
-     Optional,
-     Tuple,
+     TypeAlias,
      TypeVar,
  )

@@ -21,11 +14,6 @@ from redis.asyncio import Redis, Sentinel
  from taskiq import AckableMessage, AsyncResultBackend, BrokerMessage
  from taskiq.abc.broker import AsyncBroker

- if sys.version_info >= (3, 10):
-     from typing import TypeAlias
- else:
-     from typing_extensions import TypeAlias
-
  if TYPE_CHECKING:
      _Redis: TypeAlias = Redis[bytes]  # type: ignore
  else:
@@ -41,13 +29,13 @@ class BaseSentinelBroker(AsyncBroker):

      def __init__(
          self,
-         sentinels: List[Tuple[str, int]],
+         sentinels: list[tuple[str, int]],
          master_name: str,
-         result_backend: Optional[AsyncResultBackend[_T]] = None,
-         task_id_generator: Optional[Callable[[], str]] = None,
+         result_backend: AsyncResultBackend[_T] | None = None,
+         task_id_generator: Callable[[], str] | None = None,
          queue_name: str = "taskiq",
          min_other_sentinels: int = 0,
-         sentinel_kwargs: Optional[Any] = None,
+         sentinel_kwargs: Any | None = None,
          **connection_kwargs: Any,
      ) -> None:
          super().__init__(
@@ -148,18 +136,18 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):

      def __init__(
          self,
-         sentinels: List[Tuple[str, int]],
+         sentinels: list[tuple[str, int]],
          master_name: str,
          min_other_sentinels: int = 0,
          queue_name: str = "taskiq",
          consumer_group_name: str = "taskiq",
-         consumer_name: Optional[str] = None,
+         consumer_name: str | None = None,
          consumer_id: str = "$",
          mkstream: bool = True,
          xread_block: int = 10000,
-         maxlen: Optional[int] = None,
+         maxlen: int | None = None,
          approximate: bool = True,
-         additional_streams: Optional[Dict[str, str]] = None,
+         additional_streams: dict[str, str] | None = None,
          **connection_kwargs: Any,
      ) -> None:
          """
@@ -1,7 +1,7 @@
- import sys
  import warnings
+ from collections.abc import AsyncIterator
  from contextlib import asynccontextmanager
- from typing import TYPE_CHECKING, Any, AsyncIterator, List, Optional, Tuple
+ from typing import TYPE_CHECKING, Any, TypeAlias

  from redis.asyncio import (
      BlockingConnectionPool,
@@ -16,11 +16,6 @@ from taskiq.compat import model_dump, model_validate
  from taskiq.scheduler.scheduled_task import ScheduledTask
  from taskiq.serializers import PickleSerializer

- if sys.version_info >= (3, 10):
-     from typing import TypeAlias
- else:
-     from typing_extensions import TypeAlias
-
  if TYPE_CHECKING:
      _Redis: TypeAlias = Redis[bytes]  # type: ignore
      _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]  # type: ignore
@@ -50,8 +45,8 @@ class RedisScheduleSource(ScheduleSource):
          url: str,
          prefix: str = "schedule",
          buffer_size: int = 50,
-         max_connection_pool_size: Optional[int] = None,
-         serializer: Optional[TaskiqSerializer] = None,
+         max_connection_pool_size: int | None = None,
+         serializer: TaskiqSerializer | None = None,
          **connection_kwargs: Any,
      ) -> None:
          warnings.warn(
@@ -89,7 +84,7 @@ class RedisScheduleSource(ScheduleSource):
              self.serializer.dumpb(model_dump(schedule)),
          )

-     async def get_schedules(self) -> List[ScheduledTask]:
+     async def get_schedules(self) -> list[ScheduledTask]:
          """
          Get all schedules from redis.

@@ -143,11 +138,11 @@ class RedisClusterScheduleSource(ScheduleSource):
          self,
          url: str,
          prefix: str = "schedule",
-         serializer: Optional[TaskiqSerializer] = None,
+         serializer: TaskiqSerializer | None = None,
          **connection_kwargs: Any,
      ) -> None:
          self.prefix = prefix
-         self.redis: "RedisCluster" = RedisCluster.from_url(
+         self.redis: RedisCluster = RedisCluster.from_url(
              url,
              **connection_kwargs,
          )
@@ -171,7 +166,7 @@ class RedisClusterScheduleSource(ScheduleSource):
              self.serializer.dumpb(model_dump(schedule)),
          )

-     async def get_schedules(self) -> List[ScheduledTask]:
+     async def get_schedules(self) -> list[ScheduledTask]:
          """
          Get all schedules from redis.

@@ -218,13 +213,13 @@ class RedisSentinelScheduleSource(ScheduleSource):

      def __init__(
          self,
-         sentinels: List[Tuple[str, int]],
+         sentinels: list[tuple[str, int]],
          master_name: str,
          prefix: str = "schedule",
          buffer_size: int = 50,
-         serializer: Optional[TaskiqSerializer] = None,
+         serializer: TaskiqSerializer | None = None,
          min_other_sentinels: int = 0,
-         sentinel_kwargs: Optional[Any] = None,
+         sentinel_kwargs: Any | None = None,
          **connection_kwargs: Any,
      ) -> None:
          self.prefix = prefix
@@ -263,7 +258,7 @@ class RedisSentinelScheduleSource(ScheduleSource):
              self.serializer.dumpb(model_dump(schedule)),
          )

-     async def get_schedules(self) -> List[ScheduledTask]:
+     async def get_schedules(self) -> list[ScheduledTask]:
          """
          Get all schedules from redis.

@@ -1,129 +0,0 @@
- [tool.poetry]
- name = "taskiq-redis"
- version = "1.1.2"
- description = "Redis integration for taskiq"
- authors = ["taskiq-team <taskiq@norely.com>"]
- readme = "README.md"
- classifiers = [
-     "Programming Language :: Python",
-     "Programming Language :: Python :: 3",
-     "Programming Language :: Python :: 3 :: Only",
-     "Programming Language :: Python :: 3.8",
-     "Programming Language :: Python :: 3.9",
-     "Programming Language :: Python :: 3.10",
-     "Programming Language :: Python :: 3.11",
- ]
- homepage = "https://github.com/taskiq-python/taskiq-redis"
- repository = "https://github.com/taskiq-python/taskiq-redis"
- keywords = [
-     "taskiq",
-     "tasks",
-     "distributed",
-     "async",
-     "redis",
-     "result_backend",
- ]
-
- [tool.poetry.dependencies]
- python = "^3.9"
- taskiq = ">=0.11.12,<1"
- redis = "^6"
-
- [tool.poetry.group.dev.dependencies]
- pytest = "^8"
- mypy = "^1"
- black = "^25"
- pytest-cov = "^6"
- anyio = "^4"
- pytest-env = "^1"
- fakeredis = "^2"
- pre-commit = "^4"
- pytest-xdist = { version = "^3", extras = ["psutil"] }
- ruff = "^0"
- freezegun = "^1.5.1"
-
- [tool.mypy]
- strict = true
- ignore_missing_imports = true
- allow_subclassing_any = true
- allow_untyped_calls = true
- pretty = true
- show_error_codes = true
- implicit_reexport = true
- allow_untyped_decorators = true
- warn_return_any = false
-
- [[tool.mypy.overrides]]
- module = ['redis']
- ignore_missing_imports = true
- ignore_errors = true
- strict = false
-
- [build-system]
- requires = ["poetry-core>=1.0.0"]
- build-backend = "poetry.core.masonry.api"
-
- [tool.ruff]
- # List of enabled rulsets.
- # See https://docs.astral.sh/ruff/rules/ for more information.
- lint.select = [
-     "E", # Error
-     "F", # Pyflakes
-     "W", # Pycodestyle
-     "C90", # McCabe complexity
-     "I", # Isort
-     "N", # pep8-naming
-     "D", # Pydocstyle
-     "ANN", # Pytype annotations
-     "S", # Bandit
-     "B", # Bugbear
-     "COM", # Commas
-     "C4", # Comprehensions
-     "ISC", # Implicit string concat
-     "PIE", # Unnecessary code
-     "T20", # Catch prints
-     "PYI", # validate pyi files
-     "Q", # Checks for quotes
-     "RSE", # Checks raise statements
-     "RET", # Checks return statements
-     "SLF", # Self checks
-     "SIM", # Simplificator
-     "PTH", # Pathlib checks
-     "ERA", # Checks for commented out code
-     "PL", # PyLint checks
-     "RUF", # Specific to Ruff checks
- ]
- lint.ignore = [
-     "D105", # Missing docstring in magic method
-     "D107", # Missing docstring in __init__
-     "D212", # Multi-line docstring summary should start at the first line
-     "D401", # First line should be in imperative mood
-     "D104", # Missing docstring in public package
-     "D100", # Missing docstring in public module
-     "ANN401", # typing.Any are disallowed in `**kwargs
-     "PLR0913", # Too many arguments for function call
-     "D106", # Missing docstring in public nested class
- ]
- exclude = [".venv/"]
- lint.mccabe = { max-complexity = 10 }
- line-length = 88
-
- [tool.ruff.lint.per-file-ignores]
- "tests/*" = [
-     "S101", # Use of assert detected
-     "S301", # Use of pickle detected
-     "D103", # Missing docstring in public function
-     "SLF001", # Private member accessed
-     "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes
-     "D101", # Missing docstring in public class
- ]
-
- [tool.ruff.lint.pydocstyle]
- convention = "pep257"
- ignore-decorators = ["typing.overload"]
-
- [tool.ruff.lint.pylint]
- allow-magic-value-types = ["int", "str", "float"]
-
- [tool.ruff.lint.flake8-bugbear]
- extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"]