taskiq-redis 1.1.1__tar.gz → 1.2.0__tar.gz
This diff compares the contents of two package versions as published to their public registry. It is provided for informational purposes only and reflects the released artifacts exactly as they appear in the registry. Removed-line text that the diff viewer truncated is marked with "…".
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/PKG-INFO +13 -14
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/pyproject.toml +56 -26
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/list_schedule_source.py +13 -13
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/redis_backend.py +76 -101
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/redis_broker.py +18 -26
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/redis_cluster_broker.py +10 -9
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/redis_sentinel_broker.py +14 -26
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/schedule_source.py +12 -17
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/LICENSE +0 -0
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/README.md +0 -0
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/__init__.py +0 -0
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/exceptions.py +0 -0
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/py.typed +0 -0
--- taskiq_redis-1.1.1/PKG-INFO
+++ taskiq_redis-1.2.0/PKG-INFO
@@ -1,26 +1,26 @@
 Metadata-Version: 2.4
 Name: taskiq-redis
-Version: 1.1.1
+Version: 1.2.0
 Summary: Redis integration for taskiq
-License-File: LICENSE
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
-Author: …
-Author-email: taskiq@…
-…
+Author: Taskiq team
+Author-email: Taskiq team <taskiq@no-reply.com>
+License-Expression: MIT
+License-File: LICENSE
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3…
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-…
-…
-…
-…
-Requires-…
-Project-URL: …
-Project-URL: …
+Requires-Dist: redis>=7.0.0,<7.1.0
+Requires-Dist: taskiq>=0.12.0
+Maintainer: Taskiq team
+Maintainer-email: Taskiq team <taskiq@no-reply.com>
+Requires-Python: >=3.10
+Project-URL: homepage, https://github.com/taskiq-python/taskiq-redis
+Project-URL: repository, https://github.com/taskiq-python/taskiq-redis
 Description-Content-Type: text/markdown
 
 # TaskIQ-Redis
@@ -228,4 +228,3 @@ Simply pass the desired queue name as message's label when kicking a task to override …
 async def low_priority_task() -> None:
     print("I don't mind waiting a little longer")
 ```
-
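The metadata diff above amounts to: a new maintainer identity, an SPDX `License-Expression`, Python 3.8/3.9 dropped in favour of `Requires-Python: >=3.10`, and pinned runtime dependencies. A minimal post-upgrade sanity check, assuming the 1.2.0 wheel is installed in the current environment:

```python
# Read the installed package's metadata via the standard library.
from importlib.metadata import metadata, version

assert version("taskiq-redis") == "1.2.0"
md = metadata("taskiq-redis")
assert md["Requires-Python"] == ">=3.10"
assert md["License-Expression"] == "MIT"
```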
--- taskiq_redis-1.1.1/pyproject.toml
+++ taskiq_redis-1.2.0/pyproject.toml
@@ -1,20 +1,26 @@
-[tool.poetry]
+[project]
 name = "taskiq-redis"
-version = "1.1.1"
+version = "1.2.0"
 description = "Redis integration for taskiq"
-authors = […
+authors = [
+    { name = "Taskiq team", email = "taskiq@no-reply.com" }
+]
+maintainers = [
+    { name = "Taskiq team", email = "taskiq@no-reply.com" }
+]
+license = "MIT"
+license-files = ["LICENSE"]
 readme = "README.md"
+requires-python = ">=3.10"
 classifiers = [
     "Programming Language :: Python",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
 ]
-homepage = "https://github.com/taskiq-python/taskiq-redis"
-repository = "https://github.com/taskiq-python/taskiq-redis"
 keywords = [
     "taskiq",
     "tasks",
@@ -23,24 +29,34 @@ keywords = [
     "redis",
     "result_backend",
 ]
+dependencies = [
+    "redis>=7.0.0,<7.1.0",  # TODO: fix issues in tests with 7.1.0
+    "taskiq>=0.12.0",
+]
 
-[…
-…
-…
-…
+[dependency-groups]
+dev = [
+    {include-group = "lint"},
+    {include-group = "test"},
+    "pre-commit>=4.5.0",
+]
+test = [
+    "fakeredis>=2.32.1",
+    "freezegun>=1.5.5",
+    "pytest>=9.0.1",
+    "pytest-cov>=7.0.0",
+    "pytest-env>=1.2.0",
+    "pytest-xdist>=3.8.0",
+]
+lint = [
+    "black>=25.11.0",
+    "mypy>=1.19.0",
+    "ruff>=0.14.7",
+]
 
-[…
-…
-…
-black = "^25"
-pytest-cov = "^6"
-anyio = "^4"
-pytest-env = "^1"
-fakeredis = "^2"
-pre-commit = "^4"
-pytest-xdist = { version = "^3", extras = ["psutil"] }
-ruff = "^0"
-freezegun = "^1.5.1"
+[project.urls]
+homepage = "https://github.com/taskiq-python/taskiq-redis"
+repository = "https://github.com/taskiq-python/taskiq-redis"
 
 [tool.mypy]
 strict = true
@@ -60,8 +76,12 @@ ignore_errors = true
 strict = false
 
 [build-system]
-requires = ["…
-build-backend = "…
+requires = ["uv_build>=0.9.13,<0.10.0"]
+build-backend = "uv_build"
+
+[tool.uv.build-backend]
+module-root = ""
+module-name = "taskiq_redis"
 
 [tool.ruff]
 # List of enabled rulsets.
@@ -92,6 +112,8 @@ lint.select = [
     "ERA", # Checks for commented out code
     "PL", # PyLint checks
     "RUF", # Specific to Ruff checks
+    "FA102", # Future annotations
+    "UP", # Pyupgrade
 ]
 lint.ignore = [
     "D105", # Missing docstring in magic method
@@ -105,9 +127,11 @@ lint.ignore = [
     "D106", # Missing docstring in public nested class
 ]
 exclude = [".venv/"]
-lint.mccabe = { max-complexity = 10 }
 line-length = 88
 
+[tool.ruff.lint.mccabe]
+max-complexity = 10
+
 [tool.ruff.lint.per-file-ignores]
 "tests/*" = [
     "S101", # Use of assert detected
@@ -127,3 +151,9 @@ allow-magic-value-types = ["int", "str", "float"]
 
 [tool.ruff.lint.flake8-bugbear]
 extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"]
+
+[tool.pytest.ini_options]
+filterwarnings = [
+    # about deprecated RedisScheduleSource usage - delete after removing RedisScheduleSource
+    'ignore:RedisScheduleSource is deprecated:DeprecationWarning',
+]
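The packaging changes move Poetry metadata into a PEP 621 `[project]` table, turn dev dependencies into PEP 735 `[dependency-groups]`, and swap the build backend for `uv_build`. The new `[tool.pytest.ini_options]` block filters the `RedisScheduleSource` deprecation warning; a self-contained sketch of the kind of warning that filter matches (the exact message text in the package may differ, only the quoted prefix matters):

```python
import warnings

# Emit a warning whose message starts with the filter's prefix,
# "RedisScheduleSource is deprecated".
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    warnings.warn(
        "RedisScheduleSource is deprecated, use ListRedisScheduleSource.",
        DeprecationWarning,
        stacklevel=2,
    )

assert issubclass(caught[0].category, DeprecationWarning)
```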
--- taskiq_redis-1.1.1/taskiq_redis/list_schedule_source.py
+++ taskiq_redis-1.2.0/taskiq_redis/list_schedule_source.py
@@ -1,6 +1,6 @@
 import datetime
 from logging import getLogger
-from typing import Any, …
+from typing import Any
 
 from redis.asyncio import BlockingConnectionPool, Redis
 from taskiq import ScheduledTask, ScheduleSource
@@ -19,8 +19,8 @@ class ListRedisScheduleSource(ScheduleSource):
         self,
         url: str,
         prefix: str = "schedule",
-        max_connection_pool_size: Optional[int] = None,
-        serializer: Optional[TaskiqSerializer] = None,
+        max_connection_pool_size: int | None = None,
+        serializer: TaskiqSerializer | None = None,
         buffer_size: int = 50,
         skip_past_schedules: bool = False,
         **connection_kwargs: Any,
@@ -48,7 +48,7 @@ class ListRedisScheduleSource(ScheduleSource):
             serializer = PickleSerializer()
         self._serializer = serializer
         self._is_first_run = True
-        self._previous_schedule_source: Optional[ScheduleSource] = None
+        self._previous_schedule_source: ScheduleSource | None = None
         self._delete_schedules_after_migration: bool = True
         self._skip_past_schedules = skip_past_schedules
 
@@ -89,7 +89,7 @@ class ListRedisScheduleSource(ScheduleSource):
         """Get the key for a schedule data."""
         return f"{self._prefix}:data:{schedule_id}"
 
-    def _parse_time_key(self, key: str) -> Optional[datetime.datetime]:
+    def _parse_time_key(self, key: str) -> datetime.datetime | None:
         """Get time value from the timed-key."""
         try:
             dt_str = key.split(":", 2)[2]
@@ -130,7 +130,7 @@ class ListRedisScheduleSource(ScheduleSource):
            if key_time and key_time <= minute_before:
                time_keys.append(key.decode())
        for key in time_keys:
-            schedules.extend(await redis.lrange(key, 0, -1))
+            schedules.extend(await redis.lrange(key, 0, -1))  # type: ignore[misc]
 
         return schedules
 
@@ -146,10 +146,10 @@ class ListRedisScheduleSource(ScheduleSource):
             )
             # We need to remove the schedule from the cron or time list.
             if schedule.cron is not None:
-                await redis.lrem(self._get_cron_key(), 0, schedule_id)
+                await redis.lrem(self._get_cron_key(), 0, schedule_id)  # type: ignore[misc]
             elif schedule.time is not None:
                 time_key = self._get_time_key(schedule.time)
-                await redis.lrem(time_key, 0, schedule_id)
+                await redis.lrem(time_key, 0, schedule_id)  # type: ignore[misc]
 
     async def add_schedule(self, schedule: "ScheduledTask") -> None:
         """Add a schedule to the source."""
@@ -163,9 +163,9 @@ class ListRedisScheduleSource(ScheduleSource):
             # This is an optimization, so we can get all the schedules
             # for the current time much faster.
             if schedule.cron is not None:
-                await redis.rpush(self._get_cron_key(), schedule.schedule_id)
+                await redis.rpush(self._get_cron_key(), schedule.schedule_id)  # type: ignore[misc]
             elif schedule.time is not None:
-                await redis.rpush(
+                await redis.rpush(  # type: ignore[misc]
                     self._get_time_key(schedule.time),
                     schedule.schedule_id,
                 )
@@ -175,7 +175,7 @@ class ListRedisScheduleSource(ScheduleSource):
         if task.time is not None:
             await self.delete_schedule(task.schedule_id)
 
-    async def get_schedules(self) -> List["ScheduledTask"]:
+    async def get_schedules(self) -> list["ScheduledTask"]:
         """
         Get all schedules.
 
@@ -195,11 +195,11 @@ class ListRedisScheduleSource(ScheduleSource):
         self._is_first_run = False
         async with Redis(connection_pool=self._connection_pool) as redis:
             buffer = []
-            crons = await redis.lrange(self._get_cron_key(), 0, -1)
+            crons = await redis.lrange(self._get_cron_key(), 0, -1)  # type: ignore[misc]
             logger.debug("Got %d cron schedules", len(crons))
             if crons:
                 buffer.extend(crons)
-            timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))
+            timed.extend(await redis.lrange(self._get_time_key(current_time), 0, -1))  # type: ignore[misc]
             logger.debug("Got %d timed schedules", len(timed))
             if timed:
                 buffer.extend(timed)
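Behaviour is unchanged here: the edits modernize annotations (`X | None`, `list[...]`) for the new 3.10 floor and add `# type: ignore[misc]` to the Redis list commands, apparently to satisfy strict mypy against the redis 7.x type hints. Typical wiring of this source, assuming the usual top-level exports of taskiq and taskiq_redis:

```python
from taskiq import TaskiqScheduler
from taskiq_redis import ListQueueBroker, ListRedisScheduleSource

broker = ListQueueBroker(url="redis://localhost:6379")
# Keys are namespaced under `prefix`, e.g. "schedule:data:<schedule_id>".
source = ListRedisScheduleSource(url="redis://localhost:6379", prefix="schedule")
scheduler = TaskiqScheduler(broker=broker, sources=[source])
```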
--- taskiq_redis-1.1.1/taskiq_redis/redis_backend.py
+++ taskiq_redis-1.2.0/taskiq_redis/redis_backend.py
@@ -1,16 +1,6 @@
-import sys
+from collections.abc import AsyncIterator
 from contextlib import asynccontextmanager
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncIterator,
-    Dict,
-    List,
-    Optional,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, TypeAlias, TypeVar
 
 from redis.asyncio import BlockingConnectionPool, Redis, Sentinel
 from redis.asyncio.cluster import RedisCluster
@@ -28,11 +18,6 @@ from taskiq_redis.exceptions import (
     ResultIsMissingError,
 )
 
-if sys.version_info >= (3, 10):
-    from typing import TypeAlias
-else:
-    from typing_extensions import TypeAlias
-
 if TYPE_CHECKING:
     _Redis: TypeAlias = Redis[bytes]  # type: ignore
     _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]  # type: ignore
@@ -52,11 +37,11 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         self,
         redis_url: str,
         keep_results: bool = True,
-        result_ex_time: Optional[int] = None,
-        result_px_time: Optional[int] = None,
-        max_connection_pool_size: Optional[int] = None,
-        serializer: Optional[TaskiqSerializer] = None,
-        prefix_str: Optional[str] = None,
+        result_ex_time: int | None = None,
+        result_px_time: int | None = None,
+        max_connection_pool_size: int | None = None,
+        serializer: TaskiqSerializer | None = None,
+        prefix_str: str | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -121,17 +106,15 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         :param task_id: ID of the task.
         :param result: TaskiqResult instance.
         """
-…
-…
-            "value": self.serializer.dumpb(model_dump(result)),
-        }
-        if self.result_ex_time:
-            redis_set_params["ex"] = self.result_ex_time
-        elif self.result_px_time:
-            redis_set_params["px"] = self.result_px_time
-
+        name = self._task_name(task_id)
+        value = self.serializer.dumpb(model_dump(result))
         async with Redis(connection_pool=self.redis_pool) as redis:
-            await redis.set(**redis_set_params)
+            if self.result_ex_time:
+                await redis.set(name=name, value=value, ex=self.result_ex_time)
+            elif self.result_px_time:
+                await redis.set(name=name, value=value, px=self.result_px_time)
+            else:
+                await redis.set(name=name, value=value)
 
     async def is_result_ready(self, task_id: str) -> bool:
         """
@@ -195,22 +178,20 @@ class RedisAsyncResultBackend(AsyncResultBackend[_ReturnType]):
         :param task_id: ID of the task.
         :param result: task's TaskProgress instance.
         """
-…
-…
-            "value": self.serializer.dumpb(model_dump(progress)),
-        }
-        if self.result_ex_time:
-            redis_set_params["ex"] = self.result_ex_time
-        elif self.result_px_time:
-            redis_set_params["px"] = self.result_px_time
-
+        name = self._task_name(task_id) + PROGRESS_KEY_SUFFIX
+        value = self.serializer.dumpb(model_dump(progress))
         async with Redis(connection_pool=self.redis_pool) as redis:
-            await redis.set(**redis_set_params)
+            if self.result_ex_time:
+                await redis.set(name=name, value=value, ex=self.result_ex_time)
+            elif self.result_px_time:
+                await redis.set(name=name, value=value, px=self.result_px_time)
+            else:
+                await redis.set(name=name, value=value)
 
     async def get_progress(
         self,
         task_id: str,
-    ) -> Optional[TaskProgress[_ReturnType]]:
+    ) -> TaskProgress[_ReturnType] | None:
         """
         Gets progress results from the task.
 
@@ -238,10 +219,10 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         self,
         redis_url: str,
         keep_results: bool = True,
-        result_ex_time: Optional[int] = None,
-        result_px_time: Optional[int] = None,
-        serializer: Optional[TaskiqSerializer] = None,
-        prefix_str: Optional[str] = None,
+        result_ex_time: int | None = None,
+        result_px_time: int | None = None,
+        serializer: TaskiqSerializer | None = None,
+        prefix_str: str | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -258,7 +239,7 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         :raises ExpireTimeMustBeMoreThanZeroError: if result_ex_time
             and result_px_time are equal zero.
         """
-        self.redis: …
+        self.redis: RedisCluster = RedisCluster.from_url(
             redis_url,
             **connection_kwargs,
         )
@@ -296,24 +277,23 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         result: TaskiqResult[_ReturnType],
     ) -> None:
         """
-        Sets task result in redis.
+        Sets task result in redis cluster.
 
         Dumps TaskiqResult instance into the bytes and writes
-        it to redis.
+        it to redis cluster.
 
         :param task_id: ID of the task.
         :param result: TaskiqResult instance.
         """
-…
-…
-            "value": self.serializer.dumpb(model_dump(result)),
-        }
-        if self.result_ex_time:
-            redis_set_params["ex"] = self.result_ex_time
-        elif self.result_px_time:
-            redis_set_params["px"] = self.result_px_time
-
-        await self.redis.set(**redis_set_params)  # type: ignore
+        name = self._task_name(task_id)
+        value = self.serializer.dumpb(model_dump(result))
+        async with self.redis as redis:
+            if self.result_ex_time:
+                await redis.set(name=name, value=value, ex=self.result_ex_time)
+            elif self.result_px_time:
+                await redis.set(name=name, value=value, px=self.result_px_time)
+            else:
+                await redis.set(name=name, value=value)
 
     async def is_result_ready(self, task_id: str) -> bool:
         """
@@ -367,29 +347,28 @@ class RedisAsyncClusterResultBackend(AsyncResultBackend[_ReturnType]):
         progress: TaskProgress[_ReturnType],
     ) -> None:
         """
-        Sets task progress in redis.
+        Sets task progress in redis cluster.
 
         Dumps TaskProgress instance into the bytes and writes
-        it to redis with a standard suffix on the task_id as the key
+        it to redis cluster with a standard suffix on the task_id as the key
 
         :param task_id: ID of the task.
         :param result: task's TaskProgress instance.
         """
-…
-…
-            "value": self.serializer.dumpb(model_dump(progress)),
-        }
-        if self.result_ex_time:
-            redis_set_params["ex"] = self.result_ex_time
-        elif self.result_px_time:
-            redis_set_params["px"] = self.result_px_time
-
-        await self.redis.set(**redis_set_params)  # type: ignore
+        name = self._task_name(task_id) + PROGRESS_KEY_SUFFIX
+        value = self.serializer.dumpb(model_dump(progress))
+        async with self.redis as redis:
+            if self.result_ex_time:
+                await redis.set(name=name, value=value, ex=self.result_ex_time)
+            elif self.result_px_time:
+                await redis.set(name=name, value=value, px=self.result_px_time)
+            else:
+                await redis.set(name=name, value=value)
 
     async def get_progress(
         self,
         task_id: str,
-    ) -> Optional[TaskProgress[_ReturnType]]:
+    ) -> TaskProgress[_ReturnType] | None:
         """
         Gets progress results from the task.
 
@@ -414,15 +393,15 @@ class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):
 
     def __init__(
         self,
-        sentinels: List[Tuple[str, int]],
+        sentinels: list[tuple[str, int]],
         master_name: str,
         keep_results: bool = True,
-        result_ex_time: Optional[int] = None,
-        result_px_time: Optional[int] = None,
+        result_ex_time: int | None = None,
+        result_px_time: int | None = None,
         min_other_sentinels: int = 0,
-        sentinel_kwargs: Optional[Any] = None,
-        serializer: Optional[TaskiqSerializer] = None,
-        prefix_str: Optional[str] = None,
+        sentinel_kwargs: Any | None = None,
+        serializer: TaskiqSerializer | None = None,
+        prefix_str: str | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -490,17 +469,15 @@ class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):
         :param task_id: ID of the task.
         :param result: TaskiqResult instance.
         """
-…
-…
-            "value": self.serializer.dumpb(model_dump(result)),
-        }
-        if self.result_ex_time:
-            redis_set_params["ex"] = self.result_ex_time
-        elif self.result_px_time:
-            redis_set_params["px"] = self.result_px_time
-
+        name = self._task_name(task_id)
+        value = self.serializer.dumpb(model_dump(result))
         async with self._acquire_master_conn() as redis:
-            await redis.set(**redis_set_params)
+            if self.result_ex_time:
+                await redis.set(name=name, value=value, ex=self.result_ex_time)
+            elif self.result_px_time:
+                await redis.set(name=name, value=value, px=self.result_px_time)
+            else:
+                await redis.set(name=name, value=value)
 
     async def is_result_ready(self, task_id: str) -> bool:
         """
@@ -559,27 +536,25 @@ class RedisAsyncSentinelResultBackend(AsyncResultBackend[_ReturnType]):
         Sets task progress in redis.
 
         Dumps TaskProgress instance into the bytes and writes
-        it to redis with a standard suffix on the task_id as the key
+        it to redis via sentinel with a standard suffix on the task_id as the key
 
         :param task_id: ID of the task.
         :param result: task's TaskProgress instance.
         """
-…
-…
-            "value": self.serializer.dumpb(model_dump(progress)),
-        }
-        if self.result_ex_time:
-            redis_set_params["ex"] = self.result_ex_time
-        elif self.result_px_time:
-            redis_set_params["px"] = self.result_px_time
-
+        name = self._task_name(task_id) + PROGRESS_KEY_SUFFIX
+        value = self.serializer.dumpb(model_dump(progress))
         async with self._acquire_master_conn() as redis:
-            await redis.set(**redis_set_params)
+            if self.result_ex_time:
+                await redis.set(name=name, value=value, ex=self.result_ex_time)
+            elif self.result_px_time:
+                await redis.set(name=name, value=value, px=self.result_px_time)
+            else:
+                await redis.set(name=name, value=value)
 
     async def get_progress(
         self,
         task_id: str,
-    ) -> Optional[TaskProgress[_ReturnType]]:
+    ) -> TaskProgress[_ReturnType] | None:
         """
         Gets progress results from the task.
 
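The recurring rewrite in all three backends replaces a `redis_set_params` kwargs dict with three explicit `SET` calls: `ex=` (seconds), `px=` (milliseconds), or a plain `SET` with no expiry. The public interface is untouched; a minimal configuration sketch in the README's style:

```python
from taskiq_redis import ListQueueBroker, RedisAsyncResultBackend

# Results are written with SET ... EX 1000, i.e. they expire after
# 1000 seconds; result_px_time would use milliseconds instead.
result_backend = RedisAsyncResultBackend(
    redis_url="redis://localhost:6379",
    result_ex_time=1000,
)
broker = ListQueueBroker(url="redis://localhost:6379").with_result_backend(
    result_backend,
)
```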
--- taskiq_redis-1.1.1/taskiq_redis/redis_broker.py
+++ taskiq_redis-1.2.0/taskiq_redis/redis_broker.py
@@ -1,14 +1,10 @@
-import sys
 import uuid
+from collections.abc import AsyncGenerator, Awaitable, Callable
 from logging import getLogger
 from typing import (
     TYPE_CHECKING,
     Any,
-    AsyncGenerator,
-    Awaitable,
-    Callable,
-    Dict,
-    Optional,
+    TypeAlias,
     TypeVar,
 )
 
@@ -22,10 +18,6 @@ _T = TypeVar("_T")
 
 logger = getLogger("taskiq.redis_broker")
 
-if sys.version_info >= (3, 10):
-    from typing import TypeAlias
-else:
-    from typing_extensions import TypeAlias
 
 if TYPE_CHECKING:
     _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]  # type: ignore
@@ -39,10 +31,10 @@ class BaseRedisBroker(AsyncBroker):
     def __init__(
         self,
         url: str,
-        task_id_generator: Optional[Callable[[], str]] = None,
-        result_backend: Optional[AsyncResultBackend[_T]] = None,
+        task_id_generator: Callable[[], str] | None = None,
+        result_backend: AsyncResultBackend[_T] | None = None,
         queue_name: str = "taskiq",
-        max_connection_pool_size: Optional[int] = None,
+        max_connection_pool_size: int | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -122,7 +114,7 @@ class ListQueueBroker(BaseRedisBroker):
         """
         queue_name = message.labels.get("queue_name") or self.queue_name
         async with Redis(connection_pool=self.connection_pool) as redis_conn:
-            await redis_conn.lpush(queue_name, message.message)
+            await redis_conn.lpush(queue_name, message.message)  # type: ignore
 
     async def listen(self) -> AsyncGenerator[bytes, None]:
         """
@@ -137,7 +129,7 @@ class ListQueueBroker(BaseRedisBroker):
         while True:
             try:
                 async with Redis(connection_pool=self.connection_pool) as redis_conn:
-                    yield (await redis_conn.brpop(self.queue_name))[
+                    yield (await redis_conn.brpop(self.queue_name))[  # type: ignore
                         redis_brpop_data_position
                     ]
             except ConnectionError as exc:
@@ -159,18 +151,18 @@ class RedisStreamBroker(BaseRedisBroker):
         self,
         url: str,
         queue_name: str = "taskiq",
-        max_connection_pool_size: Optional[int] = None,
+        max_connection_pool_size: int | None = None,
         consumer_group_name: str = "taskiq",
-        consumer_name: Optional[str] = None,
+        consumer_name: str | None = None,
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 2000,
-        maxlen: Optional[int] = None,
+        maxlen: int | None = None,
         approximate: bool = True,
         idle_timeout: int = 600000,  # 10 minutes
         unacknowledged_batch_size: int = 100,
-        xread_count: Optional[int] = 100,
-        additional_streams: …
+        xread_count: int | None = 100,
+        additional_streams: dict[str, str | int] | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -260,11 +252,11 @@ class RedisStreamBroker(BaseRedisBroker):
                 approximate=self.approximate,
             )
 
-    def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
+    def _ack_generator(self, id: str, queue_name: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
             async with Redis(connection_pool=self.connection_pool) as redis_conn:
                 await redis_conn.xack(
-                    self.queue_name,
+                    queue_name,
                     self.consumer_group_name,
                     id,
                 )
@@ -281,18 +273,18 @@ class RedisStreamBroker(BaseRedisBroker):
                 self.consumer_name,
                 {
                     self.queue_name: ">",
-                    **self.additional_streams,
+                    **self.additional_streams,  # type: ignore[dict-item]
                 },
                 block=self.block,
                 noack=False,
                 count=self.count,
             )
-            for …
+            for stream, msg_list in fetched:
                 for msg_id, msg in msg_list:
                     logger.debug("Received message: %s", msg)
                     yield AckableMessage(
                         data=msg[b"data"],
-                        ack=self._ack_generator(msg_id),
+                        ack=self._ack_generator(id=msg_id, queue_name=stream),
                     )
             logger.debug("Starting fetching unacknowledged messages")
             for stream in [self.queue_name, *self.additional_streams.keys()]:
@@ -318,5 +310,5 @@ class RedisStreamBroker(BaseRedisBroker):
                     logger.debug("Received message: %s", msg)
                     yield AckableMessage(
                         data=msg[b"data"],
-                        ack=self._ack_generator(msg_id),
+                        ack=self._ack_generator(id=msg_id, queue_name=stream),
                     )
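The one behavioural fix in this file: `_ack_generator` now receives the stream a message was read from, so messages consumed from `additional_streams` are XACKed on their own stream rather than always on the primary queue. A hypothetical setup that exercises that path:

```python
from taskiq_redis import RedisStreamBroker

# With this release, a message read from "events" is acknowledged on
# "events"; previously the XACK always targeted queue_name.
broker = RedisStreamBroker(
    url="redis://localhost:6379",
    queue_name="taskiq",
    additional_streams={"events": "$"},  # stream name -> start id
)
```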
--- taskiq_redis-1.1.1/taskiq_redis/redis_cluster_broker.py
+++ taskiq_redis-1.2.0/taskiq_redis/redis_cluster_broker.py
@@ -1,6 +1,7 @@
 import uuid
+from collections.abc import AsyncGenerator, Awaitable, Callable
 from logging import getLogger
-from typing import Any, …
+from typing import Any
 
 from redis.asyncio import RedisCluster, ResponseError
 from taskiq import AckableMessage
@@ -30,7 +31,7 @@ class BaseRedisClusterBroker(AsyncBroker):
         """
         super().__init__()
 
-        self.redis: …
+        self.redis: RedisCluster[bytes] = RedisCluster.from_url(  # type: ignore
             url=url,
             max_connections=max_connection_pool_size,
             **connection_kwargs,
@@ -89,13 +90,13 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         queue_name: str = "taskiq",
         max_connection_pool_size: int = 2**31,
         consumer_group_name: str = "taskiq",
-        consumer_name: Optional[str] = None,
+        consumer_name: str | None = None,
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
-        maxlen: Optional[int] = None,
+        maxlen: int | None = None,
         approximate: bool = True,
-        additional_streams: Optional[Dict[str, str]] = None,
+        additional_streams: dict[str, str] | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -171,10 +172,10 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
             approximate=self.approximate,
         )
 
-    def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
+    def _ack_generator(self, id: str, queue_name: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
             await self.redis.xack(
-                self.queue_name,
+                queue_name,
                 self.consumer_group_name,
                 id,
             )
@@ -194,10 +195,10 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
                 block=self.block,
                 noack=False,
             )
-            for …
+            for stream, msg_list in fetched:
                 for msg_id, msg in msg_list:
                     logger.debug("Received message: %s", msg)
                     yield AckableMessage(
                         data=msg[b"data"],
-                        ack=self._ack_generator(msg_id),
+                        ack=self._ack_generator(id=msg_id, queue_name=stream),
                     )
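The cluster broker gets the same ack fix plus a typed `RedisCluster[bytes]` connection. A hypothetical construction, assuming `RedisStreamClusterBroker` is exported at the package top level like its siblings:

```python
from taskiq_redis import RedisStreamClusterBroker

# Parameter names are taken from the diff above.
broker = RedisStreamClusterBroker(
    url="redis://localhost:6379",
    queue_name="taskiq",
)
```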
--- taskiq_redis-1.1.1/taskiq_redis/redis_sentinel_broker.py
+++ taskiq_redis-1.2.0/taskiq_redis/redis_sentinel_broker.py
@@ -1,18 +1,11 @@
-import sys
 import uuid
+from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable
 from contextlib import asynccontextmanager
 from logging import getLogger
 from typing import (
     TYPE_CHECKING,
     Any,
-    AsyncGenerator,
-    AsyncIterator,
-    Awaitable,
-    Callable,
-    Dict,
-    List,
-    Optional,
-    Tuple,
+    TypeAlias,
     TypeVar,
 )
 
@@ -21,11 +14,6 @@ from redis.asyncio import Redis, Sentinel
 from taskiq import AckableMessage, AsyncResultBackend, BrokerMessage
 from taskiq.abc.broker import AsyncBroker
 
-if sys.version_info >= (3, 10):
-    from typing import TypeAlias
-else:
-    from typing_extensions import TypeAlias
-
 if TYPE_CHECKING:
     _Redis: TypeAlias = Redis[bytes]  # type: ignore
 else:
@@ -41,13 +29,13 @@ class BaseSentinelBroker(AsyncBroker):
 
     def __init__(
         self,
-        sentinels: List[Tuple[str, int]],
+        sentinels: list[tuple[str, int]],
         master_name: str,
-        result_backend: Optional[AsyncResultBackend[_T]] = None,
-        task_id_generator: Optional[Callable[[], str]] = None,
+        result_backend: AsyncResultBackend[_T] | None = None,
+        task_id_generator: Callable[[], str] | None = None,
         queue_name: str = "taskiq",
         min_other_sentinels: int = 0,
-        sentinel_kwargs: Optional[Any] = None,
+        sentinel_kwargs: Any | None = None,
         **connection_kwargs: Any,
     ) -> None:
         super().__init__(
@@ -148,18 +136,18 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
 
     def __init__(
         self,
-        sentinels: List[Tuple[str, int]],
+        sentinels: list[tuple[str, int]],
         master_name: str,
         min_other_sentinels: int = 0,
         queue_name: str = "taskiq",
         consumer_group_name: str = "taskiq",
-        consumer_name: Optional[str] = None,
+        consumer_name: str | None = None,
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
-        maxlen: Optional[int] = None,
+        maxlen: int | None = None,
         approximate: bool = True,
-        additional_streams: Optional[Dict[str, str]] = None,
+        additional_streams: dict[str, str] | None = None,
         **connection_kwargs: Any,
     ) -> None:
         """
@@ -239,11 +227,11 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
             approximate=self.approximate,
         )
 
-    def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
+    def _ack_generator(self, id: str, queue_name: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
             async with self._acquire_master_conn() as redis_conn:
                 await redis_conn.xack(
-                    self.queue_name,
+                    queue_name,
                     self.consumer_group_name,
                     id,
                 )
@@ -264,10 +252,10 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
                 block=self.block,
                 noack=False,
             )
-            for …
+            for stream, msg_list in fetched:
                 for msg_id, msg in msg_list:
                     logger.debug("Received message: %s", msg)
                     yield AckableMessage(
                         data=msg[b"data"],
-                        ack=self._ack_generator(msg_id),
+                        ack=self._ack_generator(id=msg_id, queue_name=stream),
                     )
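`sentinels` is now `list[tuple[str, int]]`, i.e. `(host, port)` pairs pointing at the Sentinel processes rather than the Redis master, and the same ack fix applies. A hypothetical construction, assuming the top-level export:

```python
from taskiq_redis import RedisStreamSentinelBroker

broker = RedisStreamSentinelBroker(
    sentinels=[("localhost", 26379)],  # sentinel endpoints, not the master
    master_name="mymaster",
)
```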
--- taskiq_redis-1.1.1/taskiq_redis/schedule_source.py
+++ taskiq_redis-1.2.0/taskiq_redis/schedule_source.py
@@ -1,7 +1,7 @@
-import sys
 import warnings
+from collections.abc import AsyncIterator
 from contextlib import asynccontextmanager
-from typing import TYPE_CHECKING, Any, …
+from typing import TYPE_CHECKING, Any, TypeAlias
 
 from redis.asyncio import (
     BlockingConnectionPool,
@@ -16,11 +16,6 @@ from taskiq.compat import model_dump, model_validate
 from taskiq.scheduler.scheduled_task import ScheduledTask
 from taskiq.serializers import PickleSerializer
 
-if sys.version_info >= (3, 10):
-    from typing import TypeAlias
-else:
-    from typing_extensions import TypeAlias
-
 if TYPE_CHECKING:
     _Redis: TypeAlias = Redis[bytes]  # type: ignore
     _BlockingConnectionPool: TypeAlias = BlockingConnectionPool[Connection]  # type: ignore
@@ -50,8 +45,8 @@ class RedisScheduleSource(ScheduleSource):
         url: str,
         prefix: str = "schedule",
         buffer_size: int = 50,
-        max_connection_pool_size: Optional[int] = None,
-        serializer: Optional[TaskiqSerializer] = None,
+        max_connection_pool_size: int | None = None,
+        serializer: TaskiqSerializer | None = None,
         **connection_kwargs: Any,
     ) -> None:
         warnings.warn(
@@ -89,7 +84,7 @@ class RedisScheduleSource(ScheduleSource):
             self.serializer.dumpb(model_dump(schedule)),
         )
 
-    async def get_schedules(self) -> List[ScheduledTask]:
+    async def get_schedules(self) -> list[ScheduledTask]:
         """
         Get all schedules from redis.
 
@@ -143,11 +138,11 @@ class RedisClusterScheduleSource(ScheduleSource):
         self,
         url: str,
         prefix: str = "schedule",
-        serializer: Optional[TaskiqSerializer] = None,
+        serializer: TaskiqSerializer | None = None,
         **connection_kwargs: Any,
     ) -> None:
         self.prefix = prefix
-        self.redis: …
+        self.redis: RedisCluster = RedisCluster.from_url(
             url,
             **connection_kwargs,
         )
@@ -171,7 +166,7 @@ class RedisClusterScheduleSource(ScheduleSource):
             self.serializer.dumpb(model_dump(schedule)),
         )
 
-    async def get_schedules(self) -> List[ScheduledTask]:
+    async def get_schedules(self) -> list[ScheduledTask]:
         """
         Get all schedules from redis.
 
@@ -218,13 +213,13 @@ class RedisSentinelScheduleSource(ScheduleSource):
 
     def __init__(
         self,
-        sentinels: List[Tuple[str, int]],
+        sentinels: list[tuple[str, int]],
         master_name: str,
         prefix: str = "schedule",
         buffer_size: int = 50,
-        serializer: Optional[TaskiqSerializer] = None,
+        serializer: TaskiqSerializer | None = None,
         min_other_sentinels: int = 0,
-        sentinel_kwargs: Optional[Any] = None,
+        sentinel_kwargs: Any | None = None,
         **connection_kwargs: Any,
     ) -> None:
         self.prefix = prefix
@@ -263,7 +258,7 @@ class RedisSentinelScheduleSource(ScheduleSource):
             self.serializer.dumpb(model_dump(schedule)),
         )
 
-    async def get_schedules(self) -> List[ScheduledTask]:
+    async def get_schedules(self) -> list[ScheduledTask]:
         """
         Get all schedules from redis.
 

Files without changes:

- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/LICENSE
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/README.md
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/__init__.py
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/exceptions.py
- {taskiq_redis-1.1.1 → taskiq_redis-1.2.0}/taskiq_redis/py.typed