ltq 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ltq/__init__.py +11 -5
- ltq/app.py +30 -4
- ltq/broker.py +135 -0
- ltq/cli.py +112 -43
- ltq/errors.py +2 -2
- ltq/logger.py +13 -6
- ltq/message.py +6 -6
- ltq/middleware.py +95 -97
- ltq/scheduler.py +56 -37
- ltq/task.py +7 -10
- ltq/utils.py +0 -18
- ltq/worker.py +74 -67
- ltq-0.3.2.dist-info/METADATA +218 -0
- ltq-0.3.2.dist-info/RECORD +16 -0
- {ltq-0.3.0.dist-info → ltq-0.3.2.dist-info}/WHEEL +1 -1
- ltq/q.py +0 -82
- ltq-0.3.0.dist-info/METADATA +0 -137
- ltq-0.3.0.dist-info/RECORD +0 -16
- {ltq-0.3.0.dist-info → ltq-0.3.2.dist-info}/entry_points.txt +0 -0
ltq/middleware.py
CHANGED
@@ -1,117 +1,115 @@
 from __future__ import annotations
 
-import
+from datetime import timedelta
+from functools import lru_cache
+import random
 import time
 from abc import ABC, abstractmethod
-from
+from contextlib import asynccontextmanager
+from typing import TYPE_CHECKING, AsyncIterator
 
-from .errors import
-from .message import Message
-from .logger import get_logger
+from .errors import RejectError, RetryError
 
-
-
+if TYPE_CHECKING:
+    from .message import Message
+    from .task import Task
 
 
 class Middleware(ABC):
     @abstractmethod
-
+    @asynccontextmanager
+    async def __call__(self, message: Message, task: Task) -> AsyncIterator[None]:
+        yield
 
 
-class
-
-
-
-
-
-
-    ):
-        self.max_retries = max_retries
-        self.min_delay = min_delay
-        self.max_delay = max_delay
-        self.backoff = backoff
-
-    async def handle(self, message: Message, next_handler: Handler) -> Any:
-        retries = message.ctx.get("retries", 0)
+class MaxTries(Middleware):
+    @asynccontextmanager
+    async def __call__(self, message: Message, task: Task) -> AsyncIterator[None]:
+        max_tries = task.options.get("max_tries")
+        if max_tries is not None:
+            if message.ctx.get("tries", 0) >= max_tries:
+                raise RejectError(f"Message {message.id} exceeded max tries")
 
         try:
-
-        except Exception
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            self.
-
-
-
-
-
-
-
-
-
-
-
+            yield
+        except Exception:
+            if not message.ctx.pop("rate_limited", False):
+                message.ctx["tries"] = message.ctx.get("tries", 0) + 1
+            raise
+
+
+class MaxAge(Middleware):
+    @asynccontextmanager
+    async def __call__(self, message: Message, task: Task) -> AsyncIterator[None]:
+        max_age: timedelta | None = task.options.get("max_age")
+        created_at = message.ctx.get("created_at")
+
+        if max_age is not None and created_at is not None:
+            age = time.time() - float(created_at)
+            if age > max_age.total_seconds():
+                raise RejectError(f"Message {message.id} too old")
+
+        yield
+
+
+class MaxRate(Middleware):
+    def __init__(self) -> None:
+        self.last_times: dict[str, float] = {}
+
+    @lru_cache(maxsize=128)
+    def _parse_rate(self, rate: str) -> float:
+        count, unit = rate.split("/")
+        count = float(count)
+        unit = unit.strip().lower()
+
+        if unit == "s":
+            return count
+        elif unit == "m":
+            return count / 60
+        elif unit == "h":
+            return count / 3600
+        else:
+            raise ValueError(f"Invalid rate unit: {unit}. Use 's', 'm', or 'h'")
+
+    @asynccontextmanager
+    async def __call__(self, message: Message, task: Task) -> AsyncIterator[None]:
+        max_rate = task.options.get("max_rate")
+        if max_rate:
+            now = time.time()
+            last = self.last_times.get(message.task_name, 0.0)
+            elapsed = now - last
+            rate_per_sec = self._parse_rate(max_rate)
+            interval = 1.0 / rate_per_sec
+
+            if elapsed < interval:
+                base_delay = interval - elapsed
+                delay = base_delay * 0.5 + random.uniform(0, base_delay * 0.5)
+                message.ctx["rate_limited"] = True
+                raise RetryError(delay=delay)
+
+            self.last_times[message.task_name] = now
+        yield
 
 
-class
-    def __init__(self,
-        self.
-
-
-        return await asyncio.wait_for(next_handler(message), timeout=self.timeout)
+class Sentry(Middleware):
+    def __init__(self, dsn: str) -> None:
+        self.sentry = None
+        try:
+            import sentry_sdk  # type: ignore
 
+            sentry_sdk.init(dsn=dsn)
+            self.sentry = sentry_sdk
+        except ImportError:
+            pass
 
-
-    def
+    @asynccontextmanager
+    async def __call__(self, message: Message, task: Task) -> AsyncIterator[None]:
         try:
-
-        except
-
-                "Sentry middleware requires optional dependency 'sentry-sdk'. "
-                "Install with 'ltq[sentry]'."
-            ) from exc
-
-        self.sentry = sentry_sdk
-        self.sentry.init(dsn=dsn, send_default_pii=True, **kwargs)
-
-    async def handle(self, message: Message, next_handler: Handler) -> Any:
-        with self.sentry.push_scope() as scope:
-            scope.set_tag("task", message.task_name)
-            scope.set_tag("message_id", message.id)
-            scope.set_context(
-                "message",
-                {
-                    "id": message.id,
-                    "task": message.task_name,
-                    "args": message.args,
-                    "kwargs": message.kwargs,
-                    "ctx": message.ctx,
-                },
-            )
-
-            try:
-                return await next_handler(message)
-            except RetryMessage:
-                raise
-            except Exception as e:
+            yield
+        except Exception as e:
+            if self.sentry:
                 self.sentry.capture_exception(e)
-
+            raise
+
+
+DEFAULT: list[Middleware] = [MaxTries(), MaxAge(), MaxRate()]
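In 0.3.2 the middleware protocol drops the old handle(message, next_handler) chain in favour of async context managers: each middleware wraps task execution and yields exactly once. A minimal sketch of a custom middleware written against the Middleware base class shown above; the Timing class and its print-based reporting are illustrative, not part of ltq:

from __future__ import annotations

import time
from contextlib import asynccontextmanager
from typing import TYPE_CHECKING, AsyncIterator

from ltq.middleware import Middleware

if TYPE_CHECKING:
    from ltq.message import Message
    from ltq.task import Task


class Timing(Middleware):
    @asynccontextmanager
    async def __call__(self, message: Message, task: Task) -> AsyncIterator[None]:
        # Code before the yield runs before the task function; the finally
        # block runs once the task has finished or raised.
        start = time.monotonic()
        try:
            yield
        finally:
            print(f"{task.name} took {time.monotonic() - start:.3f}s")

Per worker.py further down, an instance like this could be attached with worker.register_middleware(Timing()) or passed in the middlewares list.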
ltq/scheduler.py
CHANGED
@@ -1,27 +1,22 @@
 from __future__ import annotations
 
 import asyncio
-import time
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import
+from typing import Any
 
+from .broker import Broker
 from .message import Message
-from .utils import dispatch
 from .logger import get_logger
 
 try:
-    from croniter import croniter
+    from croniter import croniter  # type: ignore
 except ImportError:
     croniter = None
 
-if TYPE_CHECKING:
-    from .task import Task
-
 
 @dataclass
 class ScheduledJob:
-    task: Task
     msg: Message
     expr: str
     _cron: Any = field(init=False, repr=False)  # croniter instance
@@ -36,11 +31,16 @@ class ScheduledJob:
 
 
 class Scheduler:
-    def __init__(
+    def __init__(
+        self,
+        broker_url: str = "redis://localhost:6379",
+        poll_interval: float = 10.0,
+    ) -> None:
+        self.broker = Broker.from_url(broker_url)
         self.poll_interval = poll_interval
         self.jobs: list[ScheduledJob] = []
-        self.logger = get_logger("
-        self.
+        self.logger = get_logger("scheduler")
+        self.task: asyncio.Task[None] | None = None
 
     def cron(self, expr: str, msg: Message) -> None:
         if croniter is None:
@@ -48,35 +48,54 @@ class Scheduler:
                 "Scheduler requires optional dependency 'croniter'. "
                 "Install with 'ltq[scheduler]'."
             )
-
-            raise ValueError("Message must have a task assigned to use with scheduler")
-        self.jobs.append(ScheduledJob(msg.task, msg, expr))
+        self.jobs.append(ScheduledJob(msg, expr))
 
-    def run(self) -> None:
-        self._running = True
+    async def run(self) -> None:
         self.logger.info("Starting scheduler")
         for job in self.jobs:
             self.logger.info(
-                f"{job.
+                f"{job.msg.task_name} [{job.expr}] next={job.next_run:%H:%M:%S}"
             )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            while True:
+                now = datetime.now()
+                due = [job for job in self.jobs if now >= job.next_run]
+
+                if due:
+                    try:
+                        for job in due:
+                            await self.broker.publish(job.msg)
+                            self.logger.info(
+                                f"Enqueued {job.msg.task_name} scheduled={job.next_run:%H:%M:%S}"
+                            )
+                            job.advance()
+                    except Exception:
+                        self.logger.exception("Failed to send scheduled jobs")
+                        # Don't advance jobs on failure - they'll retry next poll
+
+                await asyncio.sleep(self.poll_interval)
+        finally:
+            await self.broker.close()
+
+    def start(self) -> None:
+        try:
+            asyncio.run(self.run())
+        except KeyboardInterrupt:
+            self.logger.info("Scheduler stopped")
+
+    def start_background(self) -> None:
+        if self.task is not None:
+            raise RuntimeError("Scheduler is already running")
+        self.task = asyncio.create_task(self.run())
+
+    async def stop(self) -> None:
+        if self.task is None:
+            return
+        self.task.cancel()
+        try:
+            await self.task
+        except asyncio.CancelledError:
+            pass
+        self.task = None
+        self.logger.info("Scheduler stopped")
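The rewritten scheduler owns its broker connection and enqueues plain messages rather than tasks. A rough usage sketch; the cleanup task (assumed to be defined elsewhere with the worker decorator), the cron expression, and the Redis URL are placeholders, and cleanup.message() is the Task helper shown in task.py below:

from ltq.scheduler import Scheduler

scheduler = Scheduler(broker_url="redis://localhost:6379", poll_interval=10.0)

# cron() takes a cron expression plus a pre-built Message; it requires the
# optional 'ltq[scheduler]' extra, per the RuntimeError above.
scheduler.cron("*/5 * * * *", cleanup.message())

# Blocks and polls until interrupted; start_background()/stop() are the
# alternatives when an event loop is already running.
scheduler.start()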
ltq/task.py
CHANGED
@@ -2,8 +2,8 @@ from __future__ import annotations
 
 from typing import Awaitable, Callable, Generic, ParamSpec, TypeVar
 
+from .broker import Broker
 from .message import Message
-from .q import Queue
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -12,28 +12,25 @@ R = TypeVar("R")
 class Task(Generic[P, R]):
     def __init__(
         self,
+        broker: Broker,
         name: str,
         fn: Callable[P, Awaitable[R]],
-
-        ttl: int | None = None,
+        options: dict | None = None,
     ) -> None:
         self.name = name
         self.fn = fn
-        self.
-        self.
+        self.options = options or {}
+        self.broker = broker
 
     def message(self, *args: P.args, **kwargs: P.kwargs) -> Message:
         return Message(
             args=args,
             kwargs=kwargs,
-            task=self,
             task_name=self.name,
         )
 
-    async def send(self, *args: P.args, **kwargs: P.kwargs) ->
-
-        await self.queue.put([message], ttl=self.ttl)
-        return message.id
+    async def send(self, *args: P.args, **kwargs: P.kwargs) -> None:
+        await self.broker.publish(self.message(*args, **kwargs))
 
     async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
         return await self.fn(*args, **kwargs)
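The reworked Task keeps three distinct entry points: call it inline, build a Message, or publish through the broker. A small self-contained sketch; in practice the @worker.task decorator from worker.py below constructs the Task, and the names and URL here are invented:

import asyncio

from ltq.broker import Broker
from ltq.task import Task


async def resize_fn(image_id: int) -> str:
    return f"resized {image_id}"


# Normally Worker.task() builds this; doing it by hand keeps the example standalone.
resize = Task(
    broker=Broker.from_url("redis://localhost:6379"),
    name="demo:resize",
    fn=resize_fn,
    options={},
)


async def demo() -> None:
    print(await resize(7))   # run inline; the broker is never touched
    msg = resize.message(7)  # build a Message without enqueuing it (what Scheduler.cron() consumes)
    await resize.send(7)     # publish via the broker; 0.3.2 no longer returns the message id


asyncio.run(demo())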
ltq/utils.py
CHANGED
@@ -1,19 +1 @@
-from __future__ import annotations
 
-from collections import defaultdict
-
-from .message import Message
-from .q import Queue
-
-
-async def dispatch(messages: list[Message]) -> list[str]:
-    by_queue: defaultdict[Queue, list[Message]] = defaultdict(list)
-    for msg in messages:
-        if msg.task is None:
-            raise ValueError(f"Message {msg.id} has no task assigned")
-        by_queue[msg.task.queue].append(msg)
-
-    for queue, batch in by_queue.items():
-        await queue.put(batch)
-
-    return [msg.id for msg in messages]
ltq/worker.py
CHANGED
@@ -1,24 +1,15 @@
 from __future__ import annotations
 
 import asyncio
-from
-from
-from typing import TYPE_CHECKING, Any, Awaitable, Callable, ParamSpec, TypeVar
+from contextlib import AsyncExitStack
+from typing import Awaitable, Callable, ParamSpec, TypeVar
 
-
-
-from .errors import RetryMessage
-from .task import Task
-from .message import Message
-from .middleware import Handler, Middleware
-from .q import Queue
+from .broker import Broker
+from .errors import RejectError, RetryError
 from .logger import get_logger
-
-
-
-
-logger = get_logger()
-
+from .message import Message
+from .middleware import DEFAULT, Middleware
+from .task import Task
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -27,82 +18,98 @@ R = TypeVar("R")
 class Worker:
     def __init__(
         self,
-
+        name: str,
+        broker_url: str = "redis://localhost:6379",
+        concurrency: int = 100,
         middlewares: list[Middleware] | None = None,
-        concurrency: int = 250,
-        poll_sleep: float = 0.1,
     ) -> None:
-        self.
+        self.name = name
+        self.broker = Broker.from_url(broker_url)
         self.tasks: list[Task] = []
-        self.middlewares: list[Middleware] = middlewares or
+        self.middlewares: list[Middleware] = middlewares or list(DEFAULT)
         self.concurrency: int = concurrency
-        self.
+        self.logger = get_logger(name)
 
+    def register_middleware(self, middleware: Middleware, pos: int = -1) -> None:
+        if pos == -1:
+            self.middlewares.append(middleware)
+        else:
+            self.middlewares.insert(pos, middleware)
 
     def task(
         self,
-
-        ttl: int | None = None,
+        **options,
     ) -> Callable[[Callable[P, Awaitable[R]]], Task[P, R]]:
         def decorator(fn: Callable[P, Awaitable[R]]) -> Task[P, R]:
-
-            task_name = f"{filename}:{fn.__qualname__}"
-            queue = Queue(self.client, queue_name or task_name)
+            task_name = f"{self.name}:{fn.__qualname__}"
            task = Task(
                name=task_name,
                fn=fn,
-
-
+                options=options,
+                broker=self.broker,
            )
            self.tasks.append(task)
            return task
 
        return decorator
 
-    async def
-        async def base(message: Message) -> Any:
-            return await task.fn(*message.args, **message.kwargs)
-
-        handler: Handler = base
-        for middleware in reversed(self.middlewares):
-            handler = partial(middleware.handle, next_handler=handler)
-
+    async def _poll(self, task: Task, broker: Broker) -> None:
         sem = asyncio.Semaphore(self.concurrency)
-
+        self.logger.info(f"Polling for Task {task.name}")
 
-
-
-
-
-
-
-
-
-
-                f"Rejected after error in {task.name}: {e}",
-                exc_info=True,
-            )
-
-        while True:
-            messages = await task.queue.get(self.concurrency)
-            if not messages:
-                await asyncio.sleep(self.poll_sleep)
-                continue
-
-            logger.debug(f"Processing {len(messages)} messages for {task.name}")
+        try:
+            while True:
+                message = await broker.consume(task.name)
+                # concurrency limiter, without, queue would be drained in one go.
+                await sem.acquire()
+                asyncio.create_task(self._process(task, broker, sem, message))
+        except asyncio.CancelledError:
+            self.logger.info(f"Worker {task.name} cancelled...")
+            raise
 
-
-
-
+    async def _process(
+        self,
+        task: Task,
+        broker: Broker,
+        sem: asyncio.Semaphore,
+        message: Message,
+    ) -> None:
+        try:
+            self.logger.debug(f"Processing message {message.id}")
+            try:
+                if message.task_name != task.name:
+                    # This should never happen.
+                    raise RejectError(
+                        f"Message {message.id} for unknown task '{message.task_name}' (expected '{task.name}')"
+                    )
 
-
-
+                async with AsyncExitStack() as stack:
+                    for middleware in self.middlewares:
+                        await stack.enter_async_context(middleware(message, task))
+                    await task.fn(*message.args, **message.kwargs)
+
+                await broker.ack(message)
+            except RejectError as e:
+                self.logger.warning(f"Message {message.id} rejected: {e}")
+                await broker.nack(message, drop=True)
+            except RetryError as e:
+                self.logger.debug(f"Retrying in {e.delay}s: {e}")
+                await broker.nack(message, delay=e.delay or 0)
+            except Exception as e:
+                self.logger.error(
+                    f"Rejected after error in {task.name}: {e}",
+                    exc_info=True,
+                )
+                await broker.nack(message, drop=True)
+        finally:
+            sem.release()
 
     async def run(self) -> None:
         try:
-
-
+            await asyncio.gather(
+                *[self._poll(task, self.broker) for task in self.tasks]
+            )
         except asyncio.CancelledError:
-            logger.info("Worker shutting down...")
+            self.logger.info("Worker shutting down...")
         finally:
-            await self.
+            await self.broker.close()
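Putting the pieces together, a minimal end-to-end sketch of the 0.3.2 worker API as this diff presents it; the worker name, task body, and option values are invented, and the options feed the MaxTries/MaxAge/MaxRate middlewares from middleware.py above:

import asyncio
from datetime import timedelta

from ltq.worker import Worker

worker = Worker("emails", broker_url="redis://localhost:6379", concurrency=100)


@worker.task(max_tries=5, max_age=timedelta(hours=1), max_rate="10/s")
async def send_welcome(user_id: int) -> None:
    print(f"welcome, user {user_id}")


async def main() -> None:
    await send_welcome.send(42)  # publish one message via the broker
    await worker.run()           # poll each registered task until cancelled


if __name__ == "__main__":
    asyncio.run(main())

run() gathers one _poll() loop per registered task and fans incoming messages out to _process() under the concurrency semaphore, so a single worker process serves all of its tasks.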