jararaca 0.3.11a10__py3-none-any.whl → 0.3.11a11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of jararaca might be problematic.
- jararaca/__init__.py +0 -3
- jararaca/cli.py +19 -143
- jararaca/messagebus/worker.py +260 -39
- jararaca/scheduler/{scheduler_v2.py → beat_worker.py} +11 -23
- jararaca/utils/rabbitmq_utils.py +5 -0
- {jararaca-0.3.11a10.dist-info → jararaca-0.3.11a11.dist-info}/METADATA +1 -1
- {jararaca-0.3.11a10.dist-info → jararaca-0.3.11a11.dist-info}/RECORD +10 -12
- jararaca/messagebus/worker_v2.py +0 -644
- jararaca/scheduler/scheduler.py +0 -181
- {jararaca-0.3.11a10.dist-info → jararaca-0.3.11a11.dist-info}/LICENSE +0 -0
- {jararaca-0.3.11a10.dist-info → jararaca-0.3.11a11.dist-info}/WHEEL +0 -0
- {jararaca-0.3.11a10.dist-info → jararaca-0.3.11a11.dist-info}/entry_points.txt +0 -0
jararaca/messagebus/worker_v2.py
DELETED
@@ -1,644 +0,0 @@
-import asyncio
-import inspect
-import logging
-import signal
-from abc import ABC
-from contextlib import asynccontextmanager, suppress
-from dataclasses import dataclass
-from datetime import UTC, datetime
-from typing import Any, AsyncContextManager, AsyncGenerator, Type, get_origin
-from urllib.parse import parse_qs, urlparse
-
-import aio_pika
-import aio_pika.abc
-import uvloop
-from aio_pika.exceptions import AMQPError, ChannelClosed, ChannelNotFoundEntity
-from pydantic import BaseModel
-
-from jararaca.broker_backend import MessageBrokerBackend
-from jararaca.broker_backend.mapper import get_message_broker_backend_from_url
-from jararaca.core.uow import UnitOfWorkContextProvider
-from jararaca.di import Container
-from jararaca.lifecycle import AppLifecycle
-from jararaca.messagebus.bus_message_controller import (
-    BusMessageController,
-    provide_bus_message_controller,
-)
-from jararaca.messagebus.decorators import (
-    MESSAGE_HANDLER_DATA_SET,
-    SCHEDULED_ACTION_DATA_SET,
-    MessageBusController,
-    MessageHandler,
-    MessageHandlerData,
-    ScheduleDispatchData,
-)
-from jararaca.messagebus.message import Message, MessageOf
-from jararaca.microservice import (
-    AppTransactionContext,
-    MessageBusTransactionData,
-    Microservice,
-    SchedulerTransactionData,
-)
-from jararaca.scheduler.decorators import ScheduledActionData
-from jararaca.utils.rabbitmq_utils import RabbitmqUtils
-
-logger = logging.getLogger(__name__)
-
-
-@dataclass
-class AioPikaWorkerConfig:
-    url: str
-    exchange: str
-    prefetch_count: int
-
-
-class AioPikaMessage(MessageOf[Message]):
-
-    def __init__(
-        self,
-        aio_pika_message: aio_pika.abc.AbstractIncomingMessage,
-        model_type: Type[Message],
-    ):
-        self.aio_pika_message = aio_pika_message
-        self.model_type = model_type
-
-    def payload(self) -> Message:
-        return self.model_type.model_validate_json(self.aio_pika_message.body)
-
-
-class MessageProcessingLocker:
-
-    def __init__(self) -> None:
-        self.messages_lock = asyncio.Lock()
-        self.current_processing_messages_set: set[asyncio.Task[Any]] = set()
-
-    @asynccontextmanager
-    async def lock_message_task(
-        self, task: asyncio.Task[Any]
-    ) -> AsyncGenerator[None, Any]:
-        async with self.messages_lock:
-            self.current_processing_messages_set.add(task)
-        try:
-            yield
-        finally:
-            self.current_processing_messages_set.discard(task)
-
-    async def wait_all_messages_processed(self) -> None:
-        if len(self.current_processing_messages_set) == 0:
-            return
-
-        await asyncio.gather(*self.current_processing_messages_set)
-
-
-class MessageBusConsumer(ABC):
-
-    async def consume(self) -> None:
-        raise NotImplementedError("consume method not implemented")
-
-    def shutdown(self) -> None: ...
-
-
-class AioPikaMicroserviceConsumer(MessageBusConsumer):
-    def __init__(
-        self,
-        broker_backend: MessageBrokerBackend,
-        config: AioPikaWorkerConfig,
-        message_handler_set: MESSAGE_HANDLER_DATA_SET,
-        scheduled_actions: SCHEDULED_ACTION_DATA_SET,
-        uow_context_provider: UnitOfWorkContextProvider,
-    ):
-
-        self.broker_backend = broker_backend
-        self.config = config
-        self.message_handler_set = message_handler_set
-        self.scheduled_actions = scheduled_actions
-        self.incoming_map: dict[str, MessageHandlerData] = {}
-        self.uow_context_provider = uow_context_provider
-        self.shutdown_event = asyncio.Event()
-        self.lock = asyncio.Lock()
-        self.tasks: set[asyncio.Task[Any]] = set()
-
-    async def consume(self) -> None:
-
-        connection = await aio_pika.connect(self.config.url)
-
-        channel = await connection.channel()
-
-        await channel.set_qos(prefetch_count=self.config.prefetch_count)
-
-        # Get existing exchange and queues
-        try:
-            exchange = await RabbitmqUtils.get_main_exchange(
-                channel=channel,
-                exchange_name=self.config.exchange,
-            )
-
-            dlx = await RabbitmqUtils.get_dl_exchange(channel=channel)
-            dlq = await RabbitmqUtils.get_dl_queue(channel=channel)
-        except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as e:
-            logger.critical(
-                f"Required exchange or queue infrastructure not found and passive mode is enabled. "
-                f"Please use the declare command first to create the required infrastructure. Error: {e}"
-            )
-            self.shutdown_event.set()
-            return
-
-        for handler in self.message_handler_set:
-
-            queue_name = f"{handler.message_type.MESSAGE_TOPIC}.{handler.instance_callable.__module__}.{handler.instance_callable.__qualname__}"
-            routing_key = f"{handler.message_type.MESSAGE_TOPIC}.#"
-
-            self.incoming_map[queue_name] = handler
-
-            try:
-                queue = await RabbitmqUtils.get_queue(
-                    channel=channel, queue_name=queue_name
-                )
-            except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as e:
-                logger.error(
-                    f"Queue '{queue_name}' not found and passive mode is enabled. "
-                    f"Please use the declare command first to create the queue. Error: {e}"
-                )
-                continue
-
-            await queue.consume(
-                callback=MessageHandlerCallback(
-                    consumer=self,
-                    queue_name=queue_name,
-                    routing_key=routing_key,
-                    message_handler=handler,
-                ),
-                no_ack=handler.spec.auto_ack,
-            )
-
-            logger.info(f"Consuming message handler {queue_name}")
-
-        for scheduled_action in self.scheduled_actions:
-
-            queue_name = f"{scheduled_action.callable.__module__}.{scheduled_action.callable.__qualname__}"
-
-            routing_key = queue_name
-
-            try:
-                queue = await RabbitmqUtils.get_queue(
-                    channel=channel, queue_name=queue_name
-                )
-            except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as e:
-                logger.error(
-                    f"Scheduler queue '{queue_name}' not found and passive mode is enabled. "
-                    f"Please use the declare command first to create the queue. Error: {e}"
-                )
-                continue
-
-            await queue.consume(
-                callback=ScheduledMessageHandlerCallback(
-                    consumer=self,
-                    queue_name=queue_name,
-                    routing_key=routing_key,
-                    scheduled_action=scheduled_action,
-                ),
-                no_ack=True,
-            )
-
-            logger.info(f"Consuming scheduler {queue_name}")
-
-        await self.shutdown_event.wait()
-        logger.info("Worker shutting down")
-
-        await self.wait_all_tasks_done()
-
-        await channel.close()
-        await connection.close()
-
-    async def wait_all_tasks_done(self) -> None:
-        async with self.lock:
-            await asyncio.gather(*self.tasks)
-
-    def shutdown(self) -> None:
-        self.shutdown_event.set()
-
-
-def create_message_bus(
-    broker_url: str,
-    broker_backend: MessageBrokerBackend,
-    scheduled_actions: SCHEDULED_ACTION_DATA_SET,
-    message_handler_set: MESSAGE_HANDLER_DATA_SET,
-    uow_context_provider: UnitOfWorkContextProvider,
-) -> MessageBusConsumer:
-
-    parsed_url = urlparse(broker_url)
-
-    if parsed_url.scheme == "amqp" or parsed_url.scheme == "amqps":
-        assert parsed_url.query, "Query string must be set for AMQP URLs"
-
-        query_params: dict[str, list[str]] = parse_qs(parsed_url.query)
-
-        assert "exchange" in query_params, "Exchange must be set in the query string"
-        assert (
-            len(query_params["exchange"]) == 1
-        ), "Exchange must be set in the query string"
-        assert (
-            "prefetch_count" in query_params
-        ), "Prefetch count must be set in the query string"
-        assert (
-            len(query_params["prefetch_count"]) == 1
-        ), "Prefetch count must be set in the query string"
-        assert query_params["prefetch_count"][
-            0
-        ].isdigit(), "Prefetch count must be an integer in the query string"
-        assert query_params["exchange"][0], "Exchange must be set in the query string"
-        assert query_params["prefetch_count"][
-            0
-        ], "Prefetch count must be set in the query string"
-
-        exchange = query_params["exchange"][0]
-        prefetch_count = int(query_params["prefetch_count"][0])
-
-        config = AioPikaWorkerConfig(
-            url=broker_url,
-            exchange=exchange,
-            prefetch_count=prefetch_count,
-        )
-
-        return AioPikaMicroserviceConsumer(
-            config=config,
-            broker_backend=broker_backend,
-            message_handler_set=message_handler_set,
-            scheduled_actions=scheduled_actions,
-            uow_context_provider=uow_context_provider,
-        )
-
-    raise ValueError(
-        f"Unsupported broker URL scheme: {parsed_url.scheme}. Supported schemes are amqp and amqps"
-    )
-
-
-class ScheduledMessageHandlerCallback:
-    def __init__(
-        self,
-        consumer: AioPikaMicroserviceConsumer,
-        queue_name: str,
-        routing_key: str,
-        scheduled_action: ScheduledActionData,
-    ):
-        self.consumer = consumer
-        self.queue_name = queue_name
-        self.routing_key = routing_key
-        self.scheduled_action = scheduled_action
-
-    async def __call__(
-        self, aio_pika_message: aio_pika.abc.AbstractIncomingMessage
-    ) -> None:
-
-        if self.consumer.shutdown_event.is_set():
-            return
-
-        async with self.consumer.lock:
-            task = asyncio.create_task(self.handle_message(aio_pika_message))
-            self.consumer.tasks.add(task)
-            task.add_done_callback(self.handle_message_consume_done)
-
-    def handle_message_consume_done(self, task: asyncio.Task[Any]) -> None:
-        self.consumer.tasks.discard(task)
-
-    async def handle_message(
-        self, aio_pika_message: aio_pika.abc.AbstractIncomingMessage
-    ) -> None:
-
-        if self.consumer.shutdown_event.is_set():
-            logger.info("Shutdown event set. Rqueuing message")
-            await aio_pika_message.reject(requeue=True)
-
-        sig = inspect.signature(self.scheduled_action.callable)
-        if len(sig.parameters) == 1:
-
-            task = asyncio.create_task(
-                self.run_with_context(
-                    self.scheduled_action,
-                    (ScheduleDispatchData(int(aio_pika_message.body.decode("utf-8"))),),
-                    {},
-                )
-            )
-
-        elif len(sig.parameters) == 0:
-            task = asyncio.create_task(
-                self.run_with_context(
-                    self.scheduled_action,
-                    (),
-                    {},
-                )
-            )
-        else:
-            logger.warning(
-                "Scheduled action '%s' must have exactly one parameter of type ScheduleDispatchData or no parameters"
-                % self.queue_name
-            )
-            return
-
-        self.consumer.tasks.add(task)
-        task.add_done_callback(self.handle_message_consume_done)
-
-        try:
-            await task
-        except Exception as e:
-
-            logger.exception(
-                f"Error processing scheduled action {self.queue_name}: {e}"
-            )
-
-    async def run_with_context(
-        self,
-        scheduled_action: ScheduledActionData,
-        args: tuple[Any, ...],
-        kwargs: dict[str, Any],
-    ) -> None:
-        async with self.consumer.uow_context_provider(
-            AppTransactionContext(
-                controller_member_reflect=scheduled_action.controller_member,
-                transaction_data=SchedulerTransactionData(
-                    scheduled_to=datetime.now(UTC),
-                    cron_expression=scheduled_action.spec.cron,
-                    triggered_at=datetime.now(UTC),
-                ),
-            )
-        ):
-
-            await scheduled_action.callable(*args, **kwargs)
-
-
-class MessageHandlerCallback:
-
-    def __init__(
-        self,
-        consumer: AioPikaMicroserviceConsumer,
-        queue_name: str,
-        routing_key: str,
-        message_handler: MessageHandlerData,
-    ):
-        self.consumer = consumer
-        self.queue_name = queue_name
-        self.routing_key = routing_key
-        self.message_handler = message_handler
-
-    async def message_consumer(
-        self, aio_pika_message: aio_pika.abc.AbstractIncomingMessage
-    ) -> None:
-        if self.consumer.shutdown_event.is_set():
-            return
-
-        async with self.consumer.lock:
-            task = asyncio.create_task(self.handle_message(aio_pika_message))
-            self.consumer.tasks.add(task)
-            task.add_done_callback(self.handle_message_consume_done)
-
-    def handle_message_consume_done(self, task: asyncio.Task[Any]) -> None:
-        self.consumer.tasks.discard(task)
-        if task.cancelled():
-            return
-
-        if (error := task.exception()) is not None:
-            logger.exception("Error processing message", exc_info=error)
-
-    async def __call__(
-        self, aio_pika_message: aio_pika.abc.AbstractIncomingMessage
-    ) -> None:
-        await self.message_consumer(aio_pika_message)
-
-    async def handle_reject_message(
-        self,
-        aio_pika_message: aio_pika.abc.AbstractIncomingMessage,
-        requeue: bool = False,
-    ) -> None:
-        if self.message_handler.spec.auto_ack is False:
-            await aio_pika_message.reject(requeue=requeue)
-        elif requeue:
-            logger.warning(
-                f"Message {aio_pika_message.message_id} ({self.queue_name}) cannot be requeued because auto_ack is enabled"
-            )
-
-    async def handle_message(
-        self, aio_pika_message: aio_pika.abc.AbstractIncomingMessage
-    ) -> None:
-
-        routing_key = self.queue_name
-
-        if routing_key is None:
-            logger.warning("No topic found for message")
-            await self.handle_reject_message(aio_pika_message)
-            return
-
-        handler_data = self.message_handler
-
-        handler = handler_data.instance_callable
-
-        sig = inspect.signature(handler)
-
-        if len(sig.parameters) != 1:
-            logger.warning(
-                "Handler for topic '%s' must have exactly one parameter which is MessageOf[T extends Message]"
-                % routing_key
-            )
-            return
-
-        parameter = list(sig.parameters.values())[0]
-
-        param_origin = get_origin(parameter.annotation)
-
-        if param_origin is not MessageOf:
-            logger.warning(
-                "Handler for topic '%s' must have exactly one parameter of type Message"
-                % routing_key
-            )
-            return
-
-        if len(parameter.annotation.__args__) != 1:
-            logger.warning(
-                "Handler for topic '%s' must have exactly one parameter of type Message"
-                % routing_key
-            )
-            return
-
-        message_type = parameter.annotation.__args__[0]
-
-        if not issubclass(message_type, BaseModel):
-            logger.warning(
-                "Handler for topic '%s' must have exactly one parameter of type MessageOf[BaseModel]"
-                % routing_key
-            )
-            return
-
-        builded_message = AioPikaMessage(aio_pika_message, message_type)
-
-        incoming_message_spec = MessageHandler.get_message_incoming(handler)
-        assert incoming_message_spec is not None
-
-        async with self.consumer.uow_context_provider(
-            AppTransactionContext(
-                controller_member_reflect=handler_data.controller_member,
-                transaction_data=MessageBusTransactionData(
-                    message=builded_message,
-                    topic=routing_key,
-                ),
-            )
-        ):
-            ctx: AsyncContextManager[Any]
-            if incoming_message_spec.timeout is not None:
-                ctx = asyncio.timeout(incoming_message_spec.timeout)
-            else:
-                ctx = none_context()
-            async with ctx:
-                try:
-                    with provide_bus_message_controller(
-                        AioPikaMessageBusController(aio_pika_message)
-                    ):
-                        await handler(builded_message)
-                    if not incoming_message_spec.auto_ack:
-                        with suppress(aio_pika.MessageProcessError):
-                            await aio_pika_message.ack()
-                except BaseException as base_exc:
-                    if incoming_message_spec.exception_handler is not None:
-                        try:
-                            incoming_message_spec.exception_handler(base_exc)
-                        except Exception as nested_exc:
-                            logger.exception(
-                                f"Error processing exception handler: {base_exc} | {nested_exc}"
-                            )
-                    else:
-                        logger.exception(
-                            f"Error processing message on topic {routing_key}"
-                        )
-                    if incoming_message_spec.requeue_on_exception:
-                        await self.handle_reject_message(aio_pika_message, requeue=True)
-                    else:
-                        await self.handle_reject_message(
-                            aio_pika_message, requeue=False
-                        )
-                else:
-                    logger.info(
-                        f"Message {aio_pika_message.message_id}#{self.queue_name} processed successfully"
-                    )
-
-
-@asynccontextmanager
-async def none_context() -> AsyncGenerator[None, None]:
-    yield
-
-
-class MessageBusWorker:
-    def __init__(
-        self,
-        app: Microservice,
-        broker_url: str,
-        backend_url: str,
-        handler_names: set[str] | None = None,
-    ) -> None:
-        self.app = app
-        self.backend_url = backend_url
-        self.broker_url = broker_url
-        self.handler_names = handler_names
-
-        self.container = Container(app)
-        self.lifecycle = AppLifecycle(app, self.container)
-
-        self.uow_context_provider = UnitOfWorkContextProvider(
-            app=app, container=self.container
-        )
-
-        self._consumer: MessageBusConsumer | None = None
-
-    @property
-    def consumer(self) -> MessageBusConsumer:
-        if self._consumer is None:
-            raise RuntimeError("Consumer not started")
-        return self._consumer
-
-    async def start_async(self) -> None:
-        all_message_handlers_set: MESSAGE_HANDLER_DATA_SET = set()
-        all_scheduled_actions_set: SCHEDULED_ACTION_DATA_SET = set()
-        async with self.lifecycle():
-            for instance_class in self.app.controllers:
-                controller = MessageBusController.get_messagebus(instance_class)
-
-                if controller is None:
-                    continue
-
-                instance: Any = self.container.get_by_type(instance_class)
-
-                factory = controller.get_messagebus_factory()
-                handlers, schedulers = factory(instance)
-
-                message_handler_data_map: dict[str, MessageHandlerData] = {}
-                all_scheduled_actions_set.update(schedulers)
-                for handler_data in handlers:
-                    message_type = handler_data.spec.message_type
-                    topic = message_type.MESSAGE_TOPIC
-
-                    # Filter handlers by name if specified
-                    if (
-                        self.handler_names is not None
-                        and handler_data.spec.name is not None
-                    ):
-                        if handler_data.spec.name not in self.handler_names:
-                            continue
-                    elif (
-                        self.handler_names is not None
-                        and handler_data.spec.name is None
-                    ):
-                        # Skip handlers without names when filtering is requested
-                        continue
-
-                    if (
-                        topic in message_handler_data_map
-                        and message_type.MESSAGE_TYPE == "task"
-                    ):
-                        logger.warning(
-                            "Task handler for topic '%s' already registered. Skipping"
-                            % topic
-                        )
-                        continue
-                    message_handler_data_map[topic] = handler_data
-                    all_message_handlers_set.add(handler_data)
-
-            broker_backend = get_message_broker_backend_from_url(url=self.backend_url)
-
-            consumer = self._consumer = create_message_bus(
-                broker_url=self.broker_url,
-                broker_backend=broker_backend,
-                scheduled_actions=all_scheduled_actions_set,
-                message_handler_set=all_message_handlers_set,
-                uow_context_provider=self.uow_context_provider,
-            )
-
-            await consumer.consume()
-
-    def start_sync(self) -> None:
-
-        def on_shutdown(loop: asyncio.AbstractEventLoop) -> None:
-            logger.info("Shutting down")
-            self.consumer.shutdown()
-
-        with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
-            runner.get_loop().add_signal_handler(
-                signal.SIGINT, on_shutdown, runner.get_loop()
-            )
-            runner.run(self.start_async())
-
-
-class AioPikaMessageBusController(BusMessageController):
-    def __init__(self, aio_pika_message: aio_pika.abc.AbstractIncomingMessage):
-        self.aio_pika_message = aio_pika_message
-
-    async def ack(self) -> None:
-        await self.aio_pika_message.ack()
-
-    async def nack(self) -> None:
-        await self.aio_pika_message.nack()
-
-    async def reject(self) -> None:
-        await self.aio_pika_message.reject()
-
-    async def retry(self) -> None:
-        await self.aio_pika_message.reject(requeue=True)
-
-    async def retry_later(self, delay: int) -> None:
-        raise NotImplementedError("Not implemented")