jararaca 0.3.11a8__py3-none-any.whl → 0.3.11a10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of jararaca might be problematic.
- jararaca/__init__.py +3 -0
- jararaca/cli.py +72 -350
- jararaca/messagebus/decorators.py +1 -1
- jararaca/messagebus/worker.py +28 -32
- jararaca/messagebus/worker_v2.py +36 -26
- jararaca/microservice.py +76 -16
- jararaca/scheduler/scheduler_v2.py +8 -19
- jararaca/utils/rabbitmq_utils.py +240 -18
- {jararaca-0.3.11a8.dist-info → jararaca-0.3.11a10.dist-info}/METADATA +1 -1
- {jararaca-0.3.11a8.dist-info → jararaca-0.3.11a10.dist-info}/RECORD +13 -13
- {jararaca-0.3.11a8.dist-info → jararaca-0.3.11a10.dist-info}/LICENSE +0 -0
- {jararaca-0.3.11a8.dist-info → jararaca-0.3.11a10.dist-info}/WHEEL +0 -0
- {jararaca-0.3.11a8.dist-info → jararaca-0.3.11a10.dist-info}/entry_points.txt +0 -0
jararaca/messagebus/worker_v2.py
CHANGED
@@ -12,6 +12,7 @@ from urllib.parse import parse_qs, urlparse
 import aio_pika
 import aio_pika.abc
 import uvloop
+from aio_pika.exceptions import AMQPError, ChannelClosed, ChannelNotFoundEntity
 from pydantic import BaseModel

 from jararaca.broker_backend import MessageBrokerBackend
@@ -105,7 +106,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         message_handler_set: MESSAGE_HANDLER_DATA_SET,
         scheduled_actions: SCHEDULED_ACTION_DATA_SET,
         uow_context_provider: UnitOfWorkContextProvider,
-        passive_declare: bool = False,
     ):

         self.broker_backend = broker_backend
@@ -117,7 +117,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         self.shutdown_event = asyncio.Event()
         self.lock = asyncio.Lock()
         self.tasks: set[asyncio.Task[Any]] = set()
-        self.passive_declare = passive_declare

     async def consume(self) -> None:

@@ -127,21 +126,22 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):

         await channel.set_qos(prefetch_count=self.config.prefetch_count)

-
-
-
-
-
-
-        dlx = await RabbitmqUtils.declare_dl_exchange(
-            channel=channel, passive=self.passive_declare
-        )
-
-        dlq = await RabbitmqUtils.declare_dl_queue(
-            channel=channel, passive=self.passive_declare
-        )
+        # Get existing exchange and queues
+        try:
+            exchange = await RabbitmqUtils.get_main_exchange(
+                channel=channel,
+                exchange_name=self.config.exchange,
+            )

-
+            dlx = await RabbitmqUtils.get_dl_exchange(channel=channel)
+            dlq = await RabbitmqUtils.get_dl_queue(channel=channel)
+        except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as e:
+            logger.critical(
+                f"Required exchange or queue infrastructure not found and passive mode is enabled. "
+                f"Please use the declare command first to create the required infrastructure. Error: {e}"
+            )
+            self.shutdown_event.set()
+            return

         for handler in self.message_handler_set:

@@ -150,11 +150,16 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):

             self.incoming_map[queue_name] = handler

-
-
-
-
-
+            try:
+                queue = await RabbitmqUtils.get_queue(
+                    channel=channel, queue_name=queue_name
+                )
+            except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as e:
+                logger.error(
+                    f"Queue '{queue_name}' not found and passive mode is enabled. "
+                    f"Please use the declare command first to create the queue. Error: {e}"
+                )
+                continue

             await queue.consume(
                 callback=MessageHandlerCallback(
@@ -174,11 +179,16 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):

             routing_key = queue_name

-
-
-
-
-
+            try:
+                queue = await RabbitmqUtils.get_queue(
+                    channel=channel, queue_name=queue_name
+                )
+            except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as e:
+                logger.error(
+                    f"Scheduler queue '{queue_name}' not found and passive mode is enabled. "
+                    f"Please use the declare command first to create the queue. Error: {e}"
+                )
+                continue

             await queue.consume(
                 callback=ScheduledMessageHandlerCallback(
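The effect of these changes is that the v2 worker no longer declares its RabbitMQ topology at startup: it performs passive lookups of the main exchange, the dead-letter pair, and each handler's queue, and shuts down or skips the handler when they are missing, pointing the operator at a separate declare step. Below is a minimal, stand-alone sketch of that passive-lookup pattern using plain aio_pika; the connection URL and the exchange/queue names are placeholders, not values taken from jararaca.

import asyncio
import logging

import aio_pika
from aio_pika.exceptions import AMQPError, ChannelClosed, ChannelNotFoundEntity

logger = logging.getLogger(__name__)


async def consume_existing_queue() -> None:
    # Placeholder URL and names; a real worker takes these from its configuration.
    connection = await aio_pika.connect_robust("amqp://guest:guest@localhost/")
    async with connection:
        channel = await connection.channel()
        await channel.set_qos(prefetch_count=10)
        try:
            # Passive lookups: these raise if the declare step never ran.
            exchange = await channel.get_exchange("my_exchange")
            queue = await channel.get_queue("my_queue")
        except (ChannelNotFoundEntity, ChannelClosed, AMQPError) as exc:
            logger.critical("Missing exchange/queue, run the declare step first: %s", exc)
            return

        async with queue.iterator() as messages:
            async for message in messages:
                async with message.process():
                    logger.info("got %r via %s", message.body, exchange.name)


if __name__ == "__main__":
    asyncio.run(consume_existing_queue())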
jararaca/microservice.py
CHANGED
@@ -1,4 +1,5 @@
 import inspect
+import logging
 from contextlib import contextmanager, suppress
 from contextvars import ContextVar
 from dataclasses import dataclass, field
@@ -25,6 +26,8 @@ from jararaca.messagebus import MessageOf
 from jararaca.messagebus.message import Message
 from jararaca.reflect.controller_inspect import ControllerMemberReflect

+logger = logging.getLogger(__name__)
+
 if TYPE_CHECKING:
     from typing_extensions import TypeIs

@@ -145,6 +148,49 @@ class Microservice:
     )


+@dataclass
+class InstantiationNode:
+    property_name: str
+    parent: "InstantiationNode | None" = None
+    source_type: Any | None = None
+    target_type: Any | None = None
+
+
+instantiation_vector_ctxvar = ContextVar[list[InstantiationNode]](
+    "instantiation_vector", default=[]
+)
+
+
+def print_instantiation_vector(
+    instantiation_vector: list[InstantiationNode],
+) -> None:
+    """
+    Prints the instantiation vector for debugging purposes.
+    """
+    for node in instantiation_vector:
+        print(
+            f"Property: {node.property_name}, Source: {node.source_type}, Target: {node.target_type}"
+        )
+
+
+@contextmanager
+def span_instantiation_vector(
+    instantiation_node: InstantiationNode,
+) -> Generator[None, None, None]:
+    """
+    Context manager to track instantiation nodes in a vector.
+    This is useful for debugging and tracing instantiation paths.
+    """
+    current_vector = list(instantiation_vector_ctxvar.get())
+    current_vector.append(instantiation_node)
+    token = instantiation_vector_ctxvar.set(current_vector)
+    try:
+        yield
+    finally:
+        with suppress(ValueError):
+            instantiation_vector_ctxvar.reset(token)
+
+
 class Container:

     def __init__(self, app: Microservice) -> None:
@@ -161,40 +207,54 @@ class Container:
             if provider.use_value:
                 self.instances_map[provider.provide] = provider.use_value
             elif provider.use_class:
-                self.
+                self._get_and_register(provider.use_class, provider.provide)
             elif provider.use_factory:
-                self.
+                self._get_and_register(provider.use_factory, provider.provide)
             else:
-                self.
+                self._get_and_register(provider, provider)

-    def
+    def _instantiate(self, type_: type[Any] | Callable[..., Any]) -> Any:

-        dependencies = self.
+        dependencies = self._parse_dependencies(type_)

-        evaluated_dependencies = {
-
-
-
+        evaluated_dependencies: dict[str, Any] = {}
+        for name, dependency in dependencies.items():
+            with span_instantiation_vector(
+                InstantiationNode(
+                    property_name=name,
+                    source_type=type_,
+                    target_type=dependency,
+                )
+            ):
+                evaluated_dependencies[name] = self.get_or_register_token_or_type(
+                    dependency
+                )

         instance = type_(**evaluated_dependencies)

         return instance

-    def
+    def _parse_dependencies(
         self, provider: type[Any] | Callable[..., Any]
     ) -> dict[str, type[Any]]:

-
+        vector = instantiation_vector_ctxvar.get()
+        try:
+            signature = inspect.signature(provider)
+        except ValueError:
+            print("VECTOR:", vector)
+            print_instantiation_vector(vector)
+            raise

         parameters = signature.parameters

         return {
-            name: self.
+            name: self._lookup_parameter_type(parameter)
             for name, parameter in parameters.items()
             if parameter.annotation != inspect.Parameter.empty
         }

-    def
+    def _lookup_parameter_type(self, parameter: inspect.Parameter) -> Any:
         if parameter.annotation == inspect.Parameter.empty:
             raise Exception(f"Parameter {parameter.name} has no type annotation")
@@ -227,14 +287,14 @@ class Container:
         item_type = bind_to = token_or_type

         if token_or_type not in self.instances_map:
-            return self.
+            return self._get_and_register(item_type, bind_to)

         return cast(T, self.instances_map[bind_to])

-    def
+    def _get_and_register(
         self, item_type: Type[T] | Callable[..., T], bind_to: Any
     ) -> T:
-        instance = self.
+        instance = self._instantiate(item_type)
         self.register(instance, bind_to)
         return cast(T, instance)
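The new InstantiationNode / span_instantiation_vector helpers give the container a breadcrumb trail of which constructor parameter it was resolving when dependency resolution fails; for example, when inspect.signature() raises ValueError, the collected vector is printed before re-raising. The snippet below is a simplified, stand-alone sketch of the same ContextVar pattern; Repo, Service, and build() are made-up names for illustration, not part of jararaca.

import inspect
from contextlib import contextmanager
from contextvars import ContextVar
from dataclasses import dataclass
from typing import Any, Generator


@dataclass
class Node:
    property_name: str
    source_type: Any = None
    target_type: Any = None


# The current resolution path (the "instantiation vector") lives in a ContextVar,
# so concurrent resolutions each see their own stack.
_vector: ContextVar[list[Node]] = ContextVar("vector", default=[])


@contextmanager
def span(node: Node) -> Generator[None, None, None]:
    token = _vector.set([*_vector.get(), node])  # push onto a copy, never mutate in place
    try:
        yield
    finally:
        _vector.reset(token)


class Repo:
    def __init__(self) -> None:
        self.ready = True


class Service:
    def __init__(self, repo: Repo) -> None:
        self.repo = repo


def build(target: type) -> Any:
    params = inspect.signature(target).parameters
    if not params:
        # Leaf dependency: dump the path that led here, like print_instantiation_vector().
        print(" -> ".join(f"{n.source_type.__name__}.{n.property_name}" for n in _vector.get()))
    kwargs: dict[str, Any] = {}
    for name, param in params.items():
        with span(Node(property_name=name, source_type=target, target_type=param.annotation)):
            kwargs[name] = build(param.annotation)
    return target(**kwargs)


assert isinstance(build(Service).repo, Repo)  # prints "Service.repo" while resolving Repo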
@@ -157,7 +157,7 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):

         logger.info(f"Dispatching message to {action_id} at {timestamp}")
         async with self.channel_pool.acquire() as channel:
-            exchange = await
+            exchange = await RabbitmqUtils.get_main_exchange(channel, self.exchange)

             await exchange.publish(
                 aio_pika.Message(body=str(timestamp).encode()),
@@ -175,7 +175,7 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
         """
         async with self.channel_pool.acquire() as channel:

-            exchange = await
+            exchange = await RabbitmqUtils.get_main_exchange(channel, self.exchange)
             await exchange.publish(
                 aio_pika.Message(
                     body=delayed_message.payload,
@@ -190,26 +190,15 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
         """

         async with self.channel_pool.acquire() as channel:
-
-            await channel.declare_exchange(
-                name=self.exchange,
-                type="topic",
-                durable=True,
-                auto_delete=False,
-            )
+            await RabbitmqUtils.get_main_exchange(channel, self.exchange)

             for sched_act_data in scheduled_actions:
-
-                channel=channel,
-                queue_name=ScheduledAction.get_function_id(sched_act_data.callable),
-                passive=False,
-            )
+                queue_name = ScheduledAction.get_function_id(sched_act_data.callable)

-
-
-
-
-                ),
+                # Try to get existing queue
+                await RabbitmqUtils.get_scheduler_queue(
+                    channel=channel,
+                    queue_name=queue_name,
                 )

     async def dispose(self) -> None:
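The dispatcher side follows the same rule: instead of re-declaring the topic exchange on every dispatch, it fetches the already-declared exchange via RabbitmqUtils.get_main_exchange and publishes to it. A rough aio_pika-only equivalent is sketched below; the URL, exchange name, routing key, and payload are placeholders.

import asyncio

import aio_pika


async def publish_to_existing_exchange() -> None:
    # Placeholder URL, exchange name, routing key and payload.
    connection = await aio_pika.connect_robust("amqp://guest:guest@localhost/")
    async with connection:
        channel = await connection.channel()
        # Passive lookup: raises if the exchange was never declared.
        exchange = await channel.get_exchange("my_exchange")
        await exchange.publish(
            aio_pika.Message(body=b"1700000000"),
            routing_key="my_scheduled_action",
        )


if __name__ == "__main__":
    asyncio.run(publish_to_existing_exchange())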
jararaca/utils/rabbitmq_utils.py
CHANGED
@@ -1,4 +1,9 @@
+import logging
+
 from aio_pika.abc import AbstractChannel, AbstractExchange, AbstractQueue
+from aio_pika.exceptions import AMQPError, ChannelClosed, ChannelNotFoundEntity
+
+logger = logging.getLogger(__name__)


 class RabbitmqUtils:
@@ -6,6 +11,39 @@ class RabbitmqUtils:
     DEAD_LETTER_EXCHANGE = "dlx"
     DEAD_LETTER_QUEUE = "dlq"

+    # Note: get_worker_v1_queue method is already defined above
+
+    DEAD_LETTER_EXCHANGE = "dlx"
+    DEAD_LETTER_QUEUE = "dlq"
+
+    @classmethod
+    async def get_dl_exchange(cls, channel: AbstractChannel) -> AbstractExchange:
+        """
+        Get the Dead Letter Exchange (DLX) for the given channel.
+        """
+        try:
+            return await channel.get_exchange(
+                cls.DEAD_LETTER_EXCHANGE,
+            )
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Dead Letter Exchange '{cls.DEAD_LETTER_EXCHANGE}' does not exist. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting Dead Letter Exchange '{cls.DEAD_LETTER_EXCHANGE}'. "
+                f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting Dead Letter Exchange '{cls.DEAD_LETTER_EXCHANGE}'. "
+                f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_dl_exchange(
         cls, channel: AbstractChannel, passive: bool
@@ -22,6 +60,34 @@ class RabbitmqUtils:
             auto_delete=False,
         )

+    @classmethod
+    async def get_dl_queue(cls, channel: AbstractChannel) -> AbstractQueue:
+        """
+        Get the Dead Letter Queue (DLQ) for the given channel.
+        """
+        try:
+            return await channel.get_queue(
+                cls.DEAD_LETTER_QUEUE,
+            )
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Dead Letter Queue '{cls.DEAD_LETTER_QUEUE}' does not exist. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting Dead Letter Queue '{cls.DEAD_LETTER_QUEUE}'. "
+                f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting Dead Letter Queue '{cls.DEAD_LETTER_QUEUE}'. "
+                f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_dl_queue(
         cls, channel: AbstractChannel, passive: bool
@@ -40,6 +106,36 @@ class RabbitmqUtils:
             },
         )

+    @classmethod
+    async def get_dl_kit(
+        cls,
+        channel: AbstractChannel,
+    ) -> tuple[AbstractExchange, AbstractQueue]:
+        """
+        Get the Dead Letter Exchange and Queue (DLX and DLQ) for the given channel.
+        """
+        try:
+            dlx = await cls.get_dl_exchange(channel)
+            dlq = await cls.get_dl_queue(channel)
+            return dlx, dlq
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Dead Letter infrastructure does not exist completely. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting Dead Letter infrastructure. "
+                f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting Dead Letter infrastructure. " f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_dl_kit(
         cls,
@@ -54,6 +150,33 @@ class RabbitmqUtils:
         await dlq.bind(dlx, routing_key=cls.DEAD_LETTER_EXCHANGE)
         return dlx, dlq

+    @classmethod
+    async def get_main_exchange(
+        cls, channel: AbstractChannel, exchange_name: str
+    ) -> AbstractExchange:
+        """
+        Get the main exchange for the given channel.
+        """
+        try:
+            return await channel.get_exchange(exchange_name)
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Exchange '{exchange_name}' does not exist. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting exchange '{exchange_name}'. "
+                f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting exchange '{exchange_name}'. " f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_main_exchange(
         cls, channel: AbstractChannel, exchange_name: str, passive: bool
@@ -70,6 +193,34 @@ class RabbitmqUtils:
             auto_delete=False,
         )

+    @classmethod
+    async def get_queue(
+        cls,
+        channel: AbstractChannel,
+        queue_name: str,
+    ) -> AbstractQueue:
+        """
+        Get a queue with the given name.
+        """
+        try:
+            return await channel.get_queue(queue_name)
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Queue '{queue_name}' does not exist. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting queue '{queue_name}'. " f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting queue '{queue_name}'. " f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_queue(
         cls,
@@ -91,6 +242,35 @@ class RabbitmqUtils:
             },
         )

+    @classmethod
+    async def get_worker_v1_queue(
+        cls,
+        channel: AbstractChannel,
+        queue_name: str,
+    ) -> AbstractQueue:
+        """
+        Get a worker v1 queue.
+        """
+        try:
+            return await channel.get_queue(queue_name)
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Worker queue '{queue_name}' does not exist. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting worker queue '{queue_name}'. "
+                f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting worker queue '{queue_name}'. " f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_worker_v1_queue(
         cls,
@@ -113,6 +293,36 @@ class RabbitmqUtils:
             durable=True,
         )

+    @classmethod
+    async def get_scheduler_queue(
+        cls,
+        channel: AbstractChannel,
+        queue_name: str,
+    ) -> AbstractQueue:
+        """
+        Get a scheduler queue.
+        """
+        try:
+            return await channel.get_queue(queue_name)
+        except ChannelNotFoundEntity as e:
+            logger.error(
+                f"Scheduler queue '{queue_name}' does not exist. "
+                f"Please use the declare command to create it first. Error: {e}"
+            )
+            raise
+        except ChannelClosed as e:
+            logger.error(
+                f"Channel closed while getting scheduler queue '{queue_name}'. "
+                f"Error: {e}"
+            )
+            raise
+        except AMQPError as e:
+            logger.error(
+                f"AMQP error while getting scheduler queue '{queue_name}'. "
+                f"Error: {e}"
+            )
+            raise
+
     @classmethod
     async def declare_scheduler_queue(
         cls,
@@ -130,41 +340,53 @@ class RabbitmqUtils:
         )

     @classmethod
-    async def
+    async def delete_exchange(
         cls,
         channel: AbstractChannel,
-
+        exchange_name: str,
         if_unused: bool = False,
-        if_empty: bool = False,
     ) -> None:
         """
-        Delete
+        Delete an exchange.
         """
         try:
-            await channel.
-
+            await channel.exchange_delete(
+                exchange_name=exchange_name,
                 if_unused=if_unused,
-                if_empty=if_empty,
             )
-        except
-            #
-
+        except ChannelNotFoundEntity:
+            # Exchange might not exist, which is fine
+            logger.info(
+                f"Exchange '{exchange_name}' does not exist, nothing to delete."
+            )
+        except ChannelClosed as e:
+            logger.warning(
+                f"Channel closed while deleting exchange '{exchange_name}': {e}"
+            )
+        except AMQPError as e:
+            logger.warning(f"AMQP error while deleting exchange '{exchange_name}': {e}")

     @classmethod
-    async def
+    async def delete_queue(
         cls,
         channel: AbstractChannel,
-
+        queue_name: str,
         if_unused: bool = False,
+        if_empty: bool = False,
     ) -> None:
         """
-        Delete
+        Delete a queue.
         """
         try:
-            await channel.
-
+            await channel.queue_delete(
+                queue_name=queue_name,
                 if_unused=if_unused,
+                if_empty=if_empty,
             )
-        except
-            #
-
+        except ChannelNotFoundEntity:
+            # Queue might not exist, which is fine
+            logger.info(f"Queue '{queue_name}' does not exist, nothing to delete.")
+        except ChannelClosed as e:
+            logger.warning(f"Channel closed while deleting queue '{queue_name}': {e}")
+        except AMQPError as e:
+            logger.warning(f"AMQP error while deleting queue '{queue_name}': {e}")