jararaca 0.3.9__py3-none-any.whl → 0.3.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of jararaca might be problematic.

Files changed (35)
  1. jararaca/__init__.py +76 -5
  2. jararaca/cli.py +460 -116
  3. jararaca/core/uow.py +17 -12
  4. jararaca/messagebus/decorators.py +33 -30
  5. jararaca/messagebus/interceptors/aiopika_publisher_interceptor.py +30 -2
  6. jararaca/messagebus/interceptors/publisher_interceptor.py +7 -3
  7. jararaca/messagebus/publisher.py +14 -6
  8. jararaca/messagebus/worker.py +1102 -88
  9. jararaca/microservice.py +137 -34
  10. jararaca/observability/decorators.py +7 -3
  11. jararaca/observability/interceptor.py +4 -2
  12. jararaca/observability/providers/otel.py +14 -10
  13. jararaca/persistence/base.py +2 -1
  14. jararaca/persistence/interceptors/aiosqa_interceptor.py +167 -16
  15. jararaca/persistence/utilities.py +32 -20
  16. jararaca/presentation/decorators.py +96 -10
  17. jararaca/presentation/server.py +31 -4
  18. jararaca/presentation/websocket/context.py +30 -4
  19. jararaca/presentation/websocket/types.py +2 -2
  20. jararaca/presentation/websocket/websocket_interceptor.py +28 -4
  21. jararaca/reflect/__init__.py +0 -0
  22. jararaca/reflect/controller_inspect.py +75 -0
  23. jararaca/{tools → reflect}/metadata.py +25 -5
  24. jararaca/scheduler/{scheduler_v2.py → beat_worker.py} +49 -53
  25. jararaca/scheduler/decorators.py +55 -20
  26. jararaca/tools/app_config/interceptor.py +4 -2
  27. jararaca/utils/rabbitmq_utils.py +259 -5
  28. jararaca/utils/retry.py +141 -0
  29. {jararaca-0.3.9.dist-info → jararaca-0.3.11.dist-info}/METADATA +2 -1
  30. {jararaca-0.3.9.dist-info → jararaca-0.3.11.dist-info}/RECORD +33 -32
  31. {jararaca-0.3.9.dist-info → jararaca-0.3.11.dist-info}/WHEEL +1 -1
  32. jararaca/messagebus/worker_v2.py +0 -617
  33. jararaca/scheduler/scheduler.py +0 -161
  34. {jararaca-0.3.9.dist-info → jararaca-0.3.11.dist-info}/LICENSE +0 -0
  35. {jararaca-0.3.9.dist-info → jararaca-0.3.11.dist-info}/entry_points.txt +0 -0
jararaca/scheduler/{scheduler_v2.py → beat_worker.py}
@@ -4,10 +4,9 @@ import logging
  import signal
  import time
  from abc import ABC, abstractmethod
- from contextlib import asynccontextmanager
  from datetime import UTC, datetime
  from types import FrameType
- from typing import Any, AsyncGenerator, Callable
+ from typing import Any
  from urllib.parse import parse_qs
 
  import aio_pika
@@ -25,25 +24,37 @@ from jararaca.core.uow import UnitOfWorkContextProvider
  from jararaca.di import Container
  from jararaca.lifecycle import AppLifecycle
  from jararaca.microservice import Microservice
- from jararaca.scheduler.decorators import ScheduledAction
+ from jararaca.scheduler.decorators import (
+     ScheduledAction,
+     ScheduledActionData,
+     get_type_scheduled_actions,
+ )
  from jararaca.scheduler.types import DelayedMessageData
+ from jararaca.utils.rabbitmq_utils import RabbitmqUtils
 
  logger = logging.getLogger(__name__)
 
- SCHEDULED_ACTION_LIST = list[tuple[Callable[..., Any], "ScheduledAction"]]
-
 
- def extract_scheduled_actions(
-     app: Microservice, container: Container
- ) -> SCHEDULED_ACTION_LIST:
-     scheduled_actions: SCHEDULED_ACTION_LIST = []
+ def _extract_scheduled_actions(
+     app: Microservice, container: Container, scheduler_names: set[str] | None = None
+ ) -> list[ScheduledActionData]:
+     scheduled_actions: list[ScheduledActionData] = []
      for controllers in app.controllers:
 
          controller_instance: Any = container.get_by_type(controllers)
 
-         controller_scheduled_actions = ScheduledAction.get_type_scheduled_actions(
-             controller_instance
-         )
+         controller_scheduled_actions = get_type_scheduled_actions(controller_instance)
+
+         # Filter scheduled actions by name if scheduler_names is provided
+         if scheduler_names is not None:
+             filtered_actions = []
+             for action in controller_scheduled_actions:
+                 # Include actions that have a name and it's in the provided set
+                 if action.spec.name and action.spec.name in scheduler_names:
+                     filtered_actions.append(action)
+                 # Skip actions without names when filtering is active
+             controller_scheduled_actions = filtered_actions
+
          scheduled_actions.extend(controller_scheduled_actions)
 
      return scheduled_actions
@@ -52,7 +63,7 @@ def extract_scheduled_actions(
  # region Message Broker Dispatcher
 
 
- class MessageBrokerDispatcher(ABC):
+ class _MessageBrokerDispatcher(ABC):
 
      @abstractmethod
      async def dispatch_scheduled_action(
@@ -81,14 +92,14 @@ class MessageBrokerDispatcher(ABC):
          raise NotImplementedError("dispatch_delayed_message() is not implemented yet.")
 
      @abstractmethod
-     async def initialize(self, scheduled_actions: SCHEDULED_ACTION_LIST) -> None:
+     async def initialize(self, scheduled_actions: list[ScheduledActionData]) -> None:
          raise NotImplementedError("initialize() is not implemented yet.")
 
      async def dispose(self) -> None:
          pass
 
 
- class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
+ class _RabbitMQBrokerDispatcher(_MessageBrokerDispatcher):
 
      def __init__(self, url: str) -> None:
          self.url = url
@@ -145,7 +156,7 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
 
          logger.info(f"Dispatching message to {action_id} at {timestamp}")
          async with self.channel_pool.acquire() as channel:
-             exchange = await channel.get_exchange(self.exchange)
+             exchange = await RabbitmqUtils.get_main_exchange(channel, self.exchange)
 
              await exchange.publish(
                  aio_pika.Message(body=str(timestamp).encode()),
@@ -163,7 +174,7 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
          """
          async with self.channel_pool.acquire() as channel:
 
-             exchange = await channel.get_exchange(self.exchange)
+             exchange = await RabbitmqUtils.get_main_exchange(channel, self.exchange)
              await exchange.publish(
                  aio_pika.Message(
                      body=delayed_message.payload,
@@ -171,32 +182,22 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
                  routing_key=f"{delayed_message.message_topic}.",
              )
 
-     async def initialize(self, scheduled_actions: SCHEDULED_ACTION_LIST) -> None:
+     async def initialize(self, scheduled_actions: list[ScheduledActionData]) -> None:
          """
          Initialize the RabbitMQ server.
          This is used to create the exchange and queues for the scheduled actions.
          """
 
          async with self.channel_pool.acquire() as channel:
+             await RabbitmqUtils.get_main_exchange(channel, self.exchange)
 
-             await channel.set_qos(prefetch_count=1)
-
-             await channel.declare_exchange(
-                 name=self.exchange,
-                 type="topic",
-                 durable=True,
-                 auto_delete=False,
-             )
+             for sched_act_data in scheduled_actions:
+                 queue_name = ScheduledAction.get_function_id(sched_act_data.callable)
 
-             for func, _ in scheduled_actions:
-                 queue = await channel.declare_queue(
-                     name=ScheduledAction.get_function_id(func),
-                     durable=True,
-                 )
-
-                 await queue.bind(
-                     exchange=self.exchange,
-                     routing_key=ScheduledAction.get_function_id(func),
+                 # Try to get existing queue
+                 await RabbitmqUtils.get_scheduled_action_queue(
+                     channel=channel,
+                     queue_name=queue_name,
                  )
 
      async def dispose(self) -> None:
@@ -204,13 +205,13 @@ class RabbitMQBrokerDispatcher(MessageBrokerDispatcher):
          await self.conn_pool.close()
 
 
- def get_message_broker_dispatcher_from_url(url: str) -> MessageBrokerDispatcher:
+ def _get_message_broker_dispatcher_from_url(url: str) -> _MessageBrokerDispatcher:
      """
      Factory function to create a message broker instance from a URL.
      Currently, only RabbitMQ is supported.
      """
      if url.startswith("amqp://") or url.startswith("amqps://"):
-         return RabbitMQBrokerDispatcher(url=url)
+         return _RabbitMQBrokerDispatcher(url=url)
      else:
          raise ValueError(f"Unsupported message broker URL: {url}")
 
@@ -218,7 +219,7 @@ def get_message_broker_dispatcher_from_url(url: str) -> MessageBrokerDispatcher:
  # endregion
 
 
- class SchedulerV2:
+ class BeatWorker:
 
      def __init__(
          self,
@@ -226,10 +227,11 @@ class SchedulerV2:
          interval: int,
          broker_url: str,
          backend_url: str,
+         scheduled_action_names: set[str] | None = None,
      ) -> None:
          self.app = app
 
-         self.broker: MessageBrokerDispatcher = get_message_broker_dispatcher_from_url(
+         self.broker: _MessageBrokerDispatcher = _get_message_broker_dispatcher_from_url(
              broker_url
          )
          self.backend: MessageBrokerBackend = get_message_broker_backend_from_url(
@@ -237,6 +239,7 @@ class SchedulerV2:
          )
 
          self.interval = interval
+         self.scheduler_names = scheduled_action_names
          self.container = Container(self.app)
          self.uow_provider = UnitOfWorkContextProvider(app, self.container)
 
@@ -262,19 +265,23 @@ class SchedulerV2:
          """
          async with self.lifecycle():
 
-             scheduled_actions = extract_scheduled_actions(self.app, self.container)
+             scheduled_actions = _extract_scheduled_actions(
+                 self.app, self.container, self.scheduler_names
+             )
 
              await self.broker.initialize(scheduled_actions)
 
              await self.run_scheduled_actions(scheduled_actions)
 
      async def run_scheduled_actions(
-         self, scheduled_actions: SCHEDULED_ACTION_LIST
+         self, scheduled_actions: list[ScheduledActionData]
      ) -> None:
 
          while not self.shutdown_event.is_set():
              now = int(time.time())
-             for func, scheduled_action in scheduled_actions:
+             for sched_act_data in scheduled_actions:
+                 func = sched_act_data.callable
+                 scheduled_action = sched_act_data.spec
                  if self.shutdown_event.is_set():
                      break
 
@@ -333,14 +340,3 @@ class SchedulerV2:
 
          await self.backend.dispose()
          await self.broker.dispose()
-
-
- @asynccontextmanager
- async def none_context() -> AsyncGenerator[None, None]:
-     yield
-
-
- logging.basicConfig(
-     level=logging.INFO,
-     format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
- )
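
For orientation (not part of the diff): the renamed BeatWorker now accepts an optional set of scheduled-action names and only dispatches actions whose ScheduledAction name is in that set. A minimal construction sketch, assuming the import path jararaca.scheduler.beat_worker and purely hypothetical broker/backend URLs and app object; the worker's start method is not visible in these hunks:

    from jararaca.scheduler.beat_worker import BeatWorker  # assumed import path

    # `app` is an existing Microservice instance (hypothetical here).
    beat = BeatWorker(
        app=app,
        interval=1,
        broker_url="amqp://guest:guest@localhost/",  # hypothetical
        backend_url="redis://localhost:6379/0",      # hypothetical
        scheduled_action_names={"refresh-reports"},  # only run actions with this name
    )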
jararaca/scheduler/decorators.py
@@ -1,5 +1,11 @@
  import inspect
- from typing import Any, Callable, TypeVar, cast
+ from dataclasses import dataclass
+ from typing import Any, Awaitable, Callable, TypeVar, cast
+
+ from jararaca.reflect.controller_inspect import (
+     ControllerMemberReflect,
+     inspect_controller,
+ )
 
  DECORATED_FUNC = TypeVar("DECORATED_FUNC", bound=Callable[..., Any])
 
@@ -14,6 +20,7 @@ class ScheduledAction:
          exclusive: bool = True,
          timeout: int | None = None,
          exception_handler: Callable[[BaseException], None] | None = None,
+         name: str | None = None,
      ) -> None:
          """
          :param cron: A string representing the cron expression for the scheduled action.
@@ -21,6 +28,7 @@ class ScheduledAction:
          :param exclusive: A boolean indicating if the scheduled action should be executed in one instance of the application. (Requires a distributed lock provided by a backend)
          :param exception_handler: A callable that will be called when an exception is raised during the execution of the scheduled action.
          :param timeout: An integer representing the timeout for the scheduled action in seconds. If the scheduled action takes longer than this time, it will be terminated.
+         :param name: An optional name for the scheduled action, used for filtering which actions to run.
          """
          self.cron = cron
          """
@@ -49,6 +57,11 @@ class ScheduledAction:
          If the scheduled action takes longer than this time, it will be terminated.
          """
 
+         self.name = name
+         """
+         An optional name for the scheduled action, used for filtering which actions to run.
+         """
+
      def __call__(self, func: DECORATED_FUNC) -> DECORATED_FUNC:
          ScheduledAction.register(func, self)
          return func
@@ -66,25 +79,6 @@ class ScheduledAction:
              ScheduledAction, getattr(func, ScheduledAction.SCHEDULED_ACTION_ATTR)
          )
 
-     @staticmethod
-     def get_type_scheduled_actions(
-         instance: Any,
-     ) -> list[tuple[Callable[..., Any], "ScheduledAction"]]:
-
-         members = inspect.getmembers(instance, predicate=inspect.ismethod)
-
-         scheduled_actions: list[tuple[Callable[..., Any], "ScheduledAction"]] = []
-
-         for _, member in members:
-             scheduled_action = ScheduledAction.get_scheduled_action(member)
-
-             if scheduled_action is None:
-                 continue
-
-             scheduled_actions.append((member, scheduled_action))
-
-         return scheduled_actions
-
      @staticmethod
      def get_function_id(
          func: Callable[..., Any],
@@ -94,3 +88,44 @@ class ScheduledAction:
          This is used to identify the scheduled action in the message broker.
          """
          return f"{func.__module__}.{func.__qualname__}"
+
+
+ @dataclass(frozen=True)
+ class ScheduledActionData:
+     spec: ScheduledAction
+     controller_member: ControllerMemberReflect
+     callable: Callable[..., Awaitable[None]]
+
+
+ def get_type_scheduled_actions(
+     instance: Any,
+ ) -> list[ScheduledActionData]:
+
+     _, member_metadata_map = inspect_controller(instance.__class__)
+
+     members = inspect.getmembers(instance, predicate=inspect.ismethod)
+
+     scheduled_actions: list[ScheduledActionData] = []
+
+     for name, member in members:
+         scheduled_action = ScheduledAction.get_scheduled_action(member)
+
+         if scheduled_action is None:
+             continue
+
+         if name not in member_metadata_map:
+             raise Exception(
+                 f"Member '{name}' is not a valid controller member in '{instance.__class__.__name__}'"
+             )
+
+         member_metadata = member_metadata_map[name]
+
+         scheduled_actions.append(
+             ScheduledActionData(
+                 callable=member,
+                 spec=scheduled_action,
+                 controller_member=member_metadata,
+             )
+         )
+
+     return scheduled_actions
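
For orientation (not part of the diff): the module-level get_type_scheduled_actions now returns ScheduledActionData entries instead of (callable, ScheduledAction) tuples, and the decorator gains a name used for filtering. A rough usage sketch, assuming a hypothetical controller class that the application has registered (so inspect_controller can resolve its members) and an illustrative cron value:

    from jararaca.scheduler.decorators import (
        ScheduledAction,
        get_type_scheduled_actions,
    )

    class ReportsController:  # hypothetical controller registered with the app
        @ScheduledAction(cron="*/5 * * * *", name="refresh-reports")
        async def refresh_reports(self) -> None:
            ...

    # Each entry carries .spec (the decorator options, including the new name),
    # .callable (the bound method) and .controller_member (reflect metadata).
    for action in get_type_scheduled_actions(ReportsController()):
        print(action.spec.name, ScheduledAction.get_function_id(action.callable))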
jararaca/tools/app_config/interceptor.py
@@ -7,9 +7,9 @@ from pydantic import BaseModel
 
  from jararaca.core.providers import Token
  from jararaca.microservice import (
-     AppContext,
      AppInterceptor,
      AppInterceptorWithLifecycle,
+     AppTransactionContext,
      Container,
      Microservice,
  )
@@ -40,7 +40,9 @@ class AppConfigurationInterceptor(AppInterceptor, AppInterceptorWithLifecycle):
          self.config_parser = config_parser
 
      @asynccontextmanager
-     async def intercept(self, app_context: AppContext) -> AsyncGenerator[None, None]:
+     async def intercept(
+         self, app_context: AppTransactionContext
+     ) -> AsyncGenerator[None, None]:
          yield
 
      def instance_basemodels(self, basemodel_type: Type[BaseModel]) -> BaseModel:
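
For orientation (not part of the diff): interceptors now receive an AppTransactionContext instead of an AppContext. A minimal sketch of an interceptor written against the new signature; the class and its body are hypothetical, only the import names and the shape of intercept() come from the hunks above:

    from contextlib import asynccontextmanager
    from typing import AsyncGenerator

    from jararaca.microservice import AppInterceptor, AppTransactionContext

    class TimingInterceptor(AppInterceptor):  # hypothetical example class
        @asynccontextmanager
        async def intercept(
            self, app_context: AppTransactionContext
        ) -> AsyncGenerator[None, None]:
            # work before the intercepted transaction could go here
            yield
            # work after the intercepted transaction could go here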
jararaca/utils/rabbitmq_utils.py
@@ -1,4 +1,9 @@
+ import logging
+
  from aio_pika.abc import AbstractChannel, AbstractExchange, AbstractQueue
+ from aio_pika.exceptions import AMQPError, ChannelClosed, ChannelNotFoundEntity
+
+ logger = logging.getLogger(__name__)
 
 
  class RabbitmqUtils:
@@ -6,6 +11,39 @@ class RabbitmqUtils:
      DEAD_LETTER_EXCHANGE = "dlx"
      DEAD_LETTER_QUEUE = "dlq"
 
+     # Note: get_worker_v1_queue method is already defined above
+
+     DEAD_LETTER_EXCHANGE = "dlx"
+     DEAD_LETTER_QUEUE = "dlq"
+
+     @classmethod
+     async def get_dl_exchange(cls, channel: AbstractChannel) -> AbstractExchange:
+         """
+         Get the Dead Letter Exchange (DLX) for the given channel.
+         """
+         try:
+             return await channel.get_exchange(
+                 cls.DEAD_LETTER_EXCHANGE,
+             )
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 f"Dead Letter Exchange '{cls.DEAD_LETTER_EXCHANGE}' does not exist. "
+                 f"Please use the declare command to create it first. Error: {e}"
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 f"Channel closed while getting Dead Letter Exchange '{cls.DEAD_LETTER_EXCHANGE}'. "
+                 f"Error: {e}"
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 f"AMQP error while getting Dead Letter Exchange '{cls.DEAD_LETTER_EXCHANGE}'. "
+                 f"Error: {e}"
+             )
+             raise
+
      @classmethod
      async def declare_dl_exchange(
          cls, channel: AbstractChannel, passive: bool
@@ -13,7 +51,7 @@ class RabbitmqUtils:
          """
          Declare a Dead Letter Exchange (DLX) for the given channel.
          """
-         await channel.set_qos(prefetch_count=1)
+
          return await channel.declare_exchange(
              cls.DEAD_LETTER_EXCHANGE,
              passive=passive,
@@ -22,6 +60,34 @@ class RabbitmqUtils:
              auto_delete=False,
          )
 
+     @classmethod
+     async def get_dl_queue(cls, channel: AbstractChannel) -> AbstractQueue:
+         """
+         Get the Dead Letter Queue (DLQ) for the given channel.
+         """
+         try:
+             return await channel.get_queue(
+                 cls.DEAD_LETTER_QUEUE,
+             )
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 f"Dead Letter Queue '{cls.DEAD_LETTER_QUEUE}' does not exist. "
+                 f"Please use the declare command to create it first. Error: {e}"
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 f"Channel closed while getting Dead Letter Queue '{cls.DEAD_LETTER_QUEUE}'. "
+                 f"Error: {e}"
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 f"AMQP error while getting Dead Letter Queue '{cls.DEAD_LETTER_QUEUE}'. "
+                 f"Error: {e}"
+             )
+             raise
+
      @classmethod
      async def declare_dl_queue(
          cls, channel: AbstractChannel, passive: bool
@@ -29,7 +95,7 @@ class RabbitmqUtils:
          """
          Declare a Dead Letter Queue (DLQ) for the given queue.
          """
-         await channel.set_qos(prefetch_count=1)
+
          return await channel.declare_queue(
              cls.DEAD_LETTER_QUEUE,
              durable=True,
@@ -40,6 +106,36 @@ class RabbitmqUtils:
              },
          )
 
+     @classmethod
+     async def get_dl_kit(
+         cls,
+         channel: AbstractChannel,
+     ) -> tuple[AbstractExchange, AbstractQueue]:
+         """
+         Get the Dead Letter Exchange and Queue (DLX and DLQ) for the given channel.
+         """
+         try:
+             dlx = await cls.get_dl_exchange(channel)
+             dlq = await cls.get_dl_queue(channel)
+             return dlx, dlq
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 f"Dead Letter infrastructure does not exist completely. "
+                 f"Please use the declare command to create it first. Error: {e}"
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 f"Channel closed while getting Dead Letter infrastructure. "
+                 f"Error: {e}"
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 f"AMQP error while getting Dead Letter infrastructure. " f"Error: {e}"
+             )
+             raise
+
      @classmethod
      async def declare_dl_kit(
          cls,
@@ -54,6 +150,33 @@ class RabbitmqUtils:
          await dlq.bind(dlx, routing_key=cls.DEAD_LETTER_EXCHANGE)
          return dlx, dlq
 
+     @classmethod
+     async def get_main_exchange(
+         cls, channel: AbstractChannel, exchange_name: str
+     ) -> AbstractExchange:
+         """
+         Get the main exchange for the given channel.
+         """
+         try:
+             return await channel.get_exchange(exchange_name)
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 f"Exchange '{exchange_name}' does not exist. "
+                 f"Please use the declare command to create it first. Error: {e}"
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 f"Channel closed while getting exchange '{exchange_name}'. "
+                 f"Error: {e}"
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 f"AMQP error while getting exchange '{exchange_name}'. " f"Error: {e}"
+             )
+             raise
+
      @classmethod
      async def declare_main_exchange(
          cls, channel: AbstractChannel, exchange_name: str, passive: bool
@@ -61,7 +184,7 @@ class RabbitmqUtils:
          """
          Declare a main exchange for the given channel.
          """
-         await channel.set_qos(prefetch_count=1)
+
          return await channel.declare_exchange(
              exchange_name,
              passive=passive,
@@ -71,7 +194,35 @@ class RabbitmqUtils:
          )
 
      @classmethod
-     async def declare_queue(
+     async def get_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+     ) -> AbstractQueue:
+         """
+         Get a queue with the given name.
+         """
+         try:
+             return await channel.get_queue(queue_name)
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 f"Queue '{queue_name}' does not exist. "
+                 f"Please use the declare command to create it first. Error: {e}"
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 f"Channel closed while getting queue '{queue_name}'. " f"Error: {e}"
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 f"AMQP error while getting queue '{queue_name}'. " f"Error: {e}"
+             )
+             raise
+
+     @classmethod
+     async def declare_worker_queue(
          cls,
          channel: AbstractChannel,
          queue_name: str,
@@ -80,7 +231,7 @@ class RabbitmqUtils:
          """
          Declare a queue with the given name and properties.
          """
-         await channel.set_qos(prefetch_count=1)
+
          return await channel.declare_queue(
              queue_name,
              passive=passive,
@@ -90,3 +241,106 @@ class RabbitmqUtils:
                  "x-dead-letter-routing-key": cls.DEAD_LETTER_EXCHANGE,
              },
          )
+
+     @classmethod
+     async def get_scheduled_action_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+     ) -> AbstractQueue:
+         """
+         Get a scheduled action queue.
+         """
+         try:
+             return await channel.get_queue(queue_name)
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 f"Scheduler queue '{queue_name}' does not exist. "
+                 f"Please use the declare command to create it first. Error: {e}"
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 f"Channel closed while getting scheduler queue '{queue_name}'. "
+                 f"Error: {e}"
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 f"AMQP error while getting scheduler queue '{queue_name}'. "
+                 f"Error: {e}"
+             )
+             raise
+
+     @classmethod
+     async def declare_scheduled_action_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+         passive: bool = False,
+     ) -> AbstractQueue:
+         """
+         Declare a scheduled action queue with simple durable configuration.
+         The queue has a max length of 1 to ensure only one scheduled task
+         is processed at a time.
+         """
+         return await channel.declare_queue(
+             name=queue_name,
+             durable=True,
+             passive=passive,
+             arguments={
+                 "x-max-length": 1,
+             },
+         )
+
+     @classmethod
+     async def delete_exchange(
+         cls,
+         channel: AbstractChannel,
+         exchange_name: str,
+         if_unused: bool = False,
+     ) -> None:
+         """
+         Delete an exchange.
+         """
+         try:
+             await channel.exchange_delete(
+                 exchange_name=exchange_name,
+                 if_unused=if_unused,
+             )
+         except ChannelNotFoundEntity:
+             # Exchange might not exist, which is fine
+             logger.info(
+                 f"Exchange '{exchange_name}' does not exist, nothing to delete."
+             )
+         except ChannelClosed as e:
+             logger.warning(
+                 f"Channel closed while deleting exchange '{exchange_name}': {e}"
+             )
+         except AMQPError as e:
+             logger.warning(f"AMQP error while deleting exchange '{exchange_name}': {e}")
+
+     @classmethod
+     async def delete_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+         if_unused: bool = False,
+         if_empty: bool = False,
+     ) -> None:
+         """
+         Delete a queue.
+         """
+         try:
+             await channel.queue_delete(
+                 queue_name=queue_name,
+                 if_unused=if_unused,
+                 if_empty=if_empty,
+             )
+         except ChannelNotFoundEntity:
+             # Queue might not exist, which is fine
+             logger.info(f"Queue '{queue_name}' does not exist, nothing to delete.")
+         except ChannelClosed as e:
+             logger.warning(f"Channel closed while deleting queue '{queue_name}': {e}")
+         except AMQPError as e:
+             logger.warning(f"AMQP error while deleting queue '{queue_name}': {e}")