jararaca 0.2.37a12__py3-none-any.whl → 0.4.0a5__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (91)
  1. README.md +121 -0
  2. jararaca/__init__.py +267 -15
  3. jararaca/__main__.py +4 -0
  4. jararaca/broker_backend/__init__.py +106 -0
  5. jararaca/broker_backend/mapper.py +25 -0
  6. jararaca/broker_backend/redis_broker_backend.py +168 -0
  7. jararaca/cli.py +840 -103
  8. jararaca/common/__init__.py +3 -0
  9. jararaca/core/__init__.py +3 -0
  10. jararaca/core/providers.py +4 -0
  11. jararaca/core/uow.py +55 -16
  12. jararaca/di.py +4 -0
  13. jararaca/files/entity.py.mako +4 -0
  14. jararaca/lifecycle.py +6 -2
  15. jararaca/messagebus/__init__.py +5 -1
  16. jararaca/messagebus/bus_message_controller.py +4 -0
  17. jararaca/messagebus/consumers/__init__.py +3 -0
  18. jararaca/messagebus/decorators.py +90 -85
  19. jararaca/messagebus/implicit_headers.py +49 -0
  20. jararaca/messagebus/interceptors/__init__.py +3 -0
  21. jararaca/messagebus/interceptors/aiopika_publisher_interceptor.py +95 -37
  22. jararaca/messagebus/interceptors/publisher_interceptor.py +42 -0
  23. jararaca/messagebus/message.py +31 -0
  24. jararaca/messagebus/publisher.py +47 -4
  25. jararaca/messagebus/worker.py +1615 -135
  26. jararaca/microservice.py +248 -36
  27. jararaca/observability/constants.py +7 -0
  28. jararaca/observability/decorators.py +177 -16
  29. jararaca/observability/fastapi_exception_handler.py +37 -0
  30. jararaca/observability/hooks.py +109 -0
  31. jararaca/observability/interceptor.py +8 -2
  32. jararaca/observability/providers/__init__.py +3 -0
  33. jararaca/observability/providers/otel.py +213 -18
  34. jararaca/persistence/base.py +40 -3
  35. jararaca/persistence/exports.py +4 -0
  36. jararaca/persistence/interceptors/__init__.py +3 -0
  37. jararaca/persistence/interceptors/aiosqa_interceptor.py +187 -23
  38. jararaca/persistence/interceptors/constants.py +5 -0
  39. jararaca/persistence/interceptors/decorators.py +50 -0
  40. jararaca/persistence/session.py +3 -0
  41. jararaca/persistence/sort_filter.py +4 -0
  42. jararaca/persistence/utilities.py +74 -32
  43. jararaca/presentation/__init__.py +3 -0
  44. jararaca/presentation/decorators.py +170 -82
  45. jararaca/presentation/exceptions.py +23 -0
  46. jararaca/presentation/hooks.py +4 -0
  47. jararaca/presentation/http_microservice.py +4 -0
  48. jararaca/presentation/server.py +120 -41
  49. jararaca/presentation/websocket/__init__.py +3 -0
  50. jararaca/presentation/websocket/base_types.py +4 -0
  51. jararaca/presentation/websocket/context.py +34 -4
  52. jararaca/presentation/websocket/decorators.py +8 -41
  53. jararaca/presentation/websocket/redis.py +280 -53
  54. jararaca/presentation/websocket/types.py +6 -2
  55. jararaca/presentation/websocket/websocket_interceptor.py +74 -23
  56. jararaca/reflect/__init__.py +3 -0
  57. jararaca/reflect/controller_inspect.py +81 -0
  58. jararaca/reflect/decorators.py +238 -0
  59. jararaca/reflect/metadata.py +76 -0
  60. jararaca/rpc/__init__.py +3 -0
  61. jararaca/rpc/http/__init__.py +101 -0
  62. jararaca/rpc/http/backends/__init__.py +14 -0
  63. jararaca/rpc/http/backends/httpx.py +43 -9
  64. jararaca/rpc/http/backends/otel.py +4 -0
  65. jararaca/rpc/http/decorators.py +378 -113
  66. jararaca/rpc/http/httpx.py +3 -0
  67. jararaca/scheduler/__init__.py +3 -0
  68. jararaca/scheduler/beat_worker.py +758 -0
  69. jararaca/scheduler/decorators.py +89 -28
  70. jararaca/scheduler/types.py +11 -0
  71. jararaca/tools/app_config/__init__.py +3 -0
  72. jararaca/tools/app_config/decorators.py +7 -19
  73. jararaca/tools/app_config/interceptor.py +10 -4
  74. jararaca/tools/typescript/__init__.py +3 -0
  75. jararaca/tools/typescript/decorators.py +120 -0
  76. jararaca/tools/typescript/interface_parser.py +1126 -189
  77. jararaca/utils/__init__.py +3 -0
  78. jararaca/utils/rabbitmq_utils.py +372 -0
  79. jararaca/utils/retry.py +148 -0
  80. jararaca-0.4.0a5.dist-info/LICENSE +674 -0
  81. jararaca-0.4.0a5.dist-info/LICENSES/GPL-3.0-or-later.txt +232 -0
  82. {jararaca-0.2.37a12.dist-info → jararaca-0.4.0a5.dist-info}/METADATA +14 -7
  83. jararaca-0.4.0a5.dist-info/RECORD +88 -0
  84. {jararaca-0.2.37a12.dist-info → jararaca-0.4.0a5.dist-info}/WHEEL +1 -1
  85. pyproject.toml +131 -0
  86. jararaca/messagebus/types.py +0 -30
  87. jararaca/scheduler/scheduler.py +0 -154
  88. jararaca/tools/metadata.py +0 -47
  89. jararaca-0.2.37a12.dist-info/RECORD +0 -63
  90. /jararaca-0.2.37a12.dist-info/LICENSE → /LICENSE +0 -0
  91. {jararaca-0.2.37a12.dist-info → jararaca-0.4.0a5.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,3 @@
+ # SPDX-FileCopyrightText: 2025 Lucas S
+ #
+ # SPDX-License-Identifier: GPL-3.0-or-later
@@ -0,0 +1,372 @@
+ # SPDX-FileCopyrightText: 2025 Lucas S
+ #
+ # SPDX-License-Identifier: GPL-3.0-or-later
+
+ import logging
+
+ from aio_pika.abc import AbstractChannel, AbstractExchange, AbstractQueue
+ from aio_pika.exceptions import AMQPError, ChannelClosed, ChannelNotFoundEntity
+
+ logger = logging.getLogger(__name__)
+
+
+ class RabbitmqUtils:
+
+     DEAD_LETTER_EXCHANGE = "dlx"
+     DEAD_LETTER_QUEUE = "dlq"
+
+     @classmethod
+     async def get_dl_exchange(cls, channel: AbstractChannel) -> AbstractExchange:
+         """
+         Get the Dead Letter Exchange (DLX) for the given channel.
+         """
+         try:
+             return await channel.get_exchange(
+                 cls.DEAD_LETTER_EXCHANGE,
+             )
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 "Dead Letter Exchange '%s' does not exist. "
+                 "Please use the declare command to create it first. Error: %s",
+                 cls.DEAD_LETTER_EXCHANGE,
+                 e,
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 "Channel closed while getting Dead Letter Exchange '%s'. " "Error: %s",
+                 cls.DEAD_LETTER_EXCHANGE,
+                 e,
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 "AMQP error while getting Dead Letter Exchange '%s'. " "Error: %s",
+                 cls.DEAD_LETTER_EXCHANGE,
+                 e,
+             )
+             raise
+
+     @classmethod
+     async def declare_dl_exchange(
+         cls, channel: AbstractChannel, passive: bool
+     ) -> AbstractExchange:
+         """
+         Declare a Dead Letter Exchange (DLX) for the given channel.
+         """
+
+         return await channel.declare_exchange(
+             cls.DEAD_LETTER_EXCHANGE,
+             passive=passive,
+             type="direct",
+             durable=True,
+             auto_delete=False,
+         )
+
+     @classmethod
+     async def get_dl_queue(cls, channel: AbstractChannel) -> AbstractQueue:
+         """
+         Get the Dead Letter Queue (DLQ) for the given channel.
+         """
+         try:
+             return await channel.get_queue(
+                 cls.DEAD_LETTER_QUEUE,
+             )
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 "Dead Letter Queue '%s' does not exist. "
+                 "Please use the declare command to create it first. Error: %s",
+                 cls.DEAD_LETTER_QUEUE,
+                 e,
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 "Channel closed while getting Dead Letter Queue '%s'. " "Error: %s",
+                 cls.DEAD_LETTER_QUEUE,
+                 e,
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 "AMQP error while getting Dead Letter Queue '%s'. " "Error: %s",
+                 cls.DEAD_LETTER_QUEUE,
+                 e,
+             )
+             raise
+
+     @classmethod
+     async def declare_dl_queue(
+         cls, channel: AbstractChannel, passive: bool
+     ) -> AbstractQueue:
+         """
+         Declare a Dead Letter Queue (DLQ) for the given queue.
+         """
+
+         return await channel.declare_queue(
+             cls.DEAD_LETTER_QUEUE,
+             durable=True,
+             passive=passive,
+             arguments={
+                 "x-dead-letter-exchange": "",
+                 "x-dead-letter-routing-key": cls.DEAD_LETTER_EXCHANGE,
+             },
+         )
+
+     @classmethod
+     async def get_dl_kit(
+         cls,
+         channel: AbstractChannel,
+     ) -> tuple[AbstractExchange, AbstractQueue]:
+         """
+         Get the Dead Letter Exchange and Queue (DLX and DLQ) for the given channel.
+         """
+         try:
+             dlx = await cls.get_dl_exchange(channel)
+             dlq = await cls.get_dl_queue(channel)
+             return dlx, dlq
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 "Dead Letter infrastructure does not exist completely. "
+                 "Please use the declare command to create it first. Error: %s",
+                 e,
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 "Channel closed while getting Dead Letter infrastructure. " "Error: %s",
+                 e,
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 "AMQP error while getting Dead Letter infrastructure. " "Error: %s", e
+             )
+             raise
+
+     @classmethod
+     async def declare_dl_kit(
+         cls,
+         channel: AbstractChannel,
+         passive: bool = False,
+     ) -> tuple[AbstractExchange, AbstractQueue]:
+         """
+         Declare a Dead Letter Exchange and Queue (DLX and DLQ) for the given channel.
+         """
+         dlx = await cls.declare_dl_exchange(channel, passive=passive)
+         dlq = await cls.declare_dl_queue(channel, passive=passive)
+         await dlq.bind(dlx, routing_key=cls.DEAD_LETTER_EXCHANGE)
+         return dlx, dlq
+
+     @classmethod
+     async def get_main_exchange(
+         cls, channel: AbstractChannel, exchange_name: str
+     ) -> AbstractExchange:
+         """
+         Get the main exchange for the given channel.
+         """
+         try:
+             return await channel.get_exchange(exchange_name)
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 "Exchange '%s' does not exist. "
+                 "Please use the declare command to create it first. Error: %s",
+                 exchange_name,
+                 e,
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 "Channel closed while getting exchange '%s'. " "Error: %s",
+                 exchange_name,
+                 e,
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 "AMQP error while getting exchange '%s'. " "Error: %s", exchange_name, e
+             )
+             raise
+
+     @classmethod
+     async def declare_main_exchange(
+         cls, channel: AbstractChannel, exchange_name: str, passive: bool
+     ) -> AbstractExchange:
+         """
+         Declare a main exchange for the given channel.
+         """
+
+         return await channel.declare_exchange(
+             exchange_name,
+             passive=passive,
+             type="topic",
+             durable=True,
+             auto_delete=False,
+         )
+
+     @classmethod
+     async def get_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+     ) -> AbstractQueue:
+         """
+         Get a queue with the given name.
+         """
+         try:
+             return await channel.get_queue(queue_name)
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 "Queue '%s' does not exist. "
+                 "Please use the declare command to create it first. Error: %s",
+                 queue_name,
+                 e,
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 "Channel closed while getting queue '%s'. " "Error: %s", queue_name, e
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 "AMQP error while getting queue '%s'. " "Error: %s", queue_name, e
+             )
+             raise
+
+     @classmethod
+     async def declare_worker_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+         passive: bool = False,
+     ) -> AbstractQueue:
+         """
+         Declare a queue with the given name and properties.
+         """
+
+         return await channel.declare_queue(
+             queue_name,
+             passive=passive,
+             durable=True,
+             arguments={
+                 "x-dead-letter-exchange": cls.DEAD_LETTER_EXCHANGE,
+                 "x-dead-letter-routing-key": cls.DEAD_LETTER_EXCHANGE,
+             },
+         )
+
+     @classmethod
+     async def get_scheduled_action_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+     ) -> AbstractQueue:
+         """
+         Get a scheduled action queue.
+         """
+         try:
+             return await channel.get_queue(queue_name)
+         except ChannelNotFoundEntity as e:
+             logger.error(
+                 "Scheduler queue '%s' does not exist. "
+                 "Please use the declare command to create it first. Error: %s",
+                 queue_name,
+                 e,
+             )
+             raise
+         except ChannelClosed as e:
+             logger.error(
+                 "Channel closed while getting scheduler queue '%s'. " "Error: %s",
+                 queue_name,
+                 e,
+             )
+             raise
+         except AMQPError as e:
+             logger.error(
+                 "AMQP error while getting scheduler queue '%s'. " "Error: %s",
+                 queue_name,
+                 e,
+             )
+             raise
+
+     @classmethod
+     async def declare_scheduled_action_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+         passive: bool = False,
+     ) -> AbstractQueue:
+         """
+         Declare a scheduled action queue with simple durable configuration.
+         The queue has a max length of 1 to ensure only one scheduled task
+         is processed at a time.
+         """
+         return await channel.declare_queue(
+             name=queue_name,
+             durable=True,
+             passive=passive,
+             arguments={
+                 "x-max-length": 1,
+             },
+         )
+
+     @classmethod
+     async def delete_exchange(
+         cls,
+         channel: AbstractChannel,
+         exchange_name: str,
+         if_unused: bool = False,
+     ) -> None:
+         """
+         Delete an exchange.
+         """
+         try:
+             await channel.exchange_delete(
+                 exchange_name=exchange_name,
+                 if_unused=if_unused,
+             )
+         except ChannelNotFoundEntity:
+             # Exchange might not exist, which is fine
+             logger.debug(
+                 "Exchange '%s' does not exist, nothing to delete.", exchange_name
+             )
+         except ChannelClosed as e:
+             logger.warning(
+                 "Channel closed while deleting exchange '%s': %s", exchange_name, e
+             )
+         except AMQPError as e:
+             logger.warning(
+                 "AMQP error while deleting exchange '%s': %s", exchange_name, e
+             )
+
+     @classmethod
+     async def delete_queue(
+         cls,
+         channel: AbstractChannel,
+         queue_name: str,
+         if_unused: bool = False,
+         if_empty: bool = False,
+     ) -> None:
+         """
+         Delete a queue.
+         """
+         try:
+             await channel.queue_delete(
+                 queue_name=queue_name,
+                 if_unused=if_unused,
+                 if_empty=if_empty,
+             )
+         except ChannelNotFoundEntity:
+             # Queue might not exist, which is fine
+             logger.debug("Queue '%s' does not exist, nothing to delete.", queue_name)
+         except ChannelClosed as e:
+             logger.warning(
+                 "Channel closed while deleting queue '%s': %s", queue_name, e
+             )
+         except AMQPError as e:
+             logger.warning("AMQP error while deleting queue '%s': %s", queue_name, e)
@@ -0,0 +1,148 @@
+ # SPDX-FileCopyrightText: 2025 Lucas S
+ #
+ # SPDX-License-Identifier: GPL-3.0-or-later
+
+ import asyncio
+ import logging
+ import random
+ from functools import wraps
+ from typing import Awaitable, Callable, Optional, ParamSpec, TypeVar
+
+ logger = logging.getLogger(__name__)
+
+ P = ParamSpec("P")
+ T = TypeVar("T")
+
+
+ class RetryConfig:
+     """Configuration for the retry mechanism."""
+
+     def __init__(
+         self,
+         max_retries: int = 5,
+         initial_delay: float = 1.0,
+         max_delay: float = 60.0,
+         backoff_factor: float = 2.0,
+         jitter: bool = True,
+     ):
+         """
+         Initialize retry configuration.
+
+         Args:
+             max_retries: Maximum number of retry attempts (default: 5)
+             initial_delay: Initial delay in seconds between retries (default: 1.0)
+             max_delay: Maximum delay in seconds between retries (default: 60.0)
+             backoff_factor: Multiplier for the delay after each retry (default: 2.0)
+             jitter: Whether to add randomness to the delay to prevent thundering herd (default: True)
+         """
+         self.max_retries = max_retries
+         self.initial_delay = initial_delay
+         self.max_delay = max_delay
+         self.backoff_factor = backoff_factor
+         self.jitter = jitter
+
+
+ E = TypeVar("E", bound=Exception)
+
+
+ async def retry_with_backoff(
+     fn: Callable[[], Awaitable[T]],
+     retry_config: Optional[RetryConfig] = None,
+     on_retry_callback: Optional[Callable[[int, E, float], None]] = None,
+     retry_exceptions: tuple[type[E], ...] = (),
+ ) -> T:
+     """
+     Execute a function with an exponential backoff retry mechanism.
+
+     Args:
+         fn: The zero-argument async callable to execute with retry
+         retry_config: Configuration for the retry mechanism
+         on_retry_callback: Optional callback called on each retry with the retry count, exception, and next delay
+         retry_exceptions: Tuple of exception types that should trigger a retry
+
+     Returns:
+         The result of the function if successful
+
+     Raises:
+         The last exception encountered if all retries fail
+     """
+     if retry_config is None:
+         retry_config = RetryConfig()
+
+     last_exception = None
+     delay = retry_config.initial_delay
+
+     for retry_count in range(retry_config.max_retries + 1):
+         try:
+             return await fn()
+         except retry_exceptions as e:
+             last_exception = e
+
+             if retry_count >= retry_config.max_retries:
+                 logger.error(
+                     "Max retries (%s) exceeded: %s", retry_config.max_retries, e
+                 )
+                 raise
+
+             # Calculate next delay with exponential backoff
+             if retry_count > 0:  # Don't increase delay on the first failure
+                 delay = min(delay * retry_config.backoff_factor, retry_config.max_delay)
+
+             # Apply jitter if configured (±25% randomness)
+             if retry_config.jitter:
+                 jitter_amount = delay * 0.25
+                 delay = delay + random.uniform(-jitter_amount, jitter_amount)
+                 # Ensure delay doesn't go negative due to jitter
+                 delay = max(delay, 0.1)
+
+             logger.warning(
+                 "Retry %s/%s after error: %s. Retrying in %.2fs",
+                 retry_count + 1,
+                 retry_config.max_retries,
+                 e,
+                 delay,
+             )
+
+             # Call the optional retry callback if provided
+             if on_retry_callback:
+                 on_retry_callback(retry_count, e, delay)
+
+             await asyncio.sleep(delay)
+
+     # This should never be reached with the current implementation
+     if last_exception:
+         raise last_exception
+     raise RuntimeError("Unexpected error in retry logic")
+
+
+ def with_retry(
+     retry_config: Optional[RetryConfig] = None,
+     retry_exceptions: tuple[type[Exception], ...] = (Exception,),
+ ) -> Callable[[Callable[P, Awaitable[T]]], Callable[P, Awaitable[T]]]:
+     """
+     Decorator to wrap an async function with retry logic.
+
+     Args:
+         retry_config: Configuration for the retry mechanism
+         retry_exceptions: Tuple of exception types that should trigger a retry
+
+     Returns:
+         Decorated function with retry mechanism
+     """
+
+     def decorator(fn: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]:
+         @wraps(fn)
+         async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+             return await retry_with_backoff(
+                 lambda: fn(*args, **kwargs),
+                 retry_config=retry_config,
+                 retry_exceptions=retry_exceptions,
+             )
+
+         return wrapper
+
+     return decorator
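The with_retry decorator wraps any async callable, forwarding its arguments through retry_with_backoff. A short usage sketch, not part of the diff; the HTTP call, the endpoint, and the choice of httpx.TransportError as the retryable exception are illustrative assumptions.

import asyncio

import httpx

from jararaca.utils.retry import RetryConfig, with_retry


@with_retry(
    retry_config=RetryConfig(max_retries=3, initial_delay=0.5, backoff_factor=2.0),
    retry_exceptions=(httpx.TransportError,),
)
async def fetch_health_status() -> int:
    # Hypothetical flaky network call; transport errors trigger the backoff.
    async with httpx.AsyncClient() as client:
        response = await client.get("https://example.com/health")
        return response.status_code


if __name__ == "__main__":
    print(asyncio.run(fetch_health_status()))

With jitter enabled (the default), the delays between attempts are roughly 0.5s, 1.0s, and 2.0s, each perturbed by up to ±25%, before the last exception is re-raised.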