jararaca 0.3.16__tar.gz → 0.3.17__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of jararaca might be problematic.
- {jararaca-0.3.16 → jararaca-0.3.17}/PKG-INFO +1 -1
- {jararaca-0.3.16 → jararaca-0.3.17}/pyproject.toml +1 -1
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/decorators.py +1 -1
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/worker.py +111 -237
- {jararaca-0.3.16 → jararaca-0.3.17}/LICENSE +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/README.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/CNAME +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/architecture.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/assets/_f04774c9-7e05-4da4-8b17-8be23f6a1475.jpeg +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/assets/_f04774c9-7e05-4da4-8b17-8be23f6a1475.webp +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/assets/tracing_example.png +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/expose-type.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/http-rpc.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/index.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/interceptors.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/messagebus.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/retry.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/scheduler.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/stylesheets/custom.css +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/docs/websocket.md +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/__main__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/broker_backend/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/broker_backend/mapper.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/broker_backend/redis_broker_backend.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/cli.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/common/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/core/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/core/providers.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/core/uow.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/di.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/files/entity.py.mako +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/lifecycle.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/bus_message_controller.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/consumers/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/interceptors/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/interceptors/aiopika_publisher_interceptor.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/interceptors/publisher_interceptor.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/message.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/messagebus/publisher.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/microservice.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/observability/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/observability/interceptor.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/observability/providers/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/observability/providers/otel.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/base.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/exports.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/interceptors/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/interceptors/aiosqa_interceptor.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/interceptors/constants.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/interceptors/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/session.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/sort_filter.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/persistence/utilities.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/hooks.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/http_microservice.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/server.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/base_types.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/context.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/redis.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/types.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/presentation/websocket/websocket_interceptor.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/py.typed +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/reflect/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/reflect/controller_inspect.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/reflect/metadata.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/http/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/http/backends/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/http/backends/httpx.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/http/backends/otel.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/http/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/rpc/http/httpx.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/scheduler/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/scheduler/beat_worker.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/scheduler/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/scheduler/types.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/tools/app_config/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/tools/app_config/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/tools/app_config/interceptor.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/tools/typescript/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/tools/typescript/decorators.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/tools/typescript/interface_parser.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/utils/__init__.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/utils/rabbitmq_utils.py +0 -0
- {jararaca-0.3.16 → jararaca-0.3.17}/src/jararaca/utils/retry.py +0 -0
src/jararaca/messagebus/decorators.py

@@ -69,7 +69,7 @@ class MessageHandler(Generic[INHERITS_MESSAGE_CO]):
 class MessageHandlerData:
     message_type: type[Any]
     spec: MessageHandler[Message]
-    instance_callable: Callable[
+    instance_callable: Callable[..., Awaitable[None]]
     controller_member: ControllerMemberReflect


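Note: the new annotation says a handler must be a callable that can be invoked with any arguments and returns an awaitable resolving to None. A minimal illustrative sketch (the handler name and message argument below are made up, not taken from jararaca):

    from collections.abc import Awaitable, Callable
    from typing import Any

    async def handle_order_created(message: Any) -> None:
        # Any coroutine function returning None satisfies Callable[..., Awaitable[None]]
        print(f"processing {message}")

    handler: Callable[..., Awaitable[None]] = handle_order_created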
src/jararaca/messagebus/worker.py

@@ -188,11 +188,8 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         # Connection resilience attributes
         self.connection_healthy = False
         self.connection_lock = asyncio.Lock()
-        self.reconnection_event = asyncio.Event()
-        self.reconnection_in_progress = False
         self.consumer_tags: dict[str, str] = {}  # Track consumer tags for cleanup
         self.health_check_task: asyncio.Task[Any] | None = None
-        self.reconnection_task: asyncio.Task[Any] | None = None

     async def _verify_infrastructure(self) -> bool:
         """
@@ -229,10 +226,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         routing_key = f"{handler.message_type.MESSAGE_TOPIC}.#"

         async def setup_consumer() -> None:
-            # Wait for connection to be healthy if reconnection is in progress
-            if self.reconnection_in_progress:
-                await self.reconnection_event.wait()
-
             # Create a channel using the context manager
             async with self.create_channel(queue_name) as channel:
                 queue = await RabbitmqUtils.get_queue(
@@ -289,10 +282,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         routing_key = queue_name

         async def setup_consumer() -> None:
-            # Wait for connection to be healthy if reconnection is in progress
-            if self.reconnection_in_progress:
-                await self.reconnection_event.wait()
-
             # Create a channel using the context manager
             async with self.create_channel(queue_name) as channel:
                 queue = await RabbitmqUtils.get_queue(
@@ -341,106 +330,107 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         Main consume method that sets up all message handlers and scheduled actions with retry mechanisms.
         """
         # Establish initial connection
-
-        self.
-
-        # Start connection health monitoring
-        self.health_check_task = asyncio.create_task(
-            self._monitor_connection_health()
-        )
+        try:
+            async with self.connect() as connection:
+                self.connection_healthy = True

-
-
-
-
-            retry_exceptions=(Exception,),
-        )
+                # Start connection health monitoring
+                self.health_check_task = asyncio.create_task(
+                    self._monitor_connection_health()
+                )

-
-
-
+                # Verify infrastructure with retry
+                infra_check_success = await retry_with_backoff(
+                    self._verify_infrastructure,
+                    retry_config=self.config.connection_retry_config,
+                    retry_exceptions=(Exception,),
                 )
-        self.shutdown_event.set()
-        return

-
-
-
-
+                if not infra_check_success:
+                    logger.critical(
+                        "Failed to verify RabbitMQ infrastructure. Shutting down."
+                    )
+                    self.shutdown_event.set()
+                    return

-
+                async def wait_for(
+                    type: str, name: str, coroutine: Awaitable[bool]
+                ) -> tuple[str, str, bool]:
+                    return type, name, await coroutine

-
-        for handler in self.message_handler_set:
-            queue_name = f"{handler.message_type.MESSAGE_TOPIC}.{handler.instance_callable.__module__}.{handler.instance_callable.__qualname__}"
-            self.incoming_map[queue_name] = handler
+                tasks: set[asyncio.Task[tuple[str, str, bool]]] = set()

-
-
-
-
-                    queue_name,
-                    self._setup_message_handler_consumer(handler),
-                )
-            )
-        )
+                # Setup message handlers
+                for handler in self.message_handler_set:
+                    queue_name = f"{handler.message_type.MESSAGE_TOPIC}.{handler.instance_callable.__module__}.{handler.instance_callable.__qualname__}"
+                    self.incoming_map[queue_name] = handler

-
-
-
-
-
-
-
-                    queue_name,
-                    self._setup_scheduled_action_consumer(scheduled_action),
+                    tasks.add(
+                        task := asyncio.create_task(
+                            wait_for(
+                                "message_handler",
+                                queue_name,
+                                self._setup_message_handler_consumer(handler),
+                            )
                         )
                     )
-        )

-
-        for
-
-
-
-
-
-
+                # Setup scheduled actions
+                for scheduled_action in self.scheduled_actions:
+                    queue_name = f"{scheduled_action.callable.__module__}.{scheduled_action.callable.__qualname__}"
+                    tasks.add(
+                        task := asyncio.create_task(
+                            wait_for(
+                                "scheduled_action",
+                                queue_name,
+                                self._setup_scheduled_action_consumer(scheduled_action),
+                            )
                         )
+                    )

-
-
-
-
-
+                async def handle_task_results() -> None:
+                    for task in asyncio.as_completed(tasks):
+                        type, name, success = await task
+                        if success:
+                            logger.info(
+                                f"Successfully set up {type} consumer for {name}"
+                            )
+                        else:
+                            logger.warning(
+                                f"Failed to set up {type} consumer for {name}, will not process messages from this queue"
+                            )

-
-        if self.health_check_task:
-            self.health_check_task.cancel()
-            with suppress(asyncio.CancelledError):
-                await self.health_check_task
+                handle_task_results_task = asyncio.create_task(handle_task_results())

-
-
-
-            with suppress(asyncio.CancelledError):
-                await self.reconnection_task
+                # Wait for shutdown signal
+                await self.shutdown_event.wait()
+                logger.info("Shutdown event received, stopping consumers")

-
-
-
-        for task in tasks:
-            if not task.done():
-                task.cancel()
+                # Cancel health monitoring
+                if self.health_check_task:
+                    self.health_check_task.cancel()
                     with suppress(asyncio.CancelledError):
-            await
-        logger.info("Worker shutting down")
+                        await self.health_check_task

-
-
-
-
-
+                handle_task_results_task.cancel()
+                with suppress(asyncio.CancelledError):
+                    await handle_task_results_task
+                for task in tasks:
+                    if not task.done():
+                        task.cancel()
+                        with suppress(asyncio.CancelledError):
+                            await task
+                logger.info("Worker shutting down")
+
+                # Wait for all tasks to complete
+                await self.wait_all_tasks_done()
+
+                # Close all channels and the connection
+                await self.close_channels_and_connection()
+        except Exception as e:
+            logger.critical(f"Failed to establish initial connection to RabbitMQ: {e}")
+            # Re-raise the exception so it can be caught by the caller
+            raise

     async def wait_all_tasks_done(self) -> None:
         if not self.tasks:
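Note: the rewritten consume() holds a single connection context, fans consumer setup out into asyncio tasks tagged by kind and queue name, and logs each result as it completes. A self-contained sketch of that setup-and-report pattern, with stand-in setup coroutines instead of jararaca's real ones:

    import asyncio
    from collections.abc import Awaitable

    async def wait_for(kind: str, name: str, coro: Awaitable[bool]) -> tuple[str, str, bool]:
        # Tag a setup coroutine with its kind and queue name
        return kind, name, await coro

    async def fake_setup(name: str) -> bool:
        # Stand-in for the real consumer-setup coroutines
        await asyncio.sleep(0.01)
        return name != "broken.queue"

    async def main() -> None:
        tasks = {
            asyncio.create_task(wait_for("message_handler", name, fake_setup(name)))
            for name in ("orders.created", "broken.queue")
        }
        # Report each setup result as soon as it finishes
        for finished in asyncio.as_completed(tasks):
            kind, name, ok = await finished
            print(f"{kind} {name}: {'set up' if ok else 'failed'}")

    asyncio.run(main())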
@@ -478,12 +468,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
             with suppress(asyncio.CancelledError):
                 await self.health_check_task

-        # Cancel reconnection task if running
-        if self.reconnection_task:
-            self.reconnection_task.cancel()
-            with suppress(asyncio.CancelledError):
-                await self.reconnection_task
-
         await self.wait_all_tasks_done()
         await self.close_channels_and_connection()

@@ -492,16 +476,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         Get the channel for a specific queue, or None if not found.
         This helps with error handling when a channel might have been closed.
         """
-        # If reconnection is in progress, wait for it to complete
-        if self.reconnection_in_progress:
-            try:
-                await asyncio.wait_for(self.reconnection_event.wait(), timeout=30.0)
-            except asyncio.TimeoutError:
-                logger.warning(
-                    f"Timeout waiting for reconnection when getting channel for {queue_name}"
-                )
-                return None
-
         if queue_name not in self.channels:
             logger.warning(f"No channel found for queue {queue_name}")
             return None
@@ -530,17 +504,17 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
                     logger.error(
                         f"Failed to recreate channel for {queue_name}: {e}"
                     )
-                    # Trigger
+                    # Trigger shutdown if channel creation fails
                     self._trigger_reconnection()
                     return None
             else:
-                # Connection is not healthy, trigger
+                # Connection is not healthy, trigger shutdown
                 self._trigger_reconnection()
                 return None
             return channel
         except Exception as e:
             logger.error(f"Error accessing channel for queue {queue_name}: {e}")
-            # Trigger
+            # Trigger shutdown on any channel access error
             self._trigger_reconnection()
             return None

@@ -691,33 +665,14 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
                     yield new_channel
                     return
                 else:
-                    # Connection is not healthy,
-
-
-
-
-
-
-
-                    except asyncio.TimeoutError:
-                        logger.warning(
-                            f"Timeout waiting for reconnection for queue {queue_name}"
-                        )
-
-                        # Still no connection, trigger reconnection
-                        if not self.reconnection_in_progress:
-                            self._trigger_reconnection()
-
-                        if attempt < max_retries - 1:
-                            logger.info(
-                                f"Retrying channel access for {queue_name} in {retry_delay}s"
-                            )
-                            await asyncio.sleep(retry_delay)
-                            retry_delay *= 2
-                        else:
-                            raise RuntimeError(
-                                f"Cannot get channel for queue {queue_name}: no connection available after {max_retries} attempts"
-                            )
+                    # Connection is not healthy, trigger shutdown
+                    logger.error(
+                        f"Connection not healthy while getting channel for {queue_name}, triggering shutdown"
+                    )
+                    self._trigger_reconnection()
+                    raise RuntimeError(
+                        f"Cannot get channel for queue {queue_name}: connection is not healthy"
+                    )

             except Exception as e:
                 if attempt < max_retries - 1:
@@ -734,7 +689,7 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):

     async def _monitor_connection_health(self) -> None:
         """
-        Monitor connection health and trigger
+        Monitor connection health and trigger shutdown if connection is lost.
         This runs as a background task.
         """
         while not self.shutdown_event.is_set():
@@ -746,11 +701,11 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):

                 # Check connection health
                 if not await self._is_connection_healthy():
-                    logger.
-                    "Connection health check failed,
+                    logger.error(
+                        "Connection health check failed, initiating worker shutdown"
                     )
-
-
+                    self.shutdown()
+                    break

             except asyncio.CancelledError:
                 logger.info("Connection health monitoring cancelled")
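Note: in 0.3.17 a failed health check no longer schedules a reconnection; it stops the worker. A rough sketch of that loop shape, assuming a probe coroutine and a shutdown callback (both placeholders, not jararaca APIs):

    import asyncio
    from collections.abc import Awaitable, Callable

    async def monitor_health(
        probe: Callable[[], Awaitable[bool]],
        shutdown: Callable[[], None],
        stop: asyncio.Event,
        interval: float = 5.0,
    ) -> None:
        # Poll the probe until the worker is stopping or a check fails
        while not stop.is_set():
            await asyncio.sleep(interval)
            if not await probe():
                print("health check failed, shutting worker down")
                shutdown()
                break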
@@ -778,74 +733,12 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):

     def _trigger_reconnection(self) -> None:
         """
-        Trigger
+        Trigger worker shutdown due to connection loss.
         """
-        if not self.
-
+        if not self.shutdown_event.is_set():
+            logger.error("Connection lost, initiating worker shutdown")
             self.connection_healthy = False
-        self.
-
-        # Start reconnection task
-        self.reconnection_task = asyncio.create_task(self._handle_reconnection())
-        self.reconnection_task.add_done_callback(self._on_reconnection_done)
-
-    def _on_reconnection_done(self, task: asyncio.Task[Any]) -> None:
-        """
-        Handle completion of reconnection task.
-        """
-        self.reconnection_in_progress = False
-        if task.exception():
-            logger.error(f"Reconnection task failed: {task.exception()}")
-        else:
-            logger.info("Reconnection completed successfully")
-
-    async def _handle_reconnection(self) -> None:
-        """
-        Handle the reconnection process with exponential backoff.
-        """
-        logger.info("Starting reconnection process")
-
-        # Close existing connection and channels
-        await self._cleanup_connection()
-
-        reconnection_config = self.config.reconnection_backoff_config
-        attempt = 0
-
-        while not self.shutdown_event.is_set():
-            try:
-                attempt += 1
-                logger.info(f"Reconnection attempt {attempt}")
-
-                # Establish new connection
-                self.connection = await self._establish_connection()
-                self.connection_healthy = True
-
-                # Re-establish all consumers
-                await self._reestablish_consumers()
-
-                logger.info("Reconnection successful")
-                self.reconnection_event.set()
-                return
-
-            except Exception as e:
-                logger.error(f"Reconnection attempt {attempt} failed: {e}")
-
-                if self.shutdown_event.is_set():
-                    break
-
-                # Calculate backoff delay
-                delay = reconnection_config.initial_delay * (
-                    reconnection_config.backoff_factor ** (attempt - 1)
-                )
-                if reconnection_config.jitter:
-                    jitter_amount = delay * 0.25
-                    delay = delay + random.uniform(-jitter_amount, jitter_amount)
-                    delay = max(delay, 0.1)
-
-                delay = min(delay, reconnection_config.max_delay)
-
-                logger.info(f"Retrying reconnection in {delay:.2f} seconds")
-                await asyncio.sleep(delay)
+            self.shutdown()

     async def _cleanup_connection(self) -> None:
         """
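Note: the deleted _handle_reconnection loop spaced its attempts with exponential backoff plus roughly ±25% jitter before this release switched to failing fast. The arithmetic below mirrors what the removed code computed per attempt (the config numbers are arbitrary examples, not jararaca defaults):

    import random

    def backoff_delay(
        attempt: int,
        initial_delay: float = 1.0,
        backoff_factor: float = 2.0,
        max_delay: float = 60.0,
        jitter: bool = True,
    ) -> float:
        # delay = initial * factor**(attempt - 1), jittered by +/-25%, clamped to [0.1, max_delay]
        delay = initial_delay * (backoff_factor ** (attempt - 1))
        if jitter:
            jitter_amount = delay * 0.25
            delay = max(delay + random.uniform(-jitter_amount, jitter_amount), 0.1)
        return min(delay, max_delay)

    print([round(backoff_delay(a, jitter=False), 1) for a in range(1, 6)])  # [1.0, 2.0, 4.0, 8.0, 16.0]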
@@ -889,32 +782,6 @@ class AioPikaMicroserviceConsumer(MessageBusConsumer):
         self.connection = None
         self.connection_healthy = False

-    async def _reestablish_consumers(self) -> None:
-        """
-        Re-establish all consumers after reconnection.
-        """
-        logger.info("Re-establishing consumers after reconnection")
-
-        # Re-establish message handlers
-        for handler in self.message_handler_set:
-            queue_name = f"{handler.message_type.MESSAGE_TOPIC}.{handler.instance_callable.__module__}.{handler.instance_callable.__qualname__}"
-            try:
-                await self._setup_message_handler_consumer(handler)
-                logger.info(f"Re-established consumer for {queue_name}")
-            except Exception as e:
-                logger.error(f"Failed to re-establish consumer for {queue_name}: {e}")
-
-        # Re-establish scheduled actions
-        for scheduled_action in self.scheduled_actions:
-            queue_name = f"{scheduled_action.callable.__module__}.{scheduled_action.callable.__qualname__}"
-            try:
-                await self._setup_scheduled_action_consumer(scheduled_action)
-                logger.info(f"Re-established scheduler consumer for {queue_name}")
-            except Exception as e:
-                logger.error(
-                    f"Failed to re-establish scheduler consumer for {queue_name}: {e}"
-                )
-

 def create_message_bus(
     broker_url: str,
@@ -1798,7 +1665,14 @@ class MessageBusWorker:
         loop.add_signal_handler(signal.SIGINT, on_shutdown, loop)
         # Add graceful shutdown handler for SIGTERM as well
         loop.add_signal_handler(signal.SIGTERM, on_shutdown, loop)
-
+        try:
+            runner.run(self.start_async())
+        except Exception as e:
+            logger.critical(f"Worker failed to start due to connection error: {e}")
+            # Exit with error code 1 to indicate startup failure
+            import sys
+
+            sys.exit(1)

     async def _graceful_shutdown(self) -> None:
         """Handles graceful shutdown process"""
All remaining files listed above are unchanged between 0.3.16 and 0.3.17; they were only moved from the jararaca-0.3.16 directory to the jararaca-0.3.17 directory.