jararaca 0.3.11a16__py3-none-any.whl → 0.4.0a19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +121 -0
- jararaca/__init__.py +189 -17
- jararaca/__main__.py +4 -0
- jararaca/broker_backend/__init__.py +4 -0
- jararaca/broker_backend/mapper.py +4 -0
- jararaca/broker_backend/redis_broker_backend.py +9 -3
- jararaca/cli.py +915 -51
- jararaca/common/__init__.py +3 -0
- jararaca/core/__init__.py +3 -0
- jararaca/core/providers.py +8 -0
- jararaca/core/uow.py +41 -7
- jararaca/di.py +4 -0
- jararaca/files/entity.py.mako +4 -0
- jararaca/helpers/__init__.py +3 -0
- jararaca/helpers/global_scheduler/__init__.py +3 -0
- jararaca/helpers/global_scheduler/config.py +21 -0
- jararaca/helpers/global_scheduler/controller.py +42 -0
- jararaca/helpers/global_scheduler/registry.py +32 -0
- jararaca/lifecycle.py +6 -2
- jararaca/messagebus/__init__.py +4 -0
- jararaca/messagebus/bus_message_controller.py +4 -0
- jararaca/messagebus/consumers/__init__.py +3 -0
- jararaca/messagebus/decorators.py +121 -61
- jararaca/messagebus/implicit_headers.py +49 -0
- jararaca/messagebus/interceptors/__init__.py +3 -0
- jararaca/messagebus/interceptors/aiopika_publisher_interceptor.py +62 -11
- jararaca/messagebus/interceptors/message_publisher_collector.py +62 -0
- jararaca/messagebus/interceptors/publisher_interceptor.py +29 -3
- jararaca/messagebus/message.py +4 -0
- jararaca/messagebus/publisher.py +6 -0
- jararaca/messagebus/worker.py +1002 -459
- jararaca/microservice.py +113 -2
- jararaca/observability/constants.py +7 -0
- jararaca/observability/decorators.py +170 -13
- jararaca/observability/fastapi_exception_handler.py +37 -0
- jararaca/observability/hooks.py +109 -0
- jararaca/observability/interceptor.py +4 -0
- jararaca/observability/providers/__init__.py +3 -0
- jararaca/observability/providers/otel.py +225 -16
- jararaca/persistence/base.py +39 -3
- jararaca/persistence/exports.py +4 -0
- jararaca/persistence/interceptors/__init__.py +3 -0
- jararaca/persistence/interceptors/aiosqa_interceptor.py +86 -73
- jararaca/persistence/interceptors/constants.py +5 -0
- jararaca/persistence/interceptors/decorators.py +50 -0
- jararaca/persistence/session.py +3 -0
- jararaca/persistence/sort_filter.py +4 -0
- jararaca/persistence/utilities.py +73 -20
- jararaca/presentation/__init__.py +3 -0
- jararaca/presentation/decorators.py +88 -86
- jararaca/presentation/exceptions.py +23 -0
- jararaca/presentation/hooks.py +4 -0
- jararaca/presentation/http_microservice.py +4 -0
- jararaca/presentation/server.py +97 -45
- jararaca/presentation/websocket/__init__.py +3 -0
- jararaca/presentation/websocket/base_types.py +4 -0
- jararaca/presentation/websocket/context.py +4 -0
- jararaca/presentation/websocket/decorators.py +8 -41
- jararaca/presentation/websocket/redis.py +280 -53
- jararaca/presentation/websocket/types.py +4 -0
- jararaca/presentation/websocket/websocket_interceptor.py +46 -19
- jararaca/reflect/__init__.py +3 -0
- jararaca/reflect/controller_inspect.py +16 -10
- jararaca/reflect/decorators.py +252 -0
- jararaca/reflect/helpers.py +18 -0
- jararaca/reflect/metadata.py +34 -25
- jararaca/rpc/__init__.py +3 -0
- jararaca/rpc/http/__init__.py +101 -0
- jararaca/rpc/http/backends/__init__.py +14 -0
- jararaca/rpc/http/backends/httpx.py +43 -9
- jararaca/rpc/http/backends/otel.py +4 -0
- jararaca/rpc/http/decorators.py +380 -115
- jararaca/rpc/http/httpx.py +3 -0
- jararaca/scheduler/__init__.py +3 -0
- jararaca/scheduler/beat_worker.py +521 -105
- jararaca/scheduler/decorators.py +15 -22
- jararaca/scheduler/types.py +4 -0
- jararaca/tools/app_config/__init__.py +3 -0
- jararaca/tools/app_config/decorators.py +7 -19
- jararaca/tools/app_config/interceptor.py +6 -2
- jararaca/tools/typescript/__init__.py +3 -0
- jararaca/tools/typescript/decorators.py +120 -0
- jararaca/tools/typescript/interface_parser.py +1077 -174
- jararaca/utils/__init__.py +3 -0
- jararaca/utils/env_parse_utils.py +133 -0
- jararaca/utils/rabbitmq_utils.py +112 -39
- jararaca/utils/retry.py +19 -14
- jararaca-0.4.0a19.dist-info/LICENSE +674 -0
- jararaca-0.4.0a19.dist-info/LICENSES/GPL-3.0-or-later.txt +232 -0
- {jararaca-0.3.11a16.dist-info → jararaca-0.4.0a19.dist-info}/METADATA +12 -7
- jararaca-0.4.0a19.dist-info/RECORD +96 -0
- {jararaca-0.3.11a16.dist-info → jararaca-0.4.0a19.dist-info}/WHEEL +1 -1
- pyproject.toml +132 -0
- jararaca-0.3.11a16.dist-info/RECORD +0 -74
- /jararaca-0.3.11a16.dist-info/LICENSE → /LICENSE +0 -0
- {jararaca-0.3.11a16.dist-info → jararaca-0.4.0a19.dist-info}/entry_points.txt +0 -0
jararaca/cli.py
CHANGED
@@ -1,3 +1,7 @@
+# SPDX-FileCopyrightText: 2025 Lucas S
+#
+# SPDX-License-Identifier: GPL-3.0-or-later
+
 import asyncio
 import importlib
 import importlib.resources
@@ -5,9 +9,12 @@ import multiprocessing
 import os
 import sys
 import time
+import traceback
+import typing
 from codecs import StreamWriter
+from dataclasses import dataclass
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any, Callable
 from urllib.parse import parse_qs, urlparse

 import aio_pika
@@ -31,6 +38,9 @@ from jararaca.tools.typescript.interface_parser import (
 )
 from jararaca.utils.rabbitmq_utils import RabbitmqUtils

+if TYPE_CHECKING:
+    from watchdog.observers.api import BaseObserver
+
 LIBRARY_FILES_PATH = importlib.resources.files("jararaca.files")
 ENTITY_TEMPLATE_PATH = LIBRARY_FILES_PATH / "entity.py.mako"

@@ -256,7 +266,7 @@ async def declare_controller_queues(
     Declare all message handler and scheduled action queues for controllers.
     """
     for instance_type in app.controllers:
-        controller_spec = MessageBusController.
+        controller_spec = MessageBusController.get_last(instance_type)
         if controller_spec is None:
             continue

@@ -266,7 +276,7 @@ async def declare_controller_queues(
         for _, member in members.items():
             # Check if it's a message handler
             await declare_message_handler_queue(
-                connection, member, exchange, force, interactive_mode
+                connection, member, exchange, force, interactive_mode, controller_spec
             )

             # Check if it's a scheduled action
@@ -281,11 +291,12 @@ async def declare_message_handler_queue(
     exchange: str,
     force: bool,
     interactive_mode: bool,
+    controller_spec: MessageBusController,
 ) -> None:
     """
     Declare a queue for a message handler if the member is one.
     """
-    message_handler = MessageHandler.
+    message_handler = MessageHandler.get_last(member.member_function)
     if message_handler is not None:
         queue_name = f"{message_handler.message_type.MESSAGE_TOPIC}.{member.member_function.__module__}.{member.member_function.__qualname__}"
         routing_key = f"{message_handler.message_type.MESSAGE_TOPIC}.#"
@@ -311,7 +322,7 @@ async def declare_scheduled_action_queue(
     """
     Declare a queue for a scheduled action if the member is one.
     """
-    scheduled_action = ScheduledAction.
+    scheduled_action = ScheduledAction.get_last(member.member_function)
    if scheduled_action is not None:
         queue_name = (
             f"{member.member_function.__module__}.{member.member_function.__qualname__}"
@@ -421,25 +432,52 @@ def cli() -> None:
 @click.option(
     "--handlers",
     type=str,
+    envvar="HANDLERS",
     help="Comma-separated list of handler names to listen to. If not specified, all handlers will be used.",
 )
+@click.option(
+    "--reload",
+    is_flag=True,
+    envvar="RELOAD",
+    help="Enable auto-reload when Python files change.",
+)
+@click.option(
+    "--src-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    default="src",
+    envvar="SRC_DIR",
+    help="The source directory to watch for changes when --reload is enabled.",
+)
+@click.option(
+    "--gracious-shutdown-seconds",
+    type=int,
+    default=20,
+    envvar="GRACIOUS_SHUTDOWN_SECONDS",
+    help="Number of seconds to wait for graceful shutdown on reload",
+)
 def worker(
-    app_path: str,
+    app_path: str,
+    broker_url: str,
+    backend_url: str,
+    handlers: str | None,
+    reload: bool,
+    src_dir: str,
+    gracious_shutdown_seconds: int,
 ) -> None:
     """Start a message bus worker that processes asynchronous messages from a message queue."""
-    app = find_microservice_by_module_path(app_path)

-
-
-
-
-
-
-
-
-
-
+    if reload:
+        process_args = {
+            "app_path": app_path,
+            "broker_url": broker_url,
+            "backend_url": backend_url,
+            "handlers": handlers,
+        }
+        run_with_reload_watcher(
+            process_args, run_worker_process, src_dir, gracious_shutdown_seconds
+        )
+    else:
+        run_worker_process(app_path, broker_url, backend_url, handlers)


 @cli.command()
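Both `worker` and `beat` (next hunk) now read every option from an environment variable through click's `envvar` parameter, so flags and container environments are interchangeable. A minimal sketch of that fallback pattern — the `demo` command and `DEMO_BROKER_URL` variable are hypothetical names, not part of jararaca:

    # Minimal sketch of click's envvar fallback, as used by the options above.
    import click

    @click.command()
    @click.option("--broker-url", type=str, envvar="DEMO_BROKER_URL", required=True)
    def demo(broker_url: str) -> None:
        # `demo --broker-url amqp://...` and `DEMO_BROKER_URL=amqp://... demo`
        # resolve to the same value.
        click.echo(broker_url)

    if __name__ == "__main__":
        demo()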
@@ -485,51 +523,78 @@ def server(app_path: str, host: str, port: int) -> None:
 @click.argument(
     "app_path",
     type=str,
+    envvar="APP_PATH",
 )
 @click.option(
     "--interval",
     type=int,
     default=1,
     required=True,
+    envvar="INTERVAL",
 )
 @click.option(
     "--broker-url",
     type=str,
     required=True,
+    envvar="BROKER_URL",
 )
 @click.option(
     "--backend-url",
     type=str,
     required=True,
+    envvar="BACKEND_URL",
 )
 @click.option(
     "--actions",
     type=str,
+    envvar="ACTIONS",
     help="Comma-separated list of action names to run (only run actions with these names)",
 )
+@click.option(
+    "--reload",
+    is_flag=True,
+    envvar="RELOAD",
+    help="Enable auto-reload when Python files change.",
+)
+@click.option(
+    "--src-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    default="src",
+    envvar="SRC_DIR",
+    help="The source directory to watch for changes when --reload is enabled.",
+)
+@click.option(
+    "--gracious-shutdown-seconds",
+    type=int,
+    default=20,
+    envvar="GRACIOUS_SHUTDOWN_SECONDS",
+    help="Number of seconds to wait for graceful shutdown on reload",
+)
 def beat(
     interval: int,
     broker_url: str,
     backend_url: str,
     app_path: str,
     actions: str | None = None,
+    reload: bool = False,
+    src_dir: str = "src",
+    gracious_shutdown_seconds: int = 20,
 ) -> None:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    beat_worker.run()
+    """Start a scheduler that dispatches scheduled actions to workers."""
+
+    if reload:
+        process_args = {
+            "app_path": app_path,
+            "interval": interval,
+            "broker_url": broker_url,
+            "backend_url": backend_url,
+            "actions": actions,
+        }
+        run_with_reload_watcher(
+            process_args, run_beat_process, src_dir, gracious_shutdown_seconds
+        )
+    else:
+        run_beat_process(app_path, interval, broker_url, backend_url, actions)


 def generate_interfaces(
@@ -581,6 +646,7 @@ def generate_interfaces(
         return content
     except Exception as e:
         click.echo(f"Error generating TypeScript interfaces: {e}", file=sys.stderr)
+        traceback.print_exc(file=sys.stderr)
         return ""


@@ -588,29 +654,35 @@ def generate_interfaces(
 @click.argument(
     "app_path",
     type=str,
+    envvar="APP_PATH",
 )
 @click.argument(
     "file_path",
     type=click.Path(file_okay=True, dir_okay=False),
     required=False,
+    envvar="FILE_PATH",
 )
 @click.option(
     "--watch",
     is_flag=True,
+    envvar="WATCH",
 )
 @click.option(
     "--src-dir",
     type=click.Path(exists=True, file_okay=False, dir_okay=True),
     default="src",
+    envvar="SRC_DIR",
 )
 @click.option(
     "--stdout",
     is_flag=True,
+    envvar="STDOUT",
     help="Print generated interfaces to stdout instead of writing to a file",
 )
 @click.option(
     "--post-process",
     type=str,
+    envvar="POST_PROCESS",
     help="Command to run after generating the interfaces, {file} will be replaced with the output file path",
 )
 def gen_tsi(
@@ -702,19 +774,23 @@ def gen_tsi(

     # subprocess.run(cmd, check=False)

-
-
-    observer.schedule(PyFileChangeHandler(), src_dir, recursive=True)  # type: ignore
-    observer.start()  # type: ignore
+    @typing.no_type_check
+    def start_watchdog() -> None:

-
-
-
-
-
-
-
-
+        observer: "BaseObserver" = Observer()
+        observer.schedule(PyFileChangeHandler(), src_dir, recursive=True)
+        observer.start()
+
+        click.echo(f"Watching for changes in {os.path.abspath(src_dir)}...")
+        try:
+            while True:
+                time.sleep(1)
+        except KeyboardInterrupt:
+            observer.stop()
+            click.echo("Watch mode stopped")
+        observer.join()
+
+    start_watchdog()


 def camel_case_to_snake_case(name: str) -> str:
@@ -730,10 +806,11 @@ def camel_case_to_pascal_case(name: str) -> str:


 @cli.command()
-@click.argument("entity_name", type=click.STRING)
+@click.argument("entity_name", type=click.STRING, envvar="ENTITY_NAME")
 @click.argument(
     "file_path",
     type=click.File("w"),
+    envvar="FILE_PATH",
 )
 def gen_entity(entity_name: str, file_path: StreamWriter) -> None:

@@ -744,10 +821,12 @@ def gen_entity(entity_name: str, file_path: StreamWriter) -> None:
     entity_kebab_case = camel_case_to_kebab_case(entity_name)

     file_path.write(
-
-
-
-
+        str(
+            template.render(
+                entityNameSnakeCase=entity_snake_case,
+                entityNamePascalCase=entity_pascal_case,
+                entityNameKebabCase=entity_kebab_case,
+            )
         )
     )

@@ -769,6 +848,7 @@ def gen_entity(entity_name: str, file_path: StreamWriter) -> None:
     "--interactive-mode",
     is_flag=True,
     default=False,
+    envvar="INTERACTIVE_MODE",
     help="Enable interactive mode for queue declaration (confirm before deleting existing queues)",
 )
 @click.option(
@@ -776,6 +856,7 @@ def gen_entity(entity_name: str, file_path: StreamWriter) -> None:
     "--force",
     is_flag=True,
     default=False,
+    envvar="FORCE",
     help="Force recreation by deleting existing exchanges and queues before declaring them",
 )
 def declare(
@@ -835,3 +916,786 @@ def declare(
         raise

     asyncio.run(run_declarations())
+
+
+def run_worker_process(
+    app_path: str, broker_url: str, backend_url: str, handlers: str | None
+) -> None:
+    """Run a worker process with the given parameters."""
+    app = find_microservice_by_module_path(app_path)
+
+    # Parse handler names if provided
+    handler_names: set[str] | None = None
+    if handlers:
+        handler_names = {name.strip() for name in handlers.split(",") if name.strip()}
+
+    click.echo(f"Starting worker for {app_path}...")
+    worker_mod.MessageBusWorker(
+        app=app,
+        broker_url=broker_url,
+        backend_url=backend_url,
+        handler_names=handler_names,
+    ).start_sync()
+
+
+def run_beat_process(
+    app_path: str, interval: int, broker_url: str, backend_url: str, actions: str | None
+) -> None:
+    """Run a beat scheduler process with the given parameters."""
+    app = find_microservice_by_module_path(app_path)
+
+    # Parse scheduler names if provided
+    scheduler_names: set[str] | None = None
+    if actions:
+        scheduler_names = {name.strip() for name in actions.split(",") if name.strip()}
+
+    click.echo(f"Starting beat scheduler for {app_path}...")
+    beat_worker = BeatWorker(
+        app=app,
+        interval=interval,
+        backend_url=backend_url,
+        broker_url=broker_url,
+        scheduled_action_names=scheduler_names,
+    )
+    beat_worker.run()
+
+
+def run_with_reload_watcher(
+    process_args: dict[str, Any],
+    process_target: Callable[..., Any],
+    src_dir: str = "src",
+    max_graceful_shutdown_seconds: int = 20,
+) -> None:
+    """
+    Run a process with a file watcher that will restart it when Python files change.
+
+    Args:
+        process_args: Arguments to pass to the process function
+        process_target: The function to run as the process
+        src_dir: The directory to watch for changes
+    """
+    try:
+        from watchdog.events import FileSystemEvent, FileSystemEventHandler
+        from watchdog.observers import Observer
+    except ImportError:
+        click.echo(
+            "Watchdog is required for reload mode. Install it with: pip install watchdog",
+            file=sys.stderr,
+        )
+        return
+
+    # Run the initial process
+    process = multiprocessing.get_context("spawn").Process(
+        target=process_target,
+        kwargs=process_args,
+        daemon=False,  # Non-daemon to ensure it completes properly
+    )
+    process.start()  # Set up file system event handler
+
+    class PyFileChangeHandler(FileSystemEventHandler):
+        def __init__(self) -> None:
+            self.last_modified_time = time.time()
+            self.debounce_seconds = 1.0  # Debounce to avoid multiple restarts
+            self.active_process = process
+
+        def on_modified(self, event: FileSystemEvent) -> None:
+            src_path = (
+                event.src_path
+                if isinstance(event.src_path, str)
+                else str(event.src_path)
+            )
+
+            # Ignore non-Python files and directories
+            if event.is_directory or not src_path.endswith(".py"):
+                return
+
+            # Debounce to avoid multiple restarts
+            current_time = time.time()
+            if current_time - self.last_modified_time < self.debounce_seconds:
+                return
+            self.last_modified_time = current_time
+
+            click.echo(f"Detected change in {src_path}")
+            click.echo("Restarting process...")
+
+            # Terminate the current process
+            if self.active_process and self.active_process.is_alive():
+                self.active_process.terminate()
+                self.active_process.join(timeout=max_graceful_shutdown_seconds)
+
+                # If process doesn't terminate, kill it
+                if self.active_process.is_alive():
+                    click.echo("Process did not terminate gracefully, killing it")
+                    self.active_process.kill()
+                    self.active_process.join()
+
+            # Create a new process
+
+            self.active_process = multiprocessing.get_context("spawn").Process(
+                target=process_target,
+                kwargs=process_args,
+                daemon=False,
+            )
+            self.active_process.start()
+
+    @typing.no_type_check
+    def start_watchdog() -> None:
+
+        # Set up observer
+        observer = Observer()
+        observer.schedule(PyFileChangeHandler(), src_dir, recursive=True)
+        observer.start()
+
+        click.echo(f"Watching for changes in {os.path.abspath(src_dir)}...")
+        try:
+            while True:
+                time.sleep(1)
+        except KeyboardInterrupt:
+            observer.stop()
+            if process.is_alive():
+                click.echo("Stopping process...")
+                process.terminate()
+                process.join(timeout=max_graceful_shutdown_seconds)
+                if process.is_alive():
+                    process.kill()
+                    process.join()
+            click.echo("Reload mode stopped")
+        observer.join()
+
+    start_watchdog()
+
+
|
|
1069
|
+
# Dead Letter Queue (DLQ) Commands
|
|
1070
|
+
# =============================================================================
|
|
1071
|
+
|
|
1072
|
+
|
|
1073
|
+
@dataclass
|
|
1074
|
+
class DLQMessage:
|
|
1075
|
+
"""Represents a message in the Dead Letter Queue."""
|
|
1076
|
+
|
|
1077
|
+
body: bytes
|
|
1078
|
+
routing_key: str
|
|
1079
|
+
original_queue: str
|
|
1080
|
+
death_reason: str
|
|
1081
|
+
death_count: int
|
|
1082
|
+
first_death_time: str
|
|
1083
|
+
message_id: str | None
|
|
1084
|
+
content_type: str | None
|
|
1085
|
+
|
|
1086
|
+
|
|
1087
|
+
async def fetch_dlq_messages(
|
|
1088
|
+
connection: aio_pika.abc.AbstractConnection,
|
|
1089
|
+
limit: int | None = None,
|
|
1090
|
+
consume: bool = False,
|
|
1091
|
+
) -> list[tuple[DLQMessage, aio_pika.abc.AbstractIncomingMessage]]:
|
|
1092
|
+
"""
|
|
1093
|
+
Fetch messages from the Dead Letter Queue.
|
|
1094
|
+
|
|
1095
|
+
Args:
|
|
1096
|
+
connection: The AMQP connection
|
|
1097
|
+
limit: Maximum number of messages to fetch (None for all)
|
|
1098
|
+
consume: If True, messages are consumed (acked), otherwise they are requeued
|
|
1099
|
+
|
|
1100
|
+
Returns:
|
|
1101
|
+
List of DLQMessage objects with their raw messages
|
|
1102
|
+
"""
|
|
1103
|
+
messages: list[tuple[DLQMessage, aio_pika.abc.AbstractIncomingMessage]] = []
|
|
1104
|
+
|
|
1105
|
+
async with connection.channel() as channel:
|
|
1106
|
+
try:
|
|
1107
|
+
queue = await RabbitmqUtils.get_dl_queue(channel)
|
|
1108
|
+
|
|
1109
|
+
count = 0
|
|
1110
|
+
while True:
|
|
1111
|
+
if limit is not None and count >= limit:
|
|
1112
|
+
break
|
|
1113
|
+
|
|
1114
|
+
try:
|
|
1115
|
+
raw_message = await asyncio.wait_for(
|
|
1116
|
+
queue.get(no_ack=False), timeout=1.0
|
|
1117
|
+
)
|
|
1118
|
+
except asyncio.TimeoutError:
|
|
1119
|
+
break
|
|
1120
|
+
|
|
1121
|
+
if raw_message is None:
|
|
1122
|
+
break
|
|
1123
|
+
|
|
1124
|
+
# Extract x-death header information
|
|
1125
|
+
headers = raw_message.headers or {}
|
|
1126
|
+
x_death_raw = headers.get("x-death")
|
|
1127
|
+
|
|
1128
|
+
original_queue = ""
|
|
1129
|
+
death_reason = ""
|
|
1130
|
+
death_count = 0
|
|
1131
|
+
first_death_time = ""
|
|
1132
|
+
|
|
1133
|
+
# x-death is a list of dicts when messages are dead-lettered
|
|
1134
|
+
if isinstance(x_death_raw, list) and len(x_death_raw) > 0:
|
|
1135
|
+
death_info = x_death_raw[0]
|
|
1136
|
+
if isinstance(death_info, dict):
|
|
1137
|
+
original_queue = str(death_info.get("queue", "unknown"))
|
|
1138
|
+
death_reason = str(death_info.get("reason", "unknown"))
|
|
1139
|
+
count_val = death_info.get("count", 1)
|
|
1140
|
+
if isinstance(count_val, (int, float)):
|
|
1141
|
+
death_count = int(count_val)
|
|
1142
|
+
else:
|
|
1143
|
+
death_count = 0
|
|
1144
|
+
first_death_time_raw = death_info.get("time")
|
|
1145
|
+
if first_death_time_raw:
|
|
1146
|
+
first_death_time = str(first_death_time_raw)
|
|
1147
|
+
|
|
1148
|
+
dlq_message = DLQMessage(
|
|
1149
|
+
body=raw_message.body,
|
|
1150
|
+
routing_key=raw_message.routing_key or "",
|
|
1151
|
+
original_queue=original_queue,
|
|
1152
|
+
death_reason=death_reason,
|
|
1153
|
+
death_count=death_count,
|
|
1154
|
+
first_death_time=first_death_time,
|
|
1155
|
+
message_id=raw_message.message_id,
|
|
1156
|
+
content_type=raw_message.content_type,
|
|
1157
|
+
)
|
|
1158
|
+
|
|
1159
|
+
messages.append((dlq_message, raw_message))
|
|
1160
|
+
|
|
1161
|
+
if not consume:
|
|
1162
|
+
# Requeue the message so it stays in the DLQ
|
|
1163
|
+
await raw_message.nack(requeue=True)
|
|
1164
|
+
count += 1
|
|
1165
|
+
|
|
1166
|
+
except Exception as e:
|
|
1167
|
+
click.echo(f"Error fetching DLQ messages: {e}", err=True)
|
|
1168
|
+
raise
|
|
1169
|
+
|
|
1170
|
+
return messages
|
|
1171
|
+
|
|
1172
|
+
|
|
1173
|
+
async def get_dlq_stats_by_queue(
|
|
1174
|
+
connection: aio_pika.abc.AbstractConnection,
|
|
1175
|
+
) -> dict[str, dict[str, Any]]:
|
|
1176
|
+
"""
|
|
1177
|
+
Get DLQ statistics grouped by original queue.
|
|
1178
|
+
|
|
1179
|
+
Returns:
|
|
1180
|
+
Dictionary with queue names as keys and stats as values
|
|
1181
|
+
"""
|
|
1182
|
+
messages = await fetch_dlq_messages(connection, consume=False)
|
|
1183
|
+
|
|
1184
|
+
stats: dict[str, dict[str, Any]] = {}
|
|
1185
|
+
|
|
1186
|
+
for dlq_message, _ in messages:
|
|
1187
|
+
queue_name = dlq_message.original_queue or "unknown"
|
|
1188
|
+
|
|
1189
|
+
if queue_name not in stats:
|
|
1190
|
+
stats[queue_name] = {
|
|
1191
|
+
"count": 0,
|
|
1192
|
+
"reasons": {},
|
|
1193
|
+
"oldest_death": None,
|
|
1194
|
+
"newest_death": None,
|
|
1195
|
+
}
|
|
1196
|
+
|
|
1197
|
+
stats[queue_name]["count"] += 1
|
|
1198
|
+
|
|
1199
|
+
# Track death reasons
|
|
1200
|
+
reason = dlq_message.death_reason or "unknown"
|
|
1201
|
+
if reason not in stats[queue_name]["reasons"]:
|
|
1202
|
+
stats[queue_name]["reasons"][reason] = 0
|
|
1203
|
+
stats[queue_name]["reasons"][reason] += 1
|
|
1204
|
+
|
|
1205
|
+
# Track oldest/newest death times
|
|
1206
|
+
if dlq_message.first_death_time:
|
|
1207
|
+
death_time = dlq_message.first_death_time
|
|
1208
|
+
if (
|
|
1209
|
+
stats[queue_name]["oldest_death"] is None
|
|
1210
|
+
or death_time < stats[queue_name]["oldest_death"]
|
|
1211
|
+
):
|
|
1212
|
+
stats[queue_name]["oldest_death"] = death_time
|
|
1213
|
+
if (
|
|
1214
|
+
stats[queue_name]["newest_death"] is None
|
|
1215
|
+
or death_time > stats[queue_name]["newest_death"]
|
|
1216
|
+
):
|
|
1217
|
+
stats[queue_name]["newest_death"] = death_time
|
|
1218
|
+
|
|
1219
|
+
return stats
|
|
1220
|
+
|
|
1221
|
+
|
|
1222
|
+
@cli.group()
|
|
1223
|
+
def dlq() -> None:
|
|
1224
|
+
"""Dead Letter Queue (DLQ) management commands.
|
|
1225
|
+
|
|
1226
|
+
Commands for inspecting, managing, and recovering messages from the
|
|
1227
|
+
Dead Letter Queue.
|
|
1228
|
+
"""
|
|
1229
|
+
|
|
1230
|
+
|
|
1231
|
+
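The two helpers above (`fetch_dlq_messages`, `get_dlq_stats_by_queue`) take an already-open `aio_pika` connection, so they can also be driven outside the CLI. A short sketch under that assumption; the broker URL is a placeholder:

    # Hypothetical programmatic use of get_dlq_stats_by_queue.
    import asyncio

    import aio_pika

    async def main() -> None:
        connection = await aio_pika.connect("amqp://guest:guest@localhost/")  # placeholder URL
        try:
            stats = await get_dlq_stats_by_queue(connection)
            for queue_name, queue_stats in stats.items():
                print(queue_name, queue_stats["count"], queue_stats["reasons"])
        finally:
            await connection.close()

    asyncio.run(main())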
@dlq.command("stats")
|
|
1232
|
+
@click.option(
|
|
1233
|
+
"--broker-url",
|
|
1234
|
+
type=str,
|
|
1235
|
+
envvar="BROKER_URL",
|
|
1236
|
+
required=True,
|
|
1237
|
+
help="The URL for the message broker",
|
|
1238
|
+
)
|
|
1239
|
+
@click.option(
|
|
1240
|
+
"--json",
|
|
1241
|
+
"output_json",
|
|
1242
|
+
is_flag=True,
|
|
1243
|
+
default=False,
|
|
1244
|
+
help="Output statistics in JSON format",
|
|
1245
|
+
)
|
|
1246
|
+
def dlq_stats(broker_url: str, output_json: bool) -> None:
|
|
1247
|
+
"""Show statistics about the Dead Letter Queue.
|
|
1248
|
+
|
|
1249
|
+
Displays the total message count and a breakdown by original queue,
|
|
1250
|
+
including death reasons and timestamps.
|
|
1251
|
+
|
|
1252
|
+
Examples:
|
|
1253
|
+
|
|
1254
|
+
\b
|
|
1255
|
+
# Show DLQ stats
|
|
1256
|
+
jararaca dlq stats --broker-url amqp://guest:guest@localhost/
|
|
1257
|
+
|
|
1258
|
+
\b
|
|
1259
|
+
# Output as JSON
|
|
1260
|
+
jararaca dlq stats --broker-url amqp://guest:guest@localhost/ --json
|
|
1261
|
+
"""
|
|
1262
|
+
|
|
1263
|
+
async def run_stats() -> None:
|
|
1264
|
+
connection = await aio_pika.connect(broker_url)
|
|
1265
|
+
try:
|
|
1266
|
+
# Get total message count
|
|
1267
|
+
async with connection.channel() as channel:
|
|
1268
|
+
try:
|
|
1269
|
+
queue_info = await channel.declare_queue(
|
|
1270
|
+
RabbitmqUtils.DEAD_LETTER_QUEUE, passive=True
|
|
1271
|
+
)
|
|
1272
|
+
total_count = queue_info.declaration_result.message_count or 0
|
|
1273
|
+
except Exception:
|
|
1274
|
+
click.echo("Dead Letter Queue does not exist or is not accessible.")
|
|
1275
|
+
return
|
|
1276
|
+
|
|
1277
|
+
if total_count == 0:
|
|
1278
|
+
if output_json:
|
|
1279
|
+
import json
|
|
1280
|
+
|
|
1281
|
+
click.echo(
|
|
1282
|
+
json.dumps({"total_messages": 0, "queues": {}}, indent=2)
|
|
1283
|
+
)
|
|
1284
|
+
else:
|
|
1285
|
+
click.echo("✓ Dead Letter Queue is empty!")
|
|
1286
|
+
return
|
|
1287
|
+
|
|
1288
|
+
# Get detailed stats by queue
|
|
1289
|
+
stats = await get_dlq_stats_by_queue(connection)
|
|
1290
|
+
|
|
1291
|
+
if output_json:
|
|
1292
|
+
import json
|
|
1293
|
+
|
|
1294
|
+
result = {"total_messages": total_count, "queues": stats}
|
|
1295
|
+
click.echo(json.dumps(result, indent=2, default=str))
|
|
1296
|
+
else:
|
|
1297
|
+
click.echo(f"\n{'='*60}")
|
|
1298
|
+
click.echo("Dead Letter Queue Statistics")
|
|
1299
|
+
click.echo(f"{'='*60}")
|
|
1300
|
+
click.echo(f"\nTotal Messages: {total_count}")
|
|
1301
|
+
click.echo("\nBreakdown by Original Queue:")
|
|
1302
|
+
click.echo(f"{'-'*60}")
|
|
1303
|
+
|
|
1304
|
+
for queue_name, queue_stats in sorted(
|
|
1305
|
+
stats.items(), key=lambda x: x[1]["count"], reverse=True
|
|
1306
|
+
):
|
|
1307
|
+
click.echo(f"\n 📦 {queue_name}")
|
|
1308
|
+
click.echo(f" Messages: {queue_stats['count']}")
|
|
1309
|
+
click.echo(" Reasons: ")
|
|
1310
|
+
for reason, count in queue_stats["reasons"].items():
|
|
1311
|
+
click.echo(f" - {reason}: {count}")
|
|
1312
|
+
if queue_stats["oldest_death"]:
|
|
1313
|
+
click.echo(f" Oldest: {queue_stats['oldest_death']}")
|
|
1314
|
+
if queue_stats["newest_death"]:
|
|
1315
|
+
click.echo(f" Newest: {queue_stats['newest_death']}")
|
|
1316
|
+
|
|
1317
|
+
click.echo(f"\n{'='*60}")
|
|
1318
|
+
|
|
1319
|
+
finally:
|
|
1320
|
+
await connection.close()
|
|
1321
|
+
|
|
1322
|
+
asyncio.run(run_stats())
|
|
1323
|
+
|
|
1324
|
+
|
|
1325
|
+
@dlq.command("list")
|
|
1326
|
+
@click.option(
|
|
1327
|
+
"--broker-url",
|
|
1328
|
+
type=str,
|
|
1329
|
+
envvar="BROKER_URL",
|
|
1330
|
+
required=True,
|
|
1331
|
+
help="The URL for the message broker",
|
|
1332
|
+
)
|
|
1333
|
+
@click.option(
|
|
1334
|
+
"--limit",
|
|
1335
|
+
type=int,
|
|
1336
|
+
default=10,
|
|
1337
|
+
help="Maximum number of messages to display (default: 10)",
|
|
1338
|
+
)
|
|
1339
|
+
@click.option(
|
|
1340
|
+
"--queue",
|
|
1341
|
+
"queue_filter",
|
|
1342
|
+
type=str,
|
|
1343
|
+
default=None,
|
|
1344
|
+
help="Filter messages by original queue name (supports partial match)",
|
|
1345
|
+
)
|
|
1346
|
+
@click.option(
|
|
1347
|
+
"--show-body",
|
|
1348
|
+
is_flag=True,
|
|
1349
|
+
default=False,
|
|
1350
|
+
help="Show message body content",
|
|
1351
|
+
)
|
|
1352
|
+
@click.option(
|
|
1353
|
+
"--json",
|
|
1354
|
+
"output_json",
|
|
1355
|
+
is_flag=True,
|
|
1356
|
+
default=False,
|
|
1357
|
+
help="Output messages in JSON format",
|
|
1358
|
+
)
|
|
1359
|
+
def dlq_list(
|
|
1360
|
+
broker_url: str,
|
|
1361
|
+
limit: int,
|
|
1362
|
+
queue_filter: str | None,
|
|
1363
|
+
show_body: bool,
|
|
1364
|
+
output_json: bool,
|
|
1365
|
+
) -> None:
|
|
1366
|
+
"""List messages in the Dead Letter Queue.
|
|
1367
|
+
|
|
1368
|
+
Shows details about each message including the original queue,
|
|
1369
|
+
death reason, and timestamps.
|
|
1370
|
+
|
|
1371
|
+
Examples:
|
|
1372
|
+
|
|
1373
|
+
\b
|
|
1374
|
+
# List first 10 messages
|
|
1375
|
+
jararaca dlq list --broker-url amqp://guest:guest@localhost/
|
|
1376
|
+
|
|
1377
|
+
\b
|
|
1378
|
+
# List messages from a specific queue
|
|
1379
|
+
jararaca dlq list --broker-url amqp://guest:guest@localhost/ --queue user.events
|
|
1380
|
+
|
|
1381
|
+
\b
|
|
1382
|
+
# Show message bodies
|
|
1383
|
+
jararaca dlq list --broker-url amqp://guest:guest@localhost/ --show-body
|
|
1384
|
+
"""
|
|
1385
|
+
|
|
1386
|
+
async def run_list() -> None:
|
|
1387
|
+
connection = await aio_pika.connect(broker_url)
|
|
1388
|
+
try:
|
|
1389
|
+
messages = await fetch_dlq_messages(connection, limit=limit, consume=False)
|
|
1390
|
+
|
|
1391
|
+
if not messages:
|
|
1392
|
+
click.echo("No messages in the Dead Letter Queue.")
|
|
1393
|
+
return
|
|
1394
|
+
|
|
1395
|
+
# Filter by queue if specified
|
|
1396
|
+
if queue_filter:
|
|
1397
|
+
messages = [
|
|
1398
|
+
(msg, raw)
|
|
1399
|
+
for msg, raw in messages
|
|
1400
|
+
if queue_filter.lower() in msg.original_queue.lower()
|
|
1401
|
+
]
|
|
1402
|
+
|
|
1403
|
+
if not messages:
|
|
1404
|
+
click.echo(f"No messages found matching queue filter: '{queue_filter}'")
|
|
1405
|
+
return
|
|
1406
|
+
|
|
1407
|
+
if output_json:
|
|
1408
|
+
import json
|
|
1409
|
+
|
|
1410
|
+
result = []
|
|
1411
|
+
for dlq_msg, _ in messages:
|
|
1412
|
+
msg_dict: dict[str, Any] = {
|
|
1413
|
+
"message_id": dlq_msg.message_id,
|
|
1414
|
+
"original_queue": dlq_msg.original_queue,
|
|
1415
|
+
"routing_key": dlq_msg.routing_key,
|
|
1416
|
+
"death_reason": dlq_msg.death_reason,
|
|
1417
|
+
"death_count": dlq_msg.death_count,
|
|
1418
|
+
"first_death_time": dlq_msg.first_death_time,
|
|
1419
|
+
"content_type": dlq_msg.content_type,
|
|
1420
|
+
}
|
|
1421
|
+
if show_body:
|
|
1422
|
+
try:
|
|
1423
|
+
msg_dict["body"] = dlq_msg.body.decode("utf-8")
|
|
1424
|
+
except Exception:
|
|
1425
|
+
msg_dict["body"] = dlq_msg.body.hex()
|
|
1426
|
+
result.append(msg_dict)
|
|
1427
|
+
click.echo(json.dumps(result, indent=2, default=str))
|
|
1428
|
+
else:
|
|
1429
|
+
click.echo(f"\n{'='*70}")
|
|
1430
|
+
click.echo(f"Dead Letter Queue Messages (showing {len(messages)})")
|
|
1431
|
+
click.echo(f"{'='*70}")
|
|
1432
|
+
|
|
1433
|
+
for i, (dlq_msg, _) in enumerate(messages, 1):
|
|
1434
|
+
click.echo(f"\n[{i}] Message ID: {dlq_msg.message_id or 'N/A'}")
|
|
1435
|
+
click.echo(f" Original Queue: {dlq_msg.original_queue}")
|
|
1436
|
+
click.echo(f" Routing Key: {dlq_msg.routing_key}")
|
|
1437
|
+
click.echo(f" Death Reason: {dlq_msg.death_reason}")
|
|
1438
|
+
click.echo(f" Death Count: {dlq_msg.death_count}")
|
|
1439
|
+
click.echo(f" First Death: {dlq_msg.first_death_time or 'N/A'}")
|
|
1440
|
+
click.echo(f" Content-Type: {dlq_msg.content_type or 'N/A'}")
|
|
1441
|
+
|
|
1442
|
+
if show_body:
|
|
1443
|
+
try:
|
|
1444
|
+
body_str = dlq_msg.body.decode("utf-8")
|
|
1445
|
+
# Truncate if too long
|
|
1446
|
+
if len(body_str) > 500:
|
|
1447
|
+
body_str = body_str[:500] + "... (truncated)"
|
|
1448
|
+
click.echo(f" Body: {body_str}")
|
|
1449
|
+
except Exception:
|
|
1450
|
+
click.echo(f" Body (hex): {dlq_msg.body[:100].hex()}...")
|
|
1451
|
+
|
|
1452
|
+
click.echo(f"\n{'='*70}")
|
|
1453
|
+
|
|
1454
|
+
finally:
|
|
1455
|
+
await connection.close()
|
|
1456
|
+
|
|
1457
|
+
asyncio.run(run_list())
|
|
1458
|
+
|
|
1459
|
+
|
|
1460
|
+
@dlq.command("purge")
|
|
1461
|
+
@click.option(
|
|
1462
|
+
"--broker-url",
|
|
1463
|
+
type=str,
|
|
1464
|
+
envvar="BROKER_URL",
|
|
1465
|
+
required=True,
|
|
1466
|
+
help="The URL for the message broker",
|
|
1467
|
+
)
|
|
1468
|
+
@click.option(
|
|
1469
|
+
"--force",
|
|
1470
|
+
"-f",
|
|
1471
|
+
is_flag=True,
|
|
1472
|
+
default=False,
|
|
1473
|
+
help="Skip confirmation prompt",
|
|
1474
|
+
)
|
|
1475
|
+
def dlq_purge(broker_url: str, force: bool) -> None:
|
|
1476
|
+
"""Purge all messages from the Dead Letter Queue.
|
|
1477
|
+
|
|
1478
|
+
WARNING: This action is irreversible. All messages will be permanently deleted.
|
|
1479
|
+
|
|
1480
|
+
Examples:
|
|
1481
|
+
|
|
1482
|
+
\b
|
|
1483
|
+
# Purge with confirmation
|
|
1484
|
+
jararaca dlq purge --broker-url amqp://guest:guest@localhost/
|
|
1485
|
+
|
|
1486
|
+
\b
|
|
1487
|
+
# Purge without confirmation
|
|
1488
|
+
jararaca dlq purge --broker-url amqp://guest:guest@localhost/ --force
|
|
1489
|
+
"""
|
|
1490
|
+
|
|
1491
|
+
async def run_purge() -> None:
|
|
1492
|
+
connection = await aio_pika.connect(broker_url)
|
|
1493
|
+
try:
|
|
1494
|
+
async with connection.channel() as channel:
|
|
1495
|
+
# First check how many messages are in the queue
|
|
1496
|
+
try:
|
|
1497
|
+
queue_info = await channel.declare_queue(
|
|
1498
|
+
RabbitmqUtils.DEAD_LETTER_QUEUE, passive=True
|
|
1499
|
+
)
|
|
1500
|
+
message_count = queue_info.declaration_result.message_count or 0
|
|
1501
|
+
except Exception:
|
|
1502
|
+
click.echo("Dead Letter Queue does not exist or is not accessible.")
|
|
1503
|
+
return
|
|
1504
|
+
|
|
1505
|
+
if message_count == 0:
|
|
1506
|
+
click.echo("Dead Letter Queue is already empty.")
|
|
1507
|
+
return
|
|
1508
|
+
|
|
1509
|
+
if not force:
|
|
1510
|
+
if not click.confirm(
|
|
1511
|
+
f"Are you sure you want to purge {message_count} messages from the DLQ? This cannot be undone."
|
|
1512
|
+
):
|
|
1513
|
+
click.echo("Purge cancelled.")
|
|
1514
|
+
return
|
|
1515
|
+
|
|
1516
|
+
# Purge the queue
|
|
1517
|
+
purged = await RabbitmqUtils.purge_dl_queue(channel)
|
|
1518
|
+
click.echo(f"✓ Successfully purged {purged} messages from the DLQ.")
|
|
1519
|
+
|
|
1520
|
+
finally:
|
|
1521
|
+
await connection.close()
|
|
1522
|
+
|
|
1523
|
+
asyncio.run(run_purge())
|
|
1524
|
+
|
|
1525
|
+
|
|
1526
|
+
@dlq.command("requeue")
|
|
1527
|
+
@click.option(
|
|
1528
|
+
"--broker-url",
|
|
1529
|
+
type=str,
|
|
1530
|
+
envvar="BROKER_URL",
|
|
1531
|
+
required=True,
|
|
1532
|
+
help="The URL for the message broker",
|
|
1533
|
+
)
|
|
1534
|
+
@click.option(
|
|
1535
|
+
"--queue",
|
|
1536
|
+
"queue_filter",
|
|
1537
|
+
type=str,
|
|
1538
|
+
default=None,
|
|
1539
|
+
help="Only requeue messages from a specific original queue (supports partial match)",
|
|
1540
|
+
)
|
|
1541
|
+
@click.option(
|
|
1542
|
+
"--limit",
|
|
1543
|
+
type=int,
|
|
1544
|
+
default=None,
|
|
1545
|
+
help="Maximum number of messages to requeue",
|
|
1546
|
+
)
|
|
1547
|
+
@click.option(
|
|
1548
|
+
"--force",
|
|
1549
|
+
"-f",
|
|
1550
|
+
is_flag=True,
|
|
1551
|
+
default=False,
|
|
1552
|
+
help="Skip confirmation prompt",
|
|
1553
|
+
)
|
|
1554
|
+
def dlq_requeue(
|
|
1555
|
+
broker_url: str,
|
|
1556
|
+
queue_filter: str | None,
|
|
1557
|
+
limit: int | None,
|
|
1558
|
+
force: bool,
|
|
1559
|
+
) -> None:
|
|
1560
|
+
"""Requeue messages from the Dead Letter Queue back to their original queues.
|
|
1561
|
+
|
|
1562
|
+
This command retrieves messages from the DLQ and publishes them back to their
|
|
1563
|
+
original queues for reprocessing.
|
|
1564
|
+
|
|
1565
|
+
Examples:
|
|
1566
|
+
|
|
1567
|
+
\b
|
|
1568
|
+
# Requeue all messages
|
|
1569
|
+
jararaca dlq requeue --broker-url amqp://guest:guest@localhost/
|
|
1570
|
+
|
|
1571
|
+
\b
|
|
1572
|
+
# Requeue messages from a specific queue
|
|
1573
|
+
jararaca dlq requeue --broker-url amqp://guest:guest@localhost/ --queue user.events
|
|
1574
|
+
|
|
1575
|
+
\b
|
|
1576
|
+
# Requeue only 5 messages
|
|
1577
|
+
jararaca dlq requeue --broker-url amqp://guest:guest@localhost/ --limit 5
|
|
1578
|
+
"""
|
|
1579
|
+
|
|
1580
|
+
async def run_requeue() -> None:
|
|
1581
|
+
parsed_url = urlparse(broker_url)
|
|
1582
|
+
query_params = parse_qs(parsed_url.query)
|
|
1583
|
+
|
|
1584
|
+
if "exchange" not in query_params or not query_params["exchange"]:
|
|
1585
|
+
click.echo(
|
|
1586
|
+
"ERROR: Exchange must be set in the broker URL query string", err=True
|
|
1587
|
+
)
|
|
1588
|
+
return
|
|
1589
|
+
|
|
1590
|
+
exchange_name = query_params["exchange"][0]
|
|
1591
|
+
|
|
1592
|
+
connection = await aio_pika.connect(broker_url)
|
|
1593
|
+
try:
|
|
1594
|
+
# Fetch messages (will be consumed for requeuing)
|
|
1595
|
+
async with connection.channel() as channel:
|
|
1596
|
+
try:
|
|
1597
|
+
queue_info = await channel.declare_queue(
|
|
1598
|
+
RabbitmqUtils.DEAD_LETTER_QUEUE, passive=True
|
|
1599
|
+
)
|
|
1600
|
+
total_count = queue_info.declaration_result.message_count or 0
|
|
1601
|
+
except Exception:
|
|
1602
|
+
click.echo("Dead Letter Queue does not exist or is not accessible.")
|
|
1603
|
+
return
|
|
1604
|
+
|
|
1605
|
+
if total_count == 0:
|
|
1606
|
+
click.echo("Dead Letter Queue is empty.")
|
|
1607
|
+
return
|
|
1608
|
+
|
|
1609
|
+
# Get messages without consuming first to show count
|
|
1610
|
+
messages_preview = await fetch_dlq_messages(
|
|
1611
|
+
connection, limit=limit, consume=False
|
|
1612
|
+
)
|
|
1613
|
+
|
|
1614
|
+
if queue_filter:
|
|
1615
|
+
messages_preview = [
|
|
1616
|
+
(msg, raw)
|
|
1617
|
+
for msg, raw in messages_preview
|
|
1618
|
+
if queue_filter.lower() in msg.original_queue.lower()
|
|
1619
|
+
]
|
|
1620
|
+
|
|
1621
|
+
if not messages_preview:
|
|
1622
|
+
click.echo(
|
|
1623
|
+
f"No messages found matching queue filter: '{queue_filter}'"
|
|
1624
|
+
)
|
|
1625
|
+
return
|
|
1626
|
+
|
|
1627
|
+
requeue_count = len(messages_preview)
|
|
1628
|
+
|
|
1629
|
+
if not force:
|
|
1630
|
+
if not click.confirm(
|
|
1631
|
+
f"Are you sure you want to requeue {requeue_count} messages?"
|
|
1632
|
+
):
|
|
1633
|
+
click.echo("Requeue cancelled.")
|
|
1634
|
+
return
|
|
1635
|
+
|
|
1636
|
+
# Now actually consume and requeue messages
|
|
1637
|
+
async with connection.channel() as channel:
|
|
1638
|
+
queue = await RabbitmqUtils.get_dl_queue(channel)
|
|
1639
|
+
exchange = await RabbitmqUtils.get_main_exchange(channel, exchange_name)
|
|
1640
|
+
|
|
1641
|
+
requeued = 0
|
|
1642
|
+
errors = 0
|
|
1643
|
+
|
|
1644
|
+
count = 0
|
|
1645
|
+
while limit is None or count < limit:
|
|
1646
|
+
try:
|
|
1647
|
+
raw_message = await asyncio.wait_for(
|
|
1648
|
+
queue.get(no_ack=False), timeout=1.0
|
|
1649
|
+
)
|
|
1650
|
+
except asyncio.TimeoutError:
|
|
1651
|
+
break
|
|
1652
|
+
|
|
1653
|
+
if raw_message is None:
|
|
1654
|
+
break
|
|
1655
|
+
|
|
1656
|
+
# Apply queue filter
|
|
1657
|
+
headers = raw_message.headers or {}
|
|
1658
|
+
x_death_raw = headers.get("x-death")
|
|
1659
|
+
original_queue = ""
|
|
1660
|
+
if isinstance(x_death_raw, list) and len(x_death_raw) > 0:
|
|
1661
|
+
death_info = x_death_raw[0]
|
|
1662
|
+
if isinstance(death_info, dict):
|
|
1663
|
+
original_queue = str(death_info.get("queue", ""))
|
|
1664
|
+
|
|
1665
|
+
if (
|
|
1666
|
+
queue_filter
|
|
1667
|
+
and queue_filter.lower() not in original_queue.lower()
|
|
1668
|
+
):
|
|
1669
|
+
# Requeue back to DLQ (don't process this one)
|
|
1670
|
+
await raw_message.nack(requeue=True)
|
|
1671
|
+
continue
|
|
1672
|
+
|
|
1673
|
+
try:
|
|
1674
|
+
# Publish to the original routing key
|
|
1675
|
+
routing_key = raw_message.routing_key or original_queue
|
|
1676
|
+
await exchange.publish(
|
|
1677
|
+
aio_pika.Message(
|
|
1678
|
+
body=raw_message.body,
|
|
1679
|
+
content_type=raw_message.content_type,
|
|
1680
|
+
headers={"x-requeued-from-dlq": True},
|
|
1681
|
+
),
|
|
1682
|
+
routing_key=routing_key,
|
|
1683
|
+
)
|
|
1684
|
+
await raw_message.ack()
|
|
1685
|
+
requeued += 1
|
|
1686
|
+
except Exception as e:
|
|
1687
|
+
click.echo(f"Error requeuing message: {e}", err=True)
|
|
1688
|
+
await raw_message.nack(requeue=True)
|
|
1689
|
+
errors += 1
|
|
1690
|
+
|
|
1691
|
+
count += 1
|
|
1692
|
+
|
|
1693
|
+
click.echo("\n✓ Requeue complete:")
|
|
1694
|
+
click.echo(f" - Requeued: {requeued}")
|
|
1695
|
+
if errors:
|
|
1696
|
+
click.echo(f" - Errors: {errors}")
|
|
1697
|
+
|
|
1698
|
+
finally:
|
|
1699
|
+
await connection.close()
|
|
1700
|
+
|
|
1701
|
+
asyncio.run(run_requeue())
|
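All of the DLQ commands above derive their per-queue metadata from the standard `x-death` header that RabbitMQ attaches when it dead-letters a message. A sketch of the shape the code reads, with illustrative values:

    # Illustrative shape of RabbitMQ's "x-death" header (values are made up);
    # the commands read the "queue", "reason", "count", and "time" keys.
    x_death = [
        {
            "queue": "user.events.some.Handler.handle",  # queue the message died in
            "reason": "rejected",  # e.g. rejected, expired, maxlen
            "count": 3,  # times the message died in this queue
            "time": "2025-01-01T00:00:00",  # first death (an AMQP timestamp)
        }
    ]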