jararaca 0.4.0a5__py3-none-any.whl → 0.4.0a19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. jararaca/__init__.py +9 -9
  2. jararaca/cli.py +643 -4
  3. jararaca/core/providers.py +4 -0
  4. jararaca/helpers/__init__.py +3 -0
  5. jararaca/helpers/global_scheduler/__init__.py +3 -0
  6. jararaca/helpers/global_scheduler/config.py +21 -0
  7. jararaca/helpers/global_scheduler/controller.py +42 -0
  8. jararaca/helpers/global_scheduler/registry.py +32 -0
  9. jararaca/messagebus/decorators.py +104 -10
  10. jararaca/messagebus/interceptors/aiopika_publisher_interceptor.py +50 -8
  11. jararaca/messagebus/interceptors/message_publisher_collector.py +62 -0
  12. jararaca/messagebus/interceptors/publisher_interceptor.py +25 -3
  13. jararaca/messagebus/worker.py +276 -200
  14. jararaca/microservice.py +3 -1
  15. jararaca/observability/providers/otel.py +31 -13
  16. jararaca/persistence/base.py +1 -1
  17. jararaca/persistence/utilities.py +47 -24
  18. jararaca/presentation/decorators.py +3 -3
  19. jararaca/reflect/decorators.py +24 -10
  20. jararaca/reflect/helpers.py +18 -0
  21. jararaca/rpc/http/__init__.py +2 -2
  22. jararaca/rpc/http/decorators.py +9 -9
  23. jararaca/scheduler/beat_worker.py +14 -14
  24. jararaca/tools/typescript/decorators.py +4 -4
  25. jararaca/tools/typescript/interface_parser.py +3 -1
  26. jararaca/utils/env_parse_utils.py +133 -0
  27. jararaca/utils/rabbitmq_utils.py +47 -0
  28. jararaca/utils/retry.py +11 -13
  29. {jararaca-0.4.0a5.dist-info → jararaca-0.4.0a19.dist-info}/METADATA +2 -1
  30. {jararaca-0.4.0a5.dist-info → jararaca-0.4.0a19.dist-info}/RECORD +35 -27
  31. pyproject.toml +2 -1
  32. {jararaca-0.4.0a5.dist-info → jararaca-0.4.0a19.dist-info}/LICENSE +0 -0
  33. {jararaca-0.4.0a5.dist-info → jararaca-0.4.0a19.dist-info}/LICENSES/GPL-3.0-or-later.txt +0 -0
  34. {jararaca-0.4.0a5.dist-info → jararaca-0.4.0a19.dist-info}/WHEEL +0 -0
  35. {jararaca-0.4.0a5.dist-info → jararaca-0.4.0a19.dist-info}/entry_points.txt +0 -0
jararaca/cli.py CHANGED
@@ -12,6 +12,7 @@ import time
 import traceback
 import typing
 from codecs import StreamWriter
+from dataclasses import dataclass
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable
 from urllib.parse import parse_qs, urlparse
@@ -820,10 +821,12 @@ def gen_entity(entity_name: str, file_path: StreamWriter) -> None:
     entity_kebab_case = camel_case_to_kebab_case(entity_name)
 
     file_path.write(
-        template.render(
-            entityNameSnakeCase=entity_snake_case,
-            entityNamePascalCase=entity_pascal_case,
-            entityNameKebabCase=entity_kebab_case,
+        str(
+            template.render(
+                entityNameSnakeCase=entity_snake_case,
+                entityNamePascalCase=entity_pascal_case,
+                entityNameKebabCase=entity_kebab_case,
+            )
         )
     )
 
@@ -1060,3 +1063,639 @@ def run_with_reload_watcher(
         observer.join()
 
     start_watchdog()
+
+
+# =============================================================================
+# Dead Letter Queue (DLQ) Commands
+# =============================================================================
+
+
+@dataclass
+class DLQMessage:
+    """Represents a message in the Dead Letter Queue."""
+
+    body: bytes
+    routing_key: str
+    original_queue: str
+    death_reason: str
+    death_count: int
+    first_death_time: str
+    message_id: str | None
+    content_type: str | None
+
+
+async def fetch_dlq_messages(
+    connection: aio_pika.abc.AbstractConnection,
+    limit: int | None = None,
+    consume: bool = False,
+) -> list[tuple[DLQMessage, aio_pika.abc.AbstractIncomingMessage]]:
+    """
+    Fetch messages from the Dead Letter Queue.
+
+    Args:
+        connection: The AMQP connection
+        limit: Maximum number of messages to fetch (None for all)
+        consume: If True, messages are consumed (acked), otherwise they are requeued
+
+    Returns:
+        List of DLQMessage objects with their raw messages
+    """
+    messages: list[tuple[DLQMessage, aio_pika.abc.AbstractIncomingMessage]] = []
+
+    async with connection.channel() as channel:
+        try:
+            queue = await RabbitmqUtils.get_dl_queue(channel)
+
+            count = 0
+            while True:
+                if limit is not None and count >= limit:
+                    break
+
+                try:
+                    raw_message = await asyncio.wait_for(
+                        queue.get(no_ack=False), timeout=1.0
+                    )
+                except asyncio.TimeoutError:
+                    break
+
+                if raw_message is None:
+                    break
+
+                # Extract x-death header information
+                headers = raw_message.headers or {}
+                x_death_raw = headers.get("x-death")
+
+                original_queue = ""
+                death_reason = ""
+                death_count = 0
+                first_death_time = ""
+
+                # x-death is a list of dicts when messages are dead-lettered
+                if isinstance(x_death_raw, list) and len(x_death_raw) > 0:
+                    death_info = x_death_raw[0]
+                    if isinstance(death_info, dict):
+                        original_queue = str(death_info.get("queue", "unknown"))
+                        death_reason = str(death_info.get("reason", "unknown"))
+                        count_val = death_info.get("count", 1)
+                        if isinstance(count_val, (int, float)):
+                            death_count = int(count_val)
+                        else:
+                            death_count = 0
+                        first_death_time_raw = death_info.get("time")
+                        if first_death_time_raw:
+                            first_death_time = str(first_death_time_raw)
+
+                dlq_message = DLQMessage(
+                    body=raw_message.body,
+                    routing_key=raw_message.routing_key or "",
+                    original_queue=original_queue,
+                    death_reason=death_reason,
+                    death_count=death_count,
+                    first_death_time=first_death_time,
+                    message_id=raw_message.message_id,
+                    content_type=raw_message.content_type,
+                )
+
+                messages.append((dlq_message, raw_message))
+
+                if not consume:
+                    # Requeue the message so it stays in the DLQ
+                    await raw_message.nack(requeue=True)
+                count += 1
+
+        except Exception as e:
+            click.echo(f"Error fetching DLQ messages: {e}", err=True)
+            raise
+
+    return messages
+
+
+async def get_dlq_stats_by_queue(
+    connection: aio_pika.abc.AbstractConnection,
+) -> dict[str, dict[str, Any]]:
+    """
+    Get DLQ statistics grouped by original queue.
+
+    Returns:
+        Dictionary with queue names as keys and stats as values
+    """
+    messages = await fetch_dlq_messages(connection, consume=False)
+
+    stats: dict[str, dict[str, Any]] = {}
+
+    for dlq_message, _ in messages:
+        queue_name = dlq_message.original_queue or "unknown"
+
+        if queue_name not in stats:
+            stats[queue_name] = {
+                "count": 0,
+                "reasons": {},
+                "oldest_death": None,
+                "newest_death": None,
+            }
+
+        stats[queue_name]["count"] += 1
+
+        # Track death reasons
+        reason = dlq_message.death_reason or "unknown"
+        if reason not in stats[queue_name]["reasons"]:
+            stats[queue_name]["reasons"][reason] = 0
+        stats[queue_name]["reasons"][reason] += 1
+
+        # Track oldest/newest death times
+        if dlq_message.first_death_time:
+            death_time = dlq_message.first_death_time
+            if (
+                stats[queue_name]["oldest_death"] is None
+                or death_time < stats[queue_name]["oldest_death"]
+            ):
+                stats[queue_name]["oldest_death"] = death_time
+            if (
+                stats[queue_name]["newest_death"] is None
+                or death_time > stats[queue_name]["newest_death"]
+            ):
+                stats[queue_name]["newest_death"] = death_time
+
+    return stats
+
+
+@cli.group()
+def dlq() -> None:
+    """Dead Letter Queue (DLQ) management commands.
+
+    Commands for inspecting, managing, and recovering messages from the
+    Dead Letter Queue.
+    """
+
+
+@dlq.command("stats")
+@click.option(
+    "--broker-url",
+    type=str,
+    envvar="BROKER_URL",
+    required=True,
+    help="The URL for the message broker",
+)
+@click.option(
+    "--json",
+    "output_json",
+    is_flag=True,
+    default=False,
+    help="Output statistics in JSON format",
+)
+def dlq_stats(broker_url: str, output_json: bool) -> None:
+    """Show statistics about the Dead Letter Queue.
+
+    Displays the total message count and a breakdown by original queue,
+    including death reasons and timestamps.
+
+    Examples:
+
+    \b
+    # Show DLQ stats
+    jararaca dlq stats --broker-url amqp://guest:guest@localhost/
+
+    \b
+    # Output as JSON
+    jararaca dlq stats --broker-url amqp://guest:guest@localhost/ --json
+    """
+
+    async def run_stats() -> None:
+        connection = await aio_pika.connect(broker_url)
+        try:
+            # Get total message count
+            async with connection.channel() as channel:
+                try:
+                    queue_info = await channel.declare_queue(
+                        RabbitmqUtils.DEAD_LETTER_QUEUE, passive=True
+                    )
+                    total_count = queue_info.declaration_result.message_count or 0
+                except Exception:
+                    click.echo("Dead Letter Queue does not exist or is not accessible.")
+                    return
+
+            if total_count == 0:
+                if output_json:
+                    import json
+
+                    click.echo(
+                        json.dumps({"total_messages": 0, "queues": {}}, indent=2)
+                    )
+                else:
+                    click.echo("✓ Dead Letter Queue is empty!")
+                return
+
+            # Get detailed stats by queue
+            stats = await get_dlq_stats_by_queue(connection)
+
+            if output_json:
+                import json
+
+                result = {"total_messages": total_count, "queues": stats}
+                click.echo(json.dumps(result, indent=2, default=str))
+            else:
+                click.echo(f"\n{'='*60}")
+                click.echo("Dead Letter Queue Statistics")
+                click.echo(f"{'='*60}")
+                click.echo(f"\nTotal Messages: {total_count}")
+                click.echo("\nBreakdown by Original Queue:")
+                click.echo(f"{'-'*60}")
+
+                for queue_name, queue_stats in sorted(
+                    stats.items(), key=lambda x: x[1]["count"], reverse=True
+                ):
+                    click.echo(f"\n 📦 {queue_name}")
+                    click.echo(f" Messages: {queue_stats['count']}")
+                    click.echo(" Reasons: ")
+                    for reason, count in queue_stats["reasons"].items():
+                        click.echo(f" - {reason}: {count}")
+                    if queue_stats["oldest_death"]:
+                        click.echo(f" Oldest: {queue_stats['oldest_death']}")
+                    if queue_stats["newest_death"]:
+                        click.echo(f" Newest: {queue_stats['newest_death']}")
+
+                click.echo(f"\n{'='*60}")
+
+        finally:
+            await connection.close()
+
+    asyncio.run(run_stats())
+
+
+@dlq.command("list")
+@click.option(
+    "--broker-url",
+    type=str,
+    envvar="BROKER_URL",
+    required=True,
+    help="The URL for the message broker",
+)
+@click.option(
+    "--limit",
+    type=int,
+    default=10,
+    help="Maximum number of messages to display (default: 10)",
+)
+@click.option(
+    "--queue",
+    "queue_filter",
+    type=str,
+    default=None,
+    help="Filter messages by original queue name (supports partial match)",
+)
+@click.option(
+    "--show-body",
+    is_flag=True,
+    default=False,
+    help="Show message body content",
+)
+@click.option(
+    "--json",
+    "output_json",
+    is_flag=True,
+    default=False,
+    help="Output messages in JSON format",
+)
+def dlq_list(
+    broker_url: str,
+    limit: int,
+    queue_filter: str | None,
+    show_body: bool,
+    output_json: bool,
+) -> None:
+    """List messages in the Dead Letter Queue.
+
+    Shows details about each message including the original queue,
+    death reason, and timestamps.
+
+    Examples:
+
+    \b
+    # List first 10 messages
+    jararaca dlq list --broker-url amqp://guest:guest@localhost/
+
+    \b
+    # List messages from a specific queue
+    jararaca dlq list --broker-url amqp://guest:guest@localhost/ --queue user.events
+
+    \b
+    # Show message bodies
+    jararaca dlq list --broker-url amqp://guest:guest@localhost/ --show-body
+    """
+
+    async def run_list() -> None:
+        connection = await aio_pika.connect(broker_url)
+        try:
+            messages = await fetch_dlq_messages(connection, limit=limit, consume=False)
+
+            if not messages:
+                click.echo("No messages in the Dead Letter Queue.")
+                return
+
+            # Filter by queue if specified
+            if queue_filter:
+                messages = [
+                    (msg, raw)
+                    for msg, raw in messages
+                    if queue_filter.lower() in msg.original_queue.lower()
+                ]
+
+                if not messages:
+                    click.echo(f"No messages found matching queue filter: '{queue_filter}'")
+                    return
+
+            if output_json:
+                import json
+
+                result = []
+                for dlq_msg, _ in messages:
+                    msg_dict: dict[str, Any] = {
+                        "message_id": dlq_msg.message_id,
+                        "original_queue": dlq_msg.original_queue,
+                        "routing_key": dlq_msg.routing_key,
+                        "death_reason": dlq_msg.death_reason,
+                        "death_count": dlq_msg.death_count,
+                        "first_death_time": dlq_msg.first_death_time,
+                        "content_type": dlq_msg.content_type,
+                    }
+                    if show_body:
+                        try:
+                            msg_dict["body"] = dlq_msg.body.decode("utf-8")
+                        except Exception:
+                            msg_dict["body"] = dlq_msg.body.hex()
+                    result.append(msg_dict)
+                click.echo(json.dumps(result, indent=2, default=str))
+            else:
+                click.echo(f"\n{'='*70}")
+                click.echo(f"Dead Letter Queue Messages (showing {len(messages)})")
+                click.echo(f"{'='*70}")
+
+                for i, (dlq_msg, _) in enumerate(messages, 1):
+                    click.echo(f"\n[{i}] Message ID: {dlq_msg.message_id or 'N/A'}")
+                    click.echo(f" Original Queue: {dlq_msg.original_queue}")
+                    click.echo(f" Routing Key: {dlq_msg.routing_key}")
+                    click.echo(f" Death Reason: {dlq_msg.death_reason}")
+                    click.echo(f" Death Count: {dlq_msg.death_count}")
+                    click.echo(f" First Death: {dlq_msg.first_death_time or 'N/A'}")
+                    click.echo(f" Content-Type: {dlq_msg.content_type or 'N/A'}")
+
+                    if show_body:
+                        try:
+                            body_str = dlq_msg.body.decode("utf-8")
+                            # Truncate if too long
+                            if len(body_str) > 500:
+                                body_str = body_str[:500] + "... (truncated)"
+                            click.echo(f" Body: {body_str}")
+                        except Exception:
+                            click.echo(f" Body (hex): {dlq_msg.body[:100].hex()}...")
+
+                click.echo(f"\n{'='*70}")
+
+        finally:
+            await connection.close()
+
+    asyncio.run(run_list())
+
+
+@dlq.command("purge")
+@click.option(
+    "--broker-url",
+    type=str,
+    envvar="BROKER_URL",
+    required=True,
+    help="The URL for the message broker",
+)
+@click.option(
+    "--force",
+    "-f",
+    is_flag=True,
+    default=False,
+    help="Skip confirmation prompt",
+)
+def dlq_purge(broker_url: str, force: bool) -> None:
+    """Purge all messages from the Dead Letter Queue.
+
+    WARNING: This action is irreversible. All messages will be permanently deleted.
+
+    Examples:
+
+    \b
+    # Purge with confirmation
+    jararaca dlq purge --broker-url amqp://guest:guest@localhost/
+
+    \b
+    # Purge without confirmation
+    jararaca dlq purge --broker-url amqp://guest:guest@localhost/ --force
+    """
+
+    async def run_purge() -> None:
+        connection = await aio_pika.connect(broker_url)
+        try:
+            async with connection.channel() as channel:
+                # First check how many messages are in the queue
+                try:
+                    queue_info = await channel.declare_queue(
+                        RabbitmqUtils.DEAD_LETTER_QUEUE, passive=True
+                    )
+                    message_count = queue_info.declaration_result.message_count or 0
+                except Exception:
+                    click.echo("Dead Letter Queue does not exist or is not accessible.")
+                    return
+
+                if message_count == 0:
+                    click.echo("Dead Letter Queue is already empty.")
+                    return
+
+                if not force:
+                    if not click.confirm(
+                        f"Are you sure you want to purge {message_count} messages from the DLQ? This cannot be undone."
+                    ):
+                        click.echo("Purge cancelled.")
+                        return
+
+                # Purge the queue
+                purged = await RabbitmqUtils.purge_dl_queue(channel)
+                click.echo(f"✓ Successfully purged {purged} messages from the DLQ.")
+
+        finally:
+            await connection.close()
+
+    asyncio.run(run_purge())
+
+
+@dlq.command("requeue")
+@click.option(
+    "--broker-url",
+    type=str,
+    envvar="BROKER_URL",
+    required=True,
+    help="The URL for the message broker",
+)
+@click.option(
+    "--queue",
+    "queue_filter",
+    type=str,
+    default=None,
+    help="Only requeue messages from a specific original queue (supports partial match)",
+)
+@click.option(
+    "--limit",
+    type=int,
+    default=None,
+    help="Maximum number of messages to requeue",
+)
+@click.option(
+    "--force",
+    "-f",
+    is_flag=True,
+    default=False,
+    help="Skip confirmation prompt",
+)
+def dlq_requeue(
+    broker_url: str,
+    queue_filter: str | None,
+    limit: int | None,
+    force: bool,
+) -> None:
+    """Requeue messages from the Dead Letter Queue back to their original queues.
+
+    This command retrieves messages from the DLQ and publishes them back to their
+    original queues for reprocessing.
+
+    Examples:
+
+    \b
+    # Requeue all messages
+    jararaca dlq requeue --broker-url amqp://guest:guest@localhost/
+
+    \b
+    # Requeue messages from a specific queue
+    jararaca dlq requeue --broker-url amqp://guest:guest@localhost/ --queue user.events
+
+    \b
+    # Requeue only 5 messages
+    jararaca dlq requeue --broker-url amqp://guest:guest@localhost/ --limit 5
+    """
+
+    async def run_requeue() -> None:
+        parsed_url = urlparse(broker_url)
+        query_params = parse_qs(parsed_url.query)
+
+        if "exchange" not in query_params or not query_params["exchange"]:
+            click.echo(
+                "ERROR: Exchange must be set in the broker URL query string", err=True
+            )
+            return
+
+        exchange_name = query_params["exchange"][0]
+
+        connection = await aio_pika.connect(broker_url)
+        try:
+            # Fetch messages (will be consumed for requeuing)
+            async with connection.channel() as channel:
+                try:
+                    queue_info = await channel.declare_queue(
+                        RabbitmqUtils.DEAD_LETTER_QUEUE, passive=True
+                    )
+                    total_count = queue_info.declaration_result.message_count or 0
+                except Exception:
+                    click.echo("Dead Letter Queue does not exist or is not accessible.")
+                    return
+
+            if total_count == 0:
+                click.echo("Dead Letter Queue is empty.")
+                return
+
+            # Get messages without consuming first to show count
+            messages_preview = await fetch_dlq_messages(
+                connection, limit=limit, consume=False
+            )
+
+            if queue_filter:
+                messages_preview = [
+                    (msg, raw)
+                    for msg, raw in messages_preview
+                    if queue_filter.lower() in msg.original_queue.lower()
+                ]
+
+                if not messages_preview:
+                    click.echo(
+                        f"No messages found matching queue filter: '{queue_filter}'"
+                    )
+                    return
+
+            requeue_count = len(messages_preview)
+
+            if not force:
+                if not click.confirm(
+                    f"Are you sure you want to requeue {requeue_count} messages?"
+                ):
+                    click.echo("Requeue cancelled.")
+                    return
+
+            # Now actually consume and requeue messages
+            async with connection.channel() as channel:
+                queue = await RabbitmqUtils.get_dl_queue(channel)
+                exchange = await RabbitmqUtils.get_main_exchange(channel, exchange_name)
+
+                requeued = 0
+                errors = 0
+
+                count = 0
+                while limit is None or count < limit:
+                    try:
+                        raw_message = await asyncio.wait_for(
+                            queue.get(no_ack=False), timeout=1.0
+                        )
+                    except asyncio.TimeoutError:
+                        break
+
+                    if raw_message is None:
+                        break
+
+                    # Apply queue filter
+                    headers = raw_message.headers or {}
+                    x_death_raw = headers.get("x-death")
+                    original_queue = ""
+                    if isinstance(x_death_raw, list) and len(x_death_raw) > 0:
+                        death_info = x_death_raw[0]
+                        if isinstance(death_info, dict):
+                            original_queue = str(death_info.get("queue", ""))
+
+                    if (
+                        queue_filter
+                        and queue_filter.lower() not in original_queue.lower()
+                    ):
+                        # Requeue back to DLQ (don't process this one)
+                        await raw_message.nack(requeue=True)
+                        continue
+
+                    try:
+                        # Publish to the original routing key
+                        routing_key = raw_message.routing_key or original_queue
+                        await exchange.publish(
+                            aio_pika.Message(
+                                body=raw_message.body,
+                                content_type=raw_message.content_type,
+                                headers={"x-requeued-from-dlq": True},
+                            ),
+                            routing_key=routing_key,
+                        )
+                        await raw_message.ack()
+                        requeued += 1
+                    except Exception as e:
+                        click.echo(f"Error requeuing message: {e}", err=True)
+                        await raw_message.nack(requeue=True)
+                        errors += 1
+
+                    count += 1
+
+                click.echo("\n✓ Requeue complete:")
+                click.echo(f" - Requeued: {requeued}")
+                if errors:
+                    click.echo(f" - Errors: {errors}")
+
+        finally:
+            await connection.close()
+
+    asyncio.run(run_requeue())
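Note: the fetch_dlq_messages helper added above relies on RabbitMQ's standard x-death header, which the broker attaches to a message each time it is dead-lettered. The minimal sketch below (sample values are illustrative, not taken from the package) shows the header shape and the same field extraction the helper performs:

# Illustrative sketch of RabbitMQ's "x-death" header on a dead-lettered message
# and the field extraction performed by fetch_dlq_messages above.
sample_headers = {
    "x-death": [
        {
            "queue": "user.events",         # queue the message was rejected from
            "reason": "rejected",           # rejected | expired | maxlen
            "count": 3,                     # times dead-lettered from that queue
            "time": "2025-01-01T12:00:00",  # timestamp of the first death
        }
    ]
}

x_death = sample_headers.get("x-death") or []
if isinstance(x_death, list) and x_death:
    death_info = x_death[0]
    original_queue = str(death_info.get("queue", "unknown"))
    death_reason = str(death_info.get("reason", "unknown"))
    death_count = int(death_info.get("count", 1))
    print(original_queue, death_reason, death_count)  # user.events rejected 3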
jararaca/core/providers.py CHANGED
@@ -13,6 +13,10 @@ class Token(Generic[T]):
     type_: Type[T]
     name: str
 
+    @classmethod
+    def create(cls, type_: Type[T], name: str) -> "Token[T]":
+        return cls(type_=type_, name=name)
+
 
 @dataclass
 class ProviderSpec:
jararaca/helpers/__init__.py ADDED
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2025 Lucas S
+#
+# SPDX-License-Identifier: GPL-3.0-or-later
jararaca/helpers/global_scheduler/__init__.py ADDED
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2025 Lucas S
+#
+# SPDX-License-Identifier: GPL-3.0-or-later
jararaca/helpers/global_scheduler/config.py ADDED
@@ -0,0 +1,21 @@
+# SPDX-FileCopyrightText: 2025 Lucas S
+#
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from typing import Annotated
+
+from pydantic import BaseModel
+
+from jararaca.core.providers import Token
+
+
+class GlobalSchedulerConfig(BaseModel):
+    MAX_CONCURRENT_JOBS: int = 10
+
+
+GlobalSchedulerConfigToken = Token.create(
+    GlobalSchedulerConfig, "GlobalSchedulerConfig"
+)
+GlobalSchedulerConfigAnnotated = Annotated[
+    GlobalSchedulerConfig, GlobalSchedulerConfigToken
+]
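The GlobalSchedulerConfigAnnotated alias above pairs the Pydantic model with its Token, so a single annotation carries both the concrete type and the lookup key. A minimal sketch of how such an alias can be unpacked with standard typing tools (illustrative only; this diff does not show how jararaca itself resolves the token at injection time):

from typing import get_args

from jararaca.core.providers import Token
from jararaca.helpers.global_scheduler.config import (
    GlobalSchedulerConfig,
    GlobalSchedulerConfigAnnotated,
)

# get_args() on an Annotated alias returns (underlying type, *metadata),
# so the Token travels with the type hint and can serve as a lookup key.
base_type, token = get_args(GlobalSchedulerConfigAnnotated)
assert base_type is GlobalSchedulerConfig
assert isinstance(token, Token)
print(token.name)  # "GlobalSchedulerConfig"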