rrq 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,853 @@
1
+ """DLQ (Dead Letter Queue) management commands"""
2
+
3
+ import json
4
+ from datetime import datetime
5
+ from typing import Dict, List, Optional
6
+
7
+ import click
8
+ from rich.console import Console
9
+ from rich.table import Table
10
+
11
+ from rrq.cli_commands.base import AsyncCommand, get_job_store, load_app_settings
12
+ from rrq.cli_commands.utils import format_timestamp
13
+ from rrq.constants import DLQ_KEY_PREFIX, JOB_KEY_PREFIX
14
+
15
+ # Error truncation lengths for consistency
16
+ ERROR_DISPLAY_LENGTH = 50 # For tables and display
17
+ ERROR_GROUPING_LENGTH = 100 # For statistics and grouping
18
+
19
+
20
class DLQCommands(AsyncCommand):
    """DLQ management commands"""

    def register(self, cli_group: click.Group) -> None:
        """Register DLQ commands with the CLI"""

        # Parent group: all subcommands are reachable as `... dlq <cmd>`.
        @cli_group.group("dlq")
        def dlq_group():
            """Manage Dead Letter Queue (DLQ) jobs"""
            pass

        # `dlq list` — paginated, filterable listing of dead-lettered jobs.
        @dlq_group.command("list")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            required=False,
            default=None,
            help="Python path to settings object",
        )
        @click.option(
            "--queue",
            "original_queue",
            type=str,
            required=False,
            help="Filter by original queue name",
        )
        @click.option(
            "--function",
            "function_name",
            type=str,
            required=False,
            help="Filter by function name",
        )
        @click.option(
            "--limit", type=int, default=20, help="Maximum number of jobs to show"
        )
        @click.option("--offset", type=int, default=0, help="Offset for pagination")
        @click.option(
            "--dlq-name",
            type=str,
            required=False,
            help="Name of the DLQ to inspect (defaults to settings.default_dlq_name)",
        )
        @click.option("--raw", is_flag=True, help="Show raw job data as JSON")
        @click.option(
            "--batch-size",
            type=int,
            default=100,
            help="Batch size for fetching jobs (optimization for large DLQs)",
        )
        def list_dlq_jobs(
            settings_object_path: str,
            original_queue: Optional[str],
            function_name: Optional[str],
            limit: int,
            offset: int,
            dlq_name: Optional[str],
            raw: bool,
            batch_size: int,
        ):
            """List jobs in the Dead Letter Queue"""
            # Bridge click's synchronous callback into the async implementation.
            return self.make_async(self._list_dlq_jobs_async)(
                settings_object_path,
                original_queue,
                function_name,
                limit,
                offset,
                dlq_name,
                raw,
                batch_size,
            )

        # `dlq stats` — aggregate counts, timing and error patterns.
        @dlq_group.command("stats")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            required=False,
            default=None,
            help="Python path to settings object",
        )
        @click.option(
            "--dlq-name",
            type=str,
            required=False,
            help="Name of the DLQ to analyze (defaults to settings.default_dlq_name)",
        )
        def dlq_stats(settings_object_path: str, dlq_name: Optional[str]):
            """Show DLQ statistics and error patterns"""
            return self.make_async(self._dlq_stats_async)(
                settings_object_path, dlq_name
            )

        # `dlq requeue` — move dead-lettered jobs back onto a live queue.
        @dlq_group.command("requeue")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            required=False,
            default=None,
            help="Python path to settings object",
        )
        @click.option(
            "--dlq-name",
            type=str,
            required=False,
            help="Name of the DLQ (defaults to settings.default_dlq_name)",
        )
        @click.option(
            "--target-queue",
            type=str,
            required=False,
            help="Target queue name (defaults to settings.default_queue_name)",
        )
        @click.option(
            "--queue",
            "original_queue",
            type=str,
            required=False,
            help="Filter by original queue name",
        )
        @click.option(
            "--function",
            "function_name",
            type=str,
            required=False,
            help="Filter by function name",
        )
        @click.option(
            "--job-id", type=str, required=False, help="Requeue specific job by ID"
        )
        @click.option(
            "--limit",
            type=int,
            required=False,
            help="Maximum number of jobs to requeue",
        )
        @click.option(
            "--all",
            "requeue_all",
            is_flag=True,
            help="Requeue all jobs (required if no other filters specified)",
        )
        @click.option(
            "--dry-run",
            is_flag=True,
            help="Show what would be requeued without actually doing it",
        )
        def requeue_dlq_jobs(
            settings_object_path: str,
            dlq_name: Optional[str],
            target_queue: Optional[str],
            original_queue: Optional[str],
            function_name: Optional[str],
            job_id: Optional[str],
            limit: Optional[int],
            requeue_all: bool,
            dry_run: bool,
        ):
            """Requeue jobs from DLQ back to a live queue with filtering"""
            return self.make_async(self._requeue_dlq_jobs_async)(
                settings_object_path,
                dlq_name,
                target_queue,
                original_queue,
                function_name,
                job_id,
                limit,
                requeue_all,
                dry_run,
            )

        # `dlq inspect JOB_ID` — full detail view of one dead-lettered job.
        @dlq_group.command("inspect")
        @click.argument("job_id")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            required=False,
            default=None,
            help="Python path to settings object",
        )
        @click.option("--raw", is_flag=True, help="Show raw job data as JSON")
        def inspect_dlq_job(job_id: str, settings_object_path: str, raw: bool):
            """Inspect a specific job in the DLQ"""
            return self.make_async(self._inspect_dlq_job_async)(
                job_id, settings_object_path, raw
            )
210
async def _get_dlq_jobs(
    self,
    job_store,
    dlq_name: str,
    original_queue: Optional[str] = None,
    function_name: Optional[str] = None,
    limit: int = 20,
    offset: int = 0,
    batch_size: int = 100,
) -> List[Dict]:
    """Get jobs from DLQ with filtering and batch fetching optimization.

    Fetches all job IDs from the DLQ list, hydrates them in pipeline
    batches of ``batch_size``, applies the optional queue/function
    filters, sorts newest-first by completion time, and returns the
    ``offset``/``limit`` page.
    """
    dlq_key = f"{DLQ_KEY_PREFIX}{dlq_name}"

    # The DLQ is a Redis list of job IDs (stored as bytes).
    job_ids = await job_store.redis.lrange(dlq_key, 0, -1)
    job_ids = [job_id.decode("utf-8") for job_id in job_ids]

    jobs = []
    for i in range(0, len(job_ids), batch_size):
        batch_jobs = await self._batch_get_jobs(job_store, job_ids[i : i + batch_size])
        for job_data in batch_jobs:
            if not job_data:
                continue  # job hash expired/deleted since it was dead-lettered
            # Apply optional filters.
            if original_queue and job_data.get("queue_name") != original_queue:
                continue
            if function_name and job_data.get("function_name") != function_name:
                continue
            jobs.append(job_data)

    def _completion_key(job: Dict) -> float:
        # Hash values come back as strings from _batch_get_jobs, so the
        # original `job.get("completion_time", 0)` key mixed str with the
        # int default and raised TypeError during sorting whenever any job
        # lacked a completion_time. Coerce to float (assumes numeric epoch
        # timestamps — TODO confirm against the writer side); fall back to
        # 0.0 for missing/unparsable values.
        try:
            return float(job.get("completion_time") or 0)
        except (TypeError, ValueError):
            return 0.0

    # Sort by completion time (newest first).
    jobs.sort(key=_completion_key, reverse=True)

    # Apply pagination.
    return jobs[offset : offset + limit]
250
+
251
async def _batch_get_jobs(
    self, job_store, job_ids: List[str]
) -> List[Optional[Dict]]:
    """Efficiently fetch multiple jobs using a Redis pipeline.

    Issues one HGETALL per job ID in a single non-transactional pipeline
    (one network round trip per batch). Returns one entry per input ID,
    in order: a str->str dict for existing jobs, ``None`` for missing
    ones, so callers can align results with ``job_ids``.

    Note: the redundant local ``from rrq.constants import JOB_KEY_PREFIX``
    was removed — the module already imports it at the top.
    """
    if not job_ids:
        return []

    async with job_store.redis.pipeline(transaction=False) as pipe:
        for job_id in job_ids:
            pipe.hgetall(f"{JOB_KEY_PREFIX}{job_id}")
        results = await pipe.execute()

    jobs: List[Optional[Dict]] = []
    for result in results:
        if result:
            # Decode bytes to str, matching job_store.get_job's convention.
            jobs.append(
                {k.decode("utf-8"): v.decode("utf-8") for k, v in result.items()}
            )
        else:
            # Keep a placeholder so positions still line up with job_ids.
            jobs.append(None)

    return jobs
280
+
281
async def _get_dlq_job_count(self, job_store, dlq_name: str) -> int:
    """Return the total number of job IDs currently held in the DLQ list."""
    return await job_store.redis.llen(f"{DLQ_KEY_PREFIX}{dlq_name}")
285
+
286
async def _get_dlq_statistics(self, job_store, dlq_name: str) -> Dict:
    """Get comprehensive DLQ statistics.

    Returns the zeroed structure when the DLQ is empty; otherwise
    hydrates every dead-lettered job and aggregates the numbers.
    """
    dlq_key = f"{DLQ_KEY_PREFIX}{dlq_name}"

    ids = await self._get_dlq_job_ids(job_store, dlq_key)
    if not ids:
        return self._create_empty_statistics()

    hydrated = await self._fetch_job_data_batch(job_store, ids)
    return self._calculate_dlq_statistics(hydrated)
301
+
302
+ async def _get_dlq_job_ids(self, job_store, dlq_key: str) -> List[str]:
303
+ """Get all job IDs from DLQ"""
304
+ job_ids = await job_store.redis.lrange(dlq_key, 0, -1)
305
+ return [job_id.decode("utf-8") for job_id in job_ids]
306
+
307
+ def _create_empty_statistics(self) -> Dict:
308
+ """Create empty statistics structure"""
309
+ return {
310
+ "total_jobs": 0,
311
+ "oldest_job_time": None,
312
+ "newest_job_time": None,
313
+ "avg_retries": 0,
314
+ "by_queue": {},
315
+ "by_function": {},
316
+ "top_errors": {},
317
+ }
318
+
319
+ async def _fetch_job_data_batch(self, job_store, job_ids: List[str]) -> List[Dict]:
320
+ """Fetch job data for multiple job IDs"""
321
+ jobs = []
322
+ for job_id in job_ids:
323
+ job_data = await job_store.get_job(job_id)
324
+ if job_data:
325
+ jobs.append(job_data)
326
+ return jobs
327
+
328
+ def _calculate_dlq_statistics(self, jobs: List[Dict]) -> Dict:
329
+ """Calculate comprehensive statistics from job data"""
330
+ completion_times = self._extract_completion_times(jobs)
331
+ retries = self._extract_retries(jobs)
332
+
333
+ by_queue = self._count_by_field(jobs, "queue_name")
334
+ by_function = self._count_by_field(jobs, "function_name")
335
+ top_errors = self._count_top_errors(jobs)
336
+
337
+ return {
338
+ "total_jobs": len(jobs),
339
+ "oldest_job_time": min(completion_times) if completion_times else None,
340
+ "newest_job_time": max(completion_times) if completion_times else None,
341
+ "avg_retries": sum(retries) / len(retries) if retries else 0,
342
+ "by_queue": dict(
343
+ sorted(by_queue.items(), key=lambda x: x[1], reverse=True)
344
+ ),
345
+ "by_function": dict(
346
+ sorted(by_function.items(), key=lambda x: x[1], reverse=True)
347
+ ),
348
+ "top_errors": top_errors,
349
+ }
350
+
351
+ def _extract_completion_times(self, jobs: List[Dict]) -> List[float]:
352
+ """Extract completion times from jobs"""
353
+ return [
354
+ job.get("completion_time", 0) for job in jobs if job.get("completion_time")
355
+ ]
356
+
357
+ def _extract_retries(self, jobs: List[Dict]) -> List[int]:
358
+ """Extract retry counts from jobs"""
359
+ return [job.get("current_retries", 0) for job in jobs]
360
+
361
+ def _count_by_field(self, jobs: List[Dict], field_name: str) -> Dict[str, int]:
362
+ """Count jobs by a specific field"""
363
+ counts = {}
364
+ for job in jobs:
365
+ value = job.get(field_name, "Unknown")
366
+ counts[value] = counts.get(value, 0) + 1
367
+ return counts
368
+
369
def _count_top_errors(self, jobs: List[Dict]) -> Dict[str, int]:
    """Count error-message prefixes and return the 10 most frequent."""
    tallies: Dict[str, int] = {}
    for job in jobs:
        # Group on the first ERROR_GROUPING_LENGTH chars so variable
        # suffixes (ids, values) collapse into a single bucket.
        prefix = job.get("last_error", "Unknown error")[:ERROR_GROUPING_LENGTH]
        tallies[prefix] = tallies.get(prefix, 0) + 1

    ranked = sorted(tallies.items(), key=lambda item: item[1], reverse=True)
    return dict(ranked[:10])
380
+
381
+ async def _get_dlq_jobs_by_id(
382
+ self, job_store, dlq_name: str, job_ids: List[str]
383
+ ) -> List[Dict]:
384
+ """Get specific jobs from DLQ by their IDs"""
385
+ jobs = []
386
+ for job_id in job_ids:
387
+ job_data = await job_store.get_job(job_id)
388
+ if job_data and await self._is_job_in_dlq(job_store, dlq_name, job_id):
389
+ jobs.append(job_data)
390
+ return jobs
391
+
392
async def _is_job_in_dlq(self, job_store, dlq_name: str, job_id: str) -> bool:
    """Check whether ``job_id`` appears in the DLQ list (full O(n) scan)."""
    dlq_key = f"{DLQ_KEY_PREFIX}{dlq_name}"
    members = await job_store.redis.lrange(dlq_key, 0, -1)
    return any(raw.decode("utf-8") == job_id for raw in members)
398
+
399
async def _requeue_specific_jobs(
    self, job_store, dlq_name: str, target_queue: str, jobs: List[Dict]
) -> int:
    """Move the given jobs from the DLQ list back onto ``target_queue``.

    Returns the number of jobs actually moved. A job is only re-enqueued
    when LREM reports it was removed from the DLQ list, so jobs already
    taken out by someone else are not double-queued.
    """
    dlq_key = f"{DLQ_KEY_PREFIX}{dlq_name}"
    moved = 0

    for job in jobs:
        jid = job["id"]

        # Remove one occurrence of the ID from the DLQ list.
        removed = await job_store.redis.lrem(dlq_key, 1, jid.encode("utf-8"))
        if removed > 0:
            # Schedule immediately (score = current time in ms).
            score_ms = int(datetime.now().timestamp() * 1000)
            await job_store.add_job_to_queue(target_queue, jid, score_ms)

            # Reset the job hash so workers treat it as a fresh pending job.
            await job_store.redis.hset(
                f"{JOB_KEY_PREFIX}{jid}",
                mapping={
                    "status": "pending".encode("utf-8"),
                    "queue_name": target_queue.encode("utf-8"),
                },
            )

            moved += 1

    return moved
429
+
430
async def _list_dlq_jobs_async(
    self,
    settings_object_path: str,
    original_queue: Optional[str],
    function_name: Optional[str],
    limit: int,
    offset: int,
    dlq_name: Optional[str],
    raw: bool,
    batch_size: int = 100,
):
    """Async implementation for listing DLQ jobs."""
    settings = load_app_settings(settings_object_path)
    job_store = await get_job_store(settings)

    try:
        active_dlq = dlq_name or settings.default_dlq_name
        console = Console()

        page = await self._get_dlq_jobs(
            job_store,
            active_dlq,
            original_queue,
            function_name,
            limit,
            offset,
            batch_size,
        )

        if raw:
            self._display_raw_jobs(console, page)
            return

        if not page:
            self._display_no_jobs_message(
                console, active_dlq, original_queue, function_name
            )
            return

        # Render the table, then the "Showing X-Y of Z" footer.
        console.print(self._create_dlq_jobs_table(active_dlq, page))
        await self._display_pagination_info(
            console, job_store, active_dlq, offset, limit, len(page)
        )

    finally:
        # Always release the Redis connection, even if rendering failed.
        await job_store.aclose()
480
+
481
+ def _display_raw_jobs(self, console: Console, jobs: List[Dict]):
482
+ """Display jobs in raw JSON format"""
483
+ for job in jobs:
484
+ console.print(json.dumps(job, indent=2, default=str))
485
+
486
+ def _display_no_jobs_message(
487
+ self,
488
+ console: Console,
489
+ dlq_name: str,
490
+ original_queue: Optional[str],
491
+ function_name: Optional[str],
492
+ ):
493
+ """Display message when no jobs found"""
494
+ console.print(f"[yellow]No jobs found in DLQ: {dlq_name}")
495
+ if original_queue:
496
+ console.print(f"[yellow]Filtered by queue: {original_queue}")
497
+ if function_name:
498
+ console.print(f"[yellow]Filtered by function: {function_name}")
499
+
500
def _create_dlq_jobs_table(self, dlq_name: str, jobs: List[Dict]) -> "Table":
    """Build the rich table summarizing the given DLQ jobs."""
    table = Table(title=f"Jobs in DLQ: {dlq_name}")

    # Column layout: (header, add_column keyword options).
    column_specs = [
        ("Job ID", {"style": "cyan", "no_wrap": True, "max_width": 20}),
        ("Function", {"style": "green", "max_width": 15}),
        ("Queue", {"style": "blue", "max_width": 12}),
        ("Error", {"style": "red", "max_width": 25}),
        ("Failed At", {"style": "yellow", "max_width": 16}),
        ("Retries", {"style": "magenta", "justify": "right", "max_width": 8}),
    ]
    for header, options in column_specs:
        table.add_column(header, **options)

    for job in jobs:
        table.add_row(
            self._truncate_job_id(job["id"]),
            job.get("function_name", "N/A"),
            job.get("queue_name", "N/A"),
            self._truncate_error_text(job.get("last_error", "Unknown error")),
            format_timestamp(job.get("completion_time")),
            str(job.get("current_retries", 0)),
        )

    return table
525
+
526
def _truncate_error_text(self, error_text: str) -> str:
    """Clip messages longer than ERROR_DISPLAY_LENGTH, ending in '...'."""
    if len(error_text) > ERROR_DISPLAY_LENGTH:
        return error_text[: ERROR_DISPLAY_LENGTH - 3] + "..."
    return error_text
531
+
532
+ def _truncate_job_id(self, job_id: str) -> str:
533
+ """Truncate long job IDs"""
534
+ return job_id[:18] + "..." if len(job_id) > 18 else job_id
535
+
536
+ async def _display_pagination_info(
537
+ self,
538
+ console: Console,
539
+ job_store,
540
+ dlq_name: str,
541
+ offset: int,
542
+ limit: int,
543
+ jobs_shown: int,
544
+ ):
545
+ """Display pagination information"""
546
+ total_jobs = await self._get_dlq_job_count(job_store, dlq_name)
547
+ start_idx = offset + 1
548
+ end_idx = min(offset + jobs_shown, total_jobs)
549
+
550
+ console.print(f"\n[dim]Showing {start_idx}-{end_idx} of {total_jobs} jobs")
551
+ if end_idx < total_jobs:
552
+ console.print(f"[dim]Use --offset {offset + limit} to see more")
553
+
554
async def _dlq_stats_async(
    self, settings_object_path: str, dlq_name: Optional[str]
):
    """Async implementation for DLQ statistics."""
    settings = load_app_settings(settings_object_path)
    job_store = await get_job_store(settings)

    try:
        active_dlq = dlq_name or settings.default_dlq_name
        console = Console()

        stats = await self._get_dlq_statistics(job_store, active_dlq)
        if stats["total_jobs"] == 0:
            console.print(f"[yellow]DLQ '{active_dlq}' is empty")
            return

        # Render each report section in order.
        self._display_overall_stats(console, active_dlq, stats)
        self._display_queue_breakdown(console, stats)
        self._display_function_breakdown(console, stats)
        self._display_error_patterns(console, stats)

    finally:
        await job_store.aclose()
579
+
580
def _display_overall_stats(self, console: "Console", dlq_name: str, stats: Dict):
    """Render the summary metrics table for the DLQ."""
    summary = Table(title=f"DLQ Statistics: {dlq_name}")
    summary.add_column("Metric", style="cyan")
    summary.add_column("Value", style="green")

    rows = [
        ("Total Jobs", str(stats["total_jobs"])),
        ("Oldest Job", format_timestamp(stats["oldest_job_time"])),
        ("Newest Job", format_timestamp(stats["newest_job_time"])),
        ("Average Retries", f"{stats['avg_retries']:.1f}"),
    ]
    for metric, value in rows:
        summary.add_row(metric, value)

    console.print(summary)
592
+
593
def _display_queue_breakdown(self, console: "Console", stats: Dict):
    """Render per-original-queue job counts with percentages (skips if empty)."""
    breakdown = stats["by_queue"]
    if not breakdown:
        return

    total = stats["total_jobs"]
    queue_table = Table(title="Jobs by Original Queue")
    queue_table.add_column("Queue", style="blue")
    queue_table.add_column("Count", style="green", justify="right")
    queue_table.add_column("Percentage", style="yellow", justify="right")

    for queue_name, count in breakdown.items():
        queue_table.add_row(queue_name, str(count), f"{count / total * 100:.1f}%")

    console.print(queue_table)
608
+
609
def _display_function_breakdown(self, console: "Console", stats: Dict):
    """Render per-function job counts with percentages (skips if empty)."""
    breakdown = stats["by_function"]
    if not breakdown:
        return

    total = stats["total_jobs"]
    func_table = Table(title="Jobs by Function")
    func_table.add_column("Function", style="green")
    func_table.add_column("Count", style="green", justify="right")
    func_table.add_column("Percentage", style="yellow", justify="right")

    for func_name, count in breakdown.items():
        func_table.add_row(func_name, str(count), f"{count / total * 100:.1f}%")

    console.print(func_table)
624
+
625
def _display_error_patterns(self, console: "Console", stats: Dict):
    """Render the most common error-message prefixes (skips if empty)."""
    top_errors = stats["top_errors"]
    if not top_errors:
        return

    error_table = Table(title="Top Error Patterns")
    error_table.add_column(
        f"Error (first {ERROR_DISPLAY_LENGTH} chars)", style="red"
    )
    error_table.add_column("Count", style="green", justify="right")

    for error_text, count in top_errors.items():
        error_table.add_row(self._truncate_error_text(error_text), str(count))

    console.print(error_table)
641
+
642
async def _requeue_dlq_jobs_async(
    self,
    settings_object_path: str,
    dlq_name: Optional[str],
    target_queue: Optional[str],
    original_queue: Optional[str],
    function_name: Optional[str],
    job_id: Optional[str],
    limit: Optional[int],
    requeue_all: bool,
    dry_run: bool,
):
    """Async implementation for requeuing DLQ jobs."""
    settings = load_app_settings(settings_object_path)
    job_store = await get_job_store(settings)

    try:
        source_dlq = dlq_name or settings.default_dlq_name
        destination = target_queue or settings.default_queue_name
        console = Console()

        # Refuse to run without an explicit scope (--all or a filter).
        if not self._validate_requeue_filters(
            console, original_queue, function_name, job_id, requeue_all
        ):
            return

        candidates = await self._get_matching_jobs_for_requeue(
            job_store, source_dlq, job_id, original_queue, function_name, limit
        )
        if not candidates:
            console.print(f"[yellow]No matching jobs found in DLQ: {source_dlq}")
            return

        console.print(f"[cyan]Found {len(candidates)} matching jobs to requeue")

        if dry_run:
            # Preview only — no Redis mutations.
            self._display_dry_run_results(console, candidates, destination)
            return

        moved = await self._requeue_specific_jobs(
            job_store, source_dlq, destination, candidates
        )
        console.print(
            f"[green]Successfully requeued {moved} jobs from DLQ '{source_dlq}' to queue '{destination}'"
        )

    finally:
        await job_store.aclose()
697
+
698
+ def _validate_requeue_filters(
699
+ self,
700
+ console: Console,
701
+ original_queue: Optional[str],
702
+ function_name: Optional[str],
703
+ job_id: Optional[str],
704
+ requeue_all: bool,
705
+ ) -> bool:
706
+ """Validate that at least one filter or --all is specified"""
707
+ has_filters = any([original_queue, function_name, job_id, requeue_all])
708
+ if not has_filters:
709
+ console.print(
710
+ "[red]Error: Must specify --all or at least one filter (--queue, --function, --job-id)"
711
+ )
712
+ return False
713
+ return True
714
+
715
+ async def _get_matching_jobs_for_requeue(
716
+ self,
717
+ job_store,
718
+ dlq_name: str,
719
+ job_id: Optional[str],
720
+ original_queue: Optional[str],
721
+ function_name: Optional[str],
722
+ limit: Optional[int],
723
+ ) -> List[Dict]:
724
+ """Get jobs matching the requeue criteria"""
725
+ if job_id:
726
+ # Single job requeue
727
+ return await self._get_dlq_jobs_by_id(job_store, dlq_name, [job_id])
728
+ else:
729
+ # Filtered requeue
730
+ matching_jobs = await self._get_dlq_jobs(
731
+ job_store,
732
+ dlq_name,
733
+ original_queue,
734
+ function_name,
735
+ limit or 10000,
736
+ 0, # Large limit for getting all matching
737
+ )
738
+
739
+ if limit:
740
+ matching_jobs = matching_jobs[:limit]
741
+
742
+ return matching_jobs
743
+
744
def _display_dry_run_results(
    self, console: "Console", matching_jobs: List[Dict], target_queue: str
):
    """Preview, without side effects, which jobs a requeue would move."""
    console.print(f"[yellow]DRY RUN: Would requeue {len(matching_jobs)} jobs")

    preview = Table(title="Jobs to Requeue (Dry Run)")
    preview.add_column("Job ID", style="cyan", max_width=20)
    preview.add_column("Function", style="green", max_width=15)
    preview.add_column("Original Queue", style="blue", max_width=12)
    preview.add_column("Target Queue", style="magenta", max_width=12)

    # Cap the preview at the first 10 jobs to keep output readable.
    for job in matching_jobs[:10]:
        preview.add_row(
            self._truncate_job_id(job["id"]),
            job.get("function_name", "N/A"),
            job.get("queue_name", "N/A"),
            target_queue,
        )

    console.print(preview)
    remainder = len(matching_jobs) - 10
    if remainder > 0:
        console.print(f"[dim]... and {remainder} more jobs")
768
+
769
async def _inspect_dlq_job_async(
    self, job_id: str, settings_object_path: str, raw: bool
):
    """Async implementation for inspecting a DLQ job."""
    settings = load_app_settings(settings_object_path)
    job_store = await get_job_store(settings)

    try:
        console = Console()
        job_data = await job_store.get_job(job_id)
        if not job_data:
            console.print(f"[red]Job {job_id} not found")
            return

        if raw:
            console.print(json.dumps(job_data, indent=2, default=str))
            return

        # Walk through each detail section in display order.
        self._display_job_details(console, job_id, job_data)
        self._display_timing_info(console, job_data)
        self._display_worker_info(console, job_data)
        self._display_job_arguments(console, job_data)
        self._display_error_info(console, job_data)
        self._display_unique_key_info(console, job_data)

    finally:
        await job_store.aclose()
798
+
799
+ def _display_job_details(self, console: Console, job_id: str, job_data: Dict):
800
+ """Display basic job details"""
801
+ console.print(f"[bold cyan]Job Details: {job_id}")
802
+ console.print(f"[bold]Status:[/] {job_data.get('status', 'Unknown')}")
803
+ console.print(f"[bold]Function:[/] {job_data.get('function_name', 'N/A')}")
804
+ console.print(f"[bold]Original Queue:[/] {job_data.get('queue_name', 'N/A')}")
805
+ console.print(f"[bold]DLQ Name:[/] {job_data.get('dlq_name', 'N/A')}")
806
+ console.print(
807
+ f"[bold]Retries:[/] {job_data.get('current_retries', 0)}/{job_data.get('max_retries', 0)}"
808
+ )
809
+
810
def _display_timing_info(self, console: "Console", job_data: Dict):
    """Print enqueue and failure timestamps for a job."""
    console.print("\n[bold cyan]Timing Information")
    # completion_time records when the job finally failed into the DLQ.
    for label, field in (
        ("Enqueued At", "enqueue_time"),
        ("Failed At", "completion_time"),
    ):
        console.print(f"[bold]{label}:[/] {format_timestamp(job_data.get(field))}")
819
+
820
+ def _display_worker_info(self, console: Console, job_data: Dict):
821
+ """Display worker information"""
822
+ if job_data.get("worker_id"):
823
+ console.print(f"[bold]Last Worker:[/] {job_data.get('worker_id')}")
824
+
825
+ def _display_job_arguments(self, console: Console, job_data: Dict):
826
+ """Display job arguments"""
827
+ if job_data.get("job_args"):
828
+ console.print("\n[bold cyan]Arguments")
829
+ args = json.loads(job_data.get("job_args", "[]"))
830
+ for i, arg in enumerate(args):
831
+ console.print(f"[bold]Arg {i}:[/] {arg}")
832
+
833
+ if job_data.get("job_kwargs"):
834
+ console.print("\n[bold cyan]Keyword Arguments")
835
+ kwargs = json.loads(job_data.get("job_kwargs", "{}"))
836
+ for key, value in kwargs.items():
837
+ console.print(f"[bold]{key}:[/] {value}")
838
+
839
+ def _display_error_info(self, console: Console, job_data: Dict):
840
+ """Display error information"""
841
+ if job_data.get("last_error"):
842
+ console.print("\n[bold red]Error Information")
843
+ console.print(f"[bold]Error:[/] {job_data.get('last_error')}")
844
+
845
+ if job_data.get("traceback"):
846
+ console.print("\n[bold red]Traceback:")
847
+ console.print(job_data.get("traceback"))
848
+
849
+ def _display_unique_key_info(self, console: Console, job_data: Dict):
850
+ """Display unique key information"""
851
+ if job_data.get("job_unique_key"):
852
+ console.print("\n[bold cyan]Unique Key")
853
+ console.print(f"[bold]Key:[/] {job_data.get('job_unique_key')}")