rrq 0.5.0__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,539 @@
1
+ """Queue management and statistics commands"""
2
+
3
from datetime import datetime
from typing import Any, Dict, List, Tuple

import click

from rrq.cli_commands.base import AsyncCommand, get_job_store, load_app_settings
from rrq.constants import DLQ_KEY_PREFIX, JOB_KEY_PREFIX, QUEUE_KEY_PREFIX
from rrq.store import JobStore

from ..utils import (
    console,
    create_progress,
    create_table,
    format_duration,
    format_queue_name,
    format_status,
    format_timestamp,
    print_error,
    print_info,
    print_warning,
)
23
+
24
+
25
class QueueCommands(AsyncCommand):
    """Commands for queue management and statistics"""

    def register(self, cli_group: click.Group) -> None:
        """Register the `queue` command group and its subcommands on *cli_group*.

        Each subcommand is a thin synchronous click wrapper that delegates to
        the matching async implementation via ``self.make_async``. The nested
        functions close over ``self``, which is why registration happens at
        call time rather than at import time.
        """

        @cli_group.group("queue")
        def queue_group():
            """Manage and inspect queues"""
            pass

        # List all queues
        @queue_group.command("list")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        @click.option(
            "--show-empty",
            is_flag=True,
            help="Show queues with no pending jobs",
        )
        def list_queues(settings_object_path: str, show_empty: bool):
            """List all active queues with job counts"""
            self.make_async(self._list_queues)(settings_object_path, show_empty)

        # Show queue statistics
        @queue_group.command("stats")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        @click.option(
            "--queue",
            "queue_names",
            multiple=True,
            help="Specific queue(s) to show stats for",
        )
        @click.option(
            "--max-scan",
            type=int,
            default=1000,
            help="Maximum jobs to scan for status breakdown (0 = unlimited, may be slow)",
        )
        def queue_stats(
            settings_object_path: str, queue_names: Tuple[str], max_scan: int
        ):
            """Show detailed statistics for queues"""
            self.make_async(self._queue_stats)(
                settings_object_path, queue_names, max_scan
            )

        # Inspect a specific queue
        @queue_group.command("inspect")
        @click.argument("queue_name")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        @click.option(
            "--limit",
            type=int,
            default=20,
            help="Number of jobs to show",
        )
        @click.option(
            "--offset",
            type=int,
            default=0,
            help="Offset for pagination",
        )
        def inspect_queue(
            queue_name: str, settings_object_path: str, limit: int, offset: int
        ):
            """Inspect jobs in a specific queue"""
            self.make_async(self._inspect_queue)(
                queue_name, settings_object_path, limit, offset
            )
109
+
110
+ async def _list_queues(self, settings_object_path: str, show_empty: bool) -> None:
111
+ """List all active queues"""
112
+ settings = load_app_settings(settings_object_path)
113
+ job_store = await get_job_store(settings)
114
+
115
+ try:
116
+ queue_keys = await self._get_all_queue_keys(job_store)
117
+ if not queue_keys:
118
+ print_warning("No active queues found")
119
+ return
120
+
121
+ table = self._create_queue_list_table()
122
+ queue_data = await self._fetch_queue_details(
123
+ job_store, queue_keys, show_empty
124
+ )
125
+
126
+ if not queue_data:
127
+ print_warning("No queues to display")
128
+ return
129
+
130
+ self._populate_queue_table(table, queue_data)
131
+ self._display_queue_summary(table, queue_data)
132
+
133
+ finally:
134
+ await job_store.aclose()
135
+
136
+ async def _get_all_queue_keys(self, job_store: JobStore) -> list[str]:
137
+ """Get all queue keys from Redis"""
138
+ queue_pattern = f"{QUEUE_KEY_PREFIX}*"
139
+ queue_keys = []
140
+ async for key in job_store.redis.scan_iter(match=queue_pattern):
141
+ # Keep the key in the format returned by scan_iter (bytes or string depending on Redis client)
142
+ if isinstance(key, bytes):
143
+ queue_keys.append(key.decode())
144
+ else:
145
+ queue_keys.append(key)
146
+ return queue_keys
147
+
148
+ def _create_queue_list_table(self):
149
+ """Create table for queue listing"""
150
+ table = create_table("Active Queues")
151
+ table.add_column("Queue Name", style="cyan")
152
+ table.add_column("Pending Jobs", justify="right")
153
+ table.add_column("Next Job", style="dim")
154
+ table.add_column("Oldest Job", style="dim")
155
+ return table
156
+
157
+ async def _fetch_queue_details(
158
+ self, job_store: JobStore, queue_keys: list[str], show_empty: bool
159
+ ) -> list[tuple]:
160
+ """Fetch detailed information for each queue"""
161
+ with create_progress() as progress:
162
+ task = progress.add_task(
163
+ "Fetching queue information...", total=len(queue_keys)
164
+ )
165
+ queue_data = []
166
+
167
+ for queue_key in queue_keys:
168
+ queue_name = queue_key.replace(QUEUE_KEY_PREFIX, "")
169
+ # Ensure queue_key is passed as the original format from scan_iter
170
+ size = await job_store.redis.zcard(queue_key)
171
+
172
+ if size == 0 and not show_empty:
173
+ progress.update(task, advance=1)
174
+ continue
175
+
176
+ oldest_ts, newest_ts = await self._get_queue_timestamp_range(
177
+ job_store, queue_key, size
178
+ )
179
+ queue_data.append((queue_name, size, newest_ts, oldest_ts))
180
+ progress.update(task, advance=1)
181
+
182
+ # Sort by pending jobs count
183
+ queue_data.sort(key=lambda x: x[1], reverse=True)
184
+ return queue_data
185
+
186
+ async def _get_queue_timestamp_range(
187
+ self, job_store: JobStore, queue_key: str, size: int
188
+ ) -> tuple:
189
+ """Get oldest and newest job timestamps for a queue"""
190
+ oldest_ts = None
191
+ newest_ts = None
192
+
193
+ if size > 0:
194
+ # Get oldest (first) job
195
+ oldest = await job_store.redis.zrange(queue_key, 0, 0, withscores=True)
196
+ if oldest:
197
+ oldest_ts = oldest[0][1]
198
+
199
+ # Get newest (last) job
200
+ newest = await job_store.redis.zrange(queue_key, -1, -1, withscores=True)
201
+ if newest:
202
+ newest_ts = newest[0][1]
203
+
204
+ return oldest_ts, newest_ts
205
+
206
+ def _populate_queue_table(self, table, queue_data: list[tuple]) -> None:
207
+ """Add queue data rows to the table"""
208
+ for queue_name, size, newest_ts, oldest_ts in queue_data:
209
+ table.add_row(
210
+ format_queue_name(queue_name),
211
+ str(size),
212
+ format_timestamp(newest_ts) if newest_ts else "N/A",
213
+ format_timestamp(oldest_ts) if oldest_ts else "N/A",
214
+ )
215
+
216
+ def _display_queue_summary(self, table, queue_data: list[tuple]) -> None:
217
+ """Display queue summary information"""
218
+ total_jobs = sum(size for _, size, _, _ in queue_data)
219
+ console.print(table)
220
+ console.print(
221
+ f"\nTotal: [bold]{len(queue_data)}[/bold] queues, [bold]{total_jobs}[/bold] pending jobs"
222
+ )
223
+
224
+ async def _queue_stats(
225
+ self, settings_object_path: str, queue_names: Tuple[str], max_scan: int = 1000
226
+ ) -> None:
227
+ """Show detailed queue statistics"""
228
+ settings = load_app_settings(settings_object_path)
229
+ job_store = await get_job_store(settings)
230
+
231
+ try:
232
+ # If no specific queues specified, get all queues
233
+ if not queue_names:
234
+ queue_pattern = f"{QUEUE_KEY_PREFIX}*"
235
+ queue_keys = []
236
+ async for key in job_store.redis.scan_iter(match=queue_pattern):
237
+ queue_name = key.decode().replace(QUEUE_KEY_PREFIX, "")
238
+ queue_keys.append(queue_name)
239
+ queue_names = tuple(queue_keys)
240
+
241
+ if not queue_names:
242
+ print_warning("No queues found")
243
+ return
244
+
245
+ # Create overall stats table
246
+ stats_table = create_table("Queue Statistics")
247
+ stats_table.add_column("Queue", style="cyan")
248
+ stats_table.add_column("Total", justify="right")
249
+ stats_table.add_column("Pending", justify="right", style="yellow")
250
+ stats_table.add_column("Active", justify="right", style="blue")
251
+ stats_table.add_column("Completed", justify="right", style="green")
252
+ stats_table.add_column("Failed", justify="right", style="red")
253
+ stats_table.add_column("DLQ", justify="right", style="magenta")
254
+ stats_table.add_column("Avg Wait", justify="right")
255
+ stats_table.add_column("Throughput", justify="right")
256
+
257
+ with create_progress() as progress:
258
+ task = progress.add_task("Analyzing queues...", total=len(queue_names))
259
+
260
+ for queue_name in queue_names:
261
+ stats = await self._get_queue_statistics(
262
+ job_store, queue_name, max_scan
263
+ )
264
+
265
+ if stats["total"] == 0:
266
+ progress.update(task, advance=1)
267
+ continue
268
+
269
+ stats_table.add_row(
270
+ format_queue_name(queue_name),
271
+ str(stats["total"]),
272
+ str(stats["pending"]),
273
+ str(stats["active"]),
274
+ str(stats["completed"]),
275
+ str(stats["failed"]),
276
+ str(stats["dlq_jobs"]),
277
+ format_duration(stats["avg_wait_time"]),
278
+ f"{stats['throughput']:.1f}/min",
279
+ )
280
+
281
+ progress.update(task, advance=1)
282
+
283
+ console.print(stats_table)
284
+
285
+ # Show scan limitation note
286
+ if max_scan > 0:
287
+ console.print(
288
+ f"\n[dim]Note: Active/Completed/Failed counts based on scanning up to {max_scan:,} jobs.[/dim]"
289
+ )
290
+ console.print(
291
+ "[dim]Use --max-scan 0 for complete scan (may be slow for large datasets).[/dim]"
292
+ )
293
+
294
+ finally:
295
+ await job_store.aclose()
296
+
297
+ async def _inspect_queue(
298
+ self, queue_name: str, settings_object_path: str, limit: int, offset: int
299
+ ) -> None:
300
+ """Inspect jobs in a specific queue"""
301
+ settings = load_app_settings(settings_object_path)
302
+ job_store = await get_job_store(settings)
303
+
304
+ try:
305
+ queue_key = f"{QUEUE_KEY_PREFIX}{queue_name}"
306
+
307
+ # Check if queue exists
308
+ if not await job_store.redis.exists(queue_key):
309
+ print_error(f"Queue '{queue_name}' not found")
310
+ return
311
+
312
+ # Get queue size
313
+ total_size = await job_store.redis.zcard(queue_key)
314
+
315
+ if total_size == 0:
316
+ print_info(f"Queue '{queue_name}' is empty")
317
+ return
318
+
319
+ # Get job IDs with scores
320
+ job_entries = await job_store.redis.zrange(
321
+ queue_key, offset, offset + limit - 1, withscores=True
322
+ )
323
+
324
+ # Create jobs table
325
+ table = create_table(f"Jobs in Queue: {queue_name}")
326
+ table.add_column("#", justify="right", style="dim")
327
+ table.add_column("Job ID", style="cyan")
328
+ table.add_column("Function", style="yellow")
329
+ table.add_column("Status", justify="center")
330
+ table.add_column("Scheduled", style="dim")
331
+ table.add_column("Retries", justify="right")
332
+ table.add_column("Priority", justify="right")
333
+
334
+ # Fetch job details
335
+ with create_progress() as progress:
336
+ task = progress.add_task(
337
+ "Fetching job details...", total=len(job_entries)
338
+ )
339
+
340
+ for idx, (job_id_bytes, score) in enumerate(job_entries):
341
+ job_id = job_id_bytes.decode()
342
+
343
+ # Get job data using the new helper method
344
+ job_dict = await job_store.get_job_data_dict(job_id)
345
+
346
+ if not job_dict:
347
+ # Job key missing
348
+ table.add_row(
349
+ str(offset + idx + 1),
350
+ job_id,
351
+ "[red]<missing>[/red]",
352
+ format_status("missing"),
353
+ format_timestamp(score),
354
+ "N/A",
355
+ "N/A",
356
+ )
357
+ else:
358
+ # Parse job data
359
+ function_name = job_dict.get("function_name", "")
360
+ status = job_dict.get("status", "pending")
361
+ retries = job_dict.get("retries", "0")
362
+ priority = score # Score is used as priority
363
+
364
+ table.add_row(
365
+ str(offset + idx + 1),
366
+ job_id[:8] + "...", # Truncate job ID
367
+ function_name or "[unknown]",
368
+ format_status(status),
369
+ format_timestamp(score),
370
+ retries,
371
+ f"{priority:.0f}",
372
+ )
373
+
374
+ progress.update(task, advance=1)
375
+
376
+ console.print(table)
377
+
378
+ # Show pagination info
379
+ showing_start = offset + 1
380
+ showing_end = min(offset + limit, total_size)
381
+ console.print(
382
+ f"\nShowing [bold]{showing_start}-{showing_end}[/bold] of [bold]{total_size}[/bold] jobs"
383
+ )
384
+
385
+ if showing_end < total_size:
386
+ console.print(f"[dim]Use --offset {showing_end} to see more[/dim]")
387
+
388
+ finally:
389
+ await job_store.aclose()
390
+
391
+ async def _get_queue_statistics(
392
+ self, job_store: JobStore, queue_name: str, max_scan: int = 1000
393
+ ) -> Dict[str, any]:
394
+ """Get detailed statistics for a queue"""
395
+ stats = {
396
+ "total": 0,
397
+ "pending": 0,
398
+ "active": 0,
399
+ "completed": 0,
400
+ "failed": 0,
401
+ "dlq_jobs": 0,
402
+ "avg_wait_time": None,
403
+ "throughput": 0.0,
404
+ }
405
+
406
+ queue_key = f"{QUEUE_KEY_PREFIX}{queue_name}"
407
+
408
+ # Get pending jobs count
409
+ stats["pending"] = await job_store.redis.zcard(queue_key)
410
+
411
+ # Get comprehensive job status breakdowns by scanning job records
412
+ status_counts = await self._get_job_status_counts(
413
+ job_store, queue_name, max_scan
414
+ )
415
+ stats.update(status_counts)
416
+
417
+ # Calculate total from all status counts
418
+ stats["total"] = (
419
+ stats["pending"] + stats["active"] + stats["completed"] + stats["failed"]
420
+ )
421
+
422
+ # Calculate average wait time for pending jobs
423
+ if stats["pending"] > 0:
424
+ # Sample first 100 jobs
425
+ job_entries = await job_store.redis.zrange(
426
+ queue_key, 0, 99, withscores=True
427
+ )
428
+ if job_entries:
429
+ now = datetime.now().timestamp()
430
+ wait_times = [now - score for _, score in job_entries]
431
+ stats["avg_wait_time"] = sum(wait_times) / len(wait_times)
432
+
433
+ # Get DLQ jobs for this queue
434
+ stats["dlq_jobs"] = await self._count_dlq_jobs_for_queue(job_store, queue_name)
435
+
436
+ return stats
437
+
438
    async def _get_job_status_counts(
        self, job_store: JobStore, queue_name: str, max_scan: int = 1000
    ) -> Dict[str, int]:
        """Get job counts by status for a queue by scanning job records.

        Args:
            job_store: Redis job store
            queue_name: Queue to analyze
            max_scan: Maximum number of jobs to scan for performance (0 = unlimited)

        Returns:
            Dict with "active", "completed", and "failed" counts for jobs whose
            record names this queue.

        Note: This method scans job records in Redis which may be slow for large datasets.
        The max_scan parameter limits scanning for performance. The scan pattern
        matches ALL job keys, not just this queue's — filtering by queue happens
        in _process_job_batch, so max_scan bounds keys scanned, not keys counted.
        """

        counts: Dict[str, int] = {"active": 0, "completed": 0, "failed": 0}

        # Get a sampling of job keys to analyze
        pattern = f"{JOB_KEY_PREFIX}*"

        # Collect job keys in batches for pipeline processing
        job_keys_batch: list = []
        scanned_count = 0
        batch_size = 50  # Process jobs in batches of 50

        # count=100 is a SCAN hint only; the loop still yields one key at a time.
        async for job_key in job_store.redis.scan_iter(match=pattern, count=100):
            if max_scan > 0 and scanned_count >= max_scan:
                break

            job_keys_batch.append(job_key)
            scanned_count += 1

            # Process batch when it's full or we've reached the end
            if len(job_keys_batch) >= batch_size:
                await self._process_job_batch(
                    job_store, job_keys_batch, queue_name, counts
                )
                job_keys_batch = []

        # Process remaining jobs in the last batch
        if job_keys_batch:
            await self._process_job_batch(job_store, job_keys_batch, queue_name, counts)

        return counts
481
+
482
+ async def _process_job_batch(
483
+ self,
484
+ job_store: JobStore,
485
+ job_keys: List[bytes],
486
+ queue_name: str,
487
+ counts: Dict[str, int],
488
+ ) -> None:
489
+ """Process a batch of job keys using Redis pipeline for efficiency"""
490
+ from rrq.job import JobStatus
491
+
492
+ if not job_keys:
493
+ return
494
+
495
+ # Use pipeline to fetch all job data in one round trip
496
+ async with job_store.redis.pipeline(transaction=False) as pipe:
497
+ for job_key in job_keys:
498
+ pipe.hmget(job_key, ["queue_name", "status"])
499
+
500
+ results = await pipe.execute()
501
+
502
+ # Process results
503
+ for result in results:
504
+ if not result or len(result) < 2:
505
+ continue
506
+
507
+ job_queue = result[0].decode("utf-8") if result[0] else ""
508
+ job_status = result[1].decode("utf-8") if result[1] else ""
509
+
510
+ # Only count jobs that belong to this queue
511
+ if job_queue != queue_name:
512
+ continue
513
+
514
+ # Count by status
515
+ if job_status == JobStatus.ACTIVE.value:
516
+ counts["active"] += 1
517
+ elif job_status == JobStatus.COMPLETED.value:
518
+ counts["completed"] += 1
519
+ elif job_status == JobStatus.FAILED.value:
520
+ counts["failed"] += 1
521
+
522
+ async def _count_dlq_jobs_for_queue(
523
+ self, job_store: JobStore, queue_name: str
524
+ ) -> int:
525
+ """Count DLQ jobs that originated from a specific queue"""
526
+ dlq_name = job_store.settings.default_dlq_name
527
+ dlq_key = f"{DLQ_KEY_PREFIX}{dlq_name}"
528
+
529
+ # Get all job IDs from DLQ
530
+ job_ids = await job_store.redis.lrange(dlq_key, 0, -1)
531
+ job_ids = [job_id.decode("utf-8") for job_id in job_ids]
532
+
533
+ count = 0
534
+ for job_id in job_ids:
535
+ job_data = await job_store.get_job(job_id)
536
+ if job_data and job_data.get("queue_name") == queue_name:
537
+ count += 1
538
+
539
+ return count
@@ -0,0 +1,161 @@
1
+ """Utilities for RRQ CLI formatting and display"""
2
+
3
+ from datetime import datetime
4
+ from typing import Any
5
+
6
+ from rich.console import Console
7
+ from rich.table import Table
8
+ from rich.progress import Progress, SpinnerColumn, TextColumn
9
+ from rich.panel import Panel
10
+ from rich.syntax import Syntax
11
+ from rich.text import Text
12
+
13
+ from ..job import JobStatus
14
+
15
+
16
+ console = Console()
17
+
18
+
19
def create_table(title: str | None = None, **kwargs) -> Table:
    """Create a rich Table pre-configured with the CLI's default look.

    Extra keyword arguments are forwarded to the Table constructor untouched,
    so passing a key that duplicates one of the defaults raises TypeError,
    exactly as before.
    """
    return Table(
        title=title,
        show_header=True,
        header_style="bold magenta",
        show_lines=False,
        expand=False,
        **kwargs,
    )
30
+
31
+
32
def format_status(status: JobStatus | str) -> Text:
    """Format job status as uppercase rich Text, colored by status.

    Accepts either a JobStatus enum member or a plain string; unknown
    statuses render in white. Fix: "orange" is not a recognized rich color
    name, so rendering a "retrying" status raised a color-parse error —
    replaced with the valid "dark_orange".
    """
    if isinstance(status, JobStatus):
        status_str = status.value
    else:
        status_str = str(status)

    color_map = {
        "pending": "yellow",
        "active": "blue",
        "completed": "green",
        "failed": "red",
        "retrying": "dark_orange",
        "cancelled": "dim",
    }

    color = color_map.get(status_str.lower(), "white")
    return Text(status_str.upper(), style=color)
50
+
51
+
52
+ def format_timestamp(ts: float | None) -> str:
53
+ """Format timestamp for display"""
54
+ if ts is None:
55
+ return "N/A"
56
+ try:
57
+ dt = datetime.fromtimestamp(ts)
58
+ # Show relative time if recent
59
+ now = datetime.now()
60
+ diff = now - dt
61
+
62
+ if diff.total_seconds() < 60:
63
+ return f"{int(diff.total_seconds())}s ago"
64
+ elif diff.total_seconds() < 3600:
65
+ return f"{int(diff.total_seconds() / 60)}m ago"
66
+ elif diff.total_seconds() < 86400:
67
+ return f"{int(diff.total_seconds() / 3600)}h ago"
68
+ else:
69
+ return dt.strftime("%Y-%m-%d %H:%M:%S")
70
+ except Exception:
71
+ return str(ts)
72
+
73
+
74
+ def format_duration(seconds: float | None) -> str:
75
+ """Format duration for display"""
76
+ if seconds is None:
77
+ return "N/A"
78
+
79
+ if seconds < 0.001:
80
+ return f"{seconds * 1000000:.0f}μs"
81
+ elif seconds < 1:
82
+ return f"{seconds * 1000:.1f}ms"
83
+ elif seconds < 60:
84
+ return f"{seconds:.1f}s"
85
+ elif seconds < 3600:
86
+ minutes = int(seconds / 60)
87
+ secs = int(seconds % 60)
88
+ return f"{minutes}m {secs}s"
89
+ else:
90
+ hours = int(seconds / 3600)
91
+ minutes = int((seconds % 3600) / 60)
92
+ return f"{hours}h {minutes}m"
93
+
94
+
95
def format_bytes(size: int) -> str:
    """Render a byte count using binary-scaled units, from B up to PB."""
    value = size
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if value < 1024.0:
            return f"{value:.1f}{unit}"
        value /= 1024.0
    return f"{value:.1f}PB"
102
+
103
+
104
def print_error(message: str) -> None:
    """Write *message* to the shared console with a red ERROR prefix."""
    console.print("[red]ERROR:[/red] " + message)
107
+
108
+
109
def print_success(message: str) -> None:
    """Write *message* to the shared console with a green check prefix."""
    console.print("[green]✓[/green] " + message)
112
+
113
+
114
def print_warning(message: str) -> None:
    """Write *message* to the shared console with a yellow WARNING prefix."""
    console.print("[yellow]WARNING:[/yellow] " + message)
117
+
118
+
119
def print_info(message: str) -> None:
    """Write *message* to the shared console with a blue INFO prefix."""
    console.print("[blue]INFO:[/blue] " + message)
122
+
123
+
124
def create_progress() -> Progress:
    """Build a spinner-plus-description progress display on the shared console."""
    columns = (
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
    )
    return Progress(*columns, console=console)
131
+
132
+
133
def print_json(data: Any, title: str | None = None) -> None:
    """Pretty-print *data* as syntax-highlighted JSON, optionally inside a panel.

    Non-JSON-native values are stringified via ``default=str`` — intentional
    for display purposes. A truthy *title* wraps the output in a blue-bordered
    panel.
    """
    import json

    rendered = Syntax(
        json.dumps(data, indent=2, default=str),
        "json",
        theme="monokai",
        line_numbers=False,
    )

    if not title:
        console.print(rendered)
    else:
        console.print(Panel(rendered, title=title, border_style="blue"))
145
+
146
+
147
def truncate_string(s: str, max_length: int = 50) -> str:
    """Truncate *s* to at most *max_length* characters, ending with "..." when cut.

    Fix: when max_length <= 3 the "..." suffix cannot fit; the old slice
    arithmetic (``s[: max_length - 3]``) could wrap around and return a
    string LONGER than max_length. Such tiny limits now hard-truncate.
    """
    if len(s) <= max_length:
        return s
    if max_length <= 3:
        return s[:max_length]
    return s[: max_length - 3] + "..."
152
+
153
+
154
def format_queue_name(queue: str) -> Text:
    """Return the queue name as colored Text: red for DLQs, cyan for the
    default queue, blue otherwise."""
    if queue.endswith("_dlq"):
        style = "red"
    elif queue == "default":
        style = "cyan"
    else:
        style = "blue"
    return Text(queue, style=style)