rrq 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rrq/cli.py +39 -64
- rrq/cli_commands/__init__.py +1 -0
- rrq/cli_commands/base.py +102 -0
- rrq/cli_commands/commands/__init__.py +1 -0
- rrq/cli_commands/commands/debug.py +551 -0
- rrq/cli_commands/commands/dlq.py +853 -0
- rrq/cli_commands/commands/jobs.py +516 -0
- rrq/cli_commands/commands/monitor.py +776 -0
- rrq/cli_commands/commands/queues.py +539 -0
- rrq/cli_commands/utils.py +161 -0
- rrq/client.py +39 -35
- rrq/constants.py +10 -0
- rrq/cron.py +67 -8
- rrq/hooks.py +217 -0
- rrq/job.py +5 -5
- rrq/registry.py +0 -3
- rrq/settings.py +13 -1
- rrq/store.py +211 -53
- rrq/worker.py +6 -6
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/METADATA +208 -25
- rrq-0.7.0.dist-info/RECORD +26 -0
- rrq-0.5.0.dist-info/RECORD +0 -16
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/WHEEL +0 -0
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/entry_points.txt +0 -0
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,516 @@
|
|
|
1
|
+
"""Job inspection and management commands"""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
import click
|
|
7
|
+
from rich.panel import Panel
|
|
8
|
+
from rich.syntax import Syntax
|
|
9
|
+
|
|
10
|
+
from rrq.constants import JOB_KEY_PREFIX, QUEUE_KEY_PREFIX
|
|
11
|
+
from rrq.cli_commands.base import AsyncCommand, load_app_settings, get_job_store
|
|
12
|
+
from ..utils import (
|
|
13
|
+
console,
|
|
14
|
+
create_progress,
|
|
15
|
+
create_table,
|
|
16
|
+
format_duration,
|
|
17
|
+
format_status,
|
|
18
|
+
format_timestamp,
|
|
19
|
+
print_error,
|
|
20
|
+
print_json,
|
|
21
|
+
print_success,
|
|
22
|
+
print_warning,
|
|
23
|
+
truncate_string,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class JobCommands(AsyncCommand):
    """Commands for job inspection and management.

    Registers a ``job`` click group with subcommands to show, list, replay,
    cancel, and trace jobs stored in Redis via the RRQ job store.
    """

    def register(self, cli_group: click.Group) -> None:
        """Register job commands"""

        @cli_group.group("job")
        def job_group():
            """Inspect and manage jobs"""
            pass

        # Show job details
        @job_group.command("show")
        @click.argument("job_id")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        @click.option(
            "--raw",
            is_flag=True,
            help="Show raw job data as JSON",
        )
        def show_job(job_id: str, settings_object_path: str, raw: bool):
            """Show detailed information about a job"""
            self.make_async(self._show_job)(job_id, settings_object_path, raw)

        # List jobs
        @job_group.command("list")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        @click.option(
            "--status",
            type=click.Choice(["pending", "active", "completed", "failed", "retrying"]),
            help="Filter by job status",
        )
        @click.option(
            "--queue",
            help="Filter by queue name",
        )
        @click.option(
            "--function",
            help="Filter by function name",
        )
        @click.option(
            "--limit",
            type=int,
            default=20,
            help="Number of jobs to show",
        )
        def list_jobs(
            settings_object_path: str,
            status: Optional[str],
            queue: Optional[str],
            function: Optional[str],
            limit: int,
        ):
            """List jobs with filters"""
            self.make_async(self._list_jobs)(
                settings_object_path, status, queue, function, limit
            )

        # Replay a job
        @job_group.command("replay")
        @click.argument("job_id")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        @click.option(
            "--queue",
            help="Override target queue",
        )
        def replay_job(job_id: str, settings_object_path: str, queue: Optional[str]):
            """Replay a job with the same parameters"""
            self.make_async(self._replay_job)(job_id, settings_object_path, queue)

        # Cancel a job
        @job_group.command("cancel")
        @click.argument("job_id")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        def cancel_job(job_id: str, settings_object_path: str):
            """Cancel a pending job"""
            self.make_async(self._cancel_job)(job_id, settings_object_path)

        # Show job trace/timeline
        @job_group.command("trace")
        @click.argument("job_id")
        @click.option(
            "--settings",
            "settings_object_path",
            type=str,
            help="Python settings path (e.g., myapp.settings.rrq_settings)",
        )
        def trace_job(job_id: str, settings_object_path: str):
            """Show job execution timeline"""
            self.make_async(self._trace_job)(job_id, settings_object_path)

    async def _show_job(
        self, job_id: str, settings_object_path: str, raw: bool
    ) -> None:
        """Show detailed job information.

        Renders a summary panel (status, function, queue, timestamps,
        retries) followed by arguments and the result/error, or dumps the
        raw hash as JSON when ``raw`` is set.
        """
        settings = load_app_settings(settings_object_path)
        job_store = await get_job_store(settings)

        try:
            # Get job data using the store's dict helper
            job_dict = await job_store.get_job_data_dict(job_id)

            if not job_dict:
                print_error(f"Job '{job_id}' not found")
                return

            if raw:
                # Show raw JSON data and skip the formatted view
                print_json(job_dict, title=f"Job {job_id}")
                return

            # Parse job data
            status = job_dict.get("status", "unknown")
            function_name = job_dict.get("function_name", "unknown")
            queue_name = job_dict.get("queue_name", "unknown")

            # Create info panel
            info_lines = [
                f"[bold]Job ID:[/bold] {job_id}",
                f"[bold]Status:[/bold] {format_status(status)}",
                f"[bold]Function:[/bold] {function_name}",
                f"[bold]Queue:[/bold] {queue_name}",
            ]

            # Add timestamps (stored as epoch-second strings in the hash)
            if "created_at" in job_dict:
                created_at = float(job_dict["created_at"])
                info_lines.append(
                    f"[bold]Created:[/bold] {format_timestamp(created_at)}"
                )

            if "started_at" in job_dict:
                started_at = float(job_dict["started_at"])
                info_lines.append(
                    f"[bold]Started:[/bold] {format_timestamp(started_at)}"
                )

            if "completed_at" in job_dict:
                completed_at = float(job_dict["completed_at"])
                info_lines.append(
                    f"[bold]Completed:[/bold] {format_timestamp(completed_at)}"
                )

                # Calculate duration. Guarded on BOTH endpoints: previously
                # `completed_at` could be referenced before assignment when a
                # job had started_at but no completed_at.
                if "started_at" in job_dict:
                    duration = completed_at - float(job_dict["started_at"])
                    info_lines.append(
                        f"[bold]Duration:[/bold] {format_duration(duration)}"
                    )

            # Add retry info
            retries = int(job_dict.get("retries", 0))
            max_retries = int(job_dict.get("max_retries", 3))
            info_lines.append(f"[bold]Retries:[/bold] {retries}/{max_retries}")

            # Show info panel
            console.print(
                Panel(
                    "\n".join(info_lines), title="Job Information", border_style="blue"
                )
            )

            # Show arguments (stored JSON-encoded)
            if "args" in job_dict:
                args = json.loads(job_dict["args"])
                if args:
                    console.print("\n[bold]Arguments:[/bold]")
                    print_json(args)

            if "kwargs" in job_dict:
                kwargs = json.loads(job_dict["kwargs"])
                if kwargs:
                    console.print("\n[bold]Keyword Arguments:[/bold]")
                    print_json(kwargs)

            # Show result or error depending on terminal status
            if status == "completed" and "result" in job_dict:
                console.print("\n[bold]Result:[/bold]")
                try:
                    result = json.loads(job_dict["result"])
                    print_json(result)
                except (json.JSONDecodeError, ValueError):
                    # Result may not be JSON; fall back to plain text
                    console.print(job_dict["result"])

            elif status in ["failed", "retrying"] and "error" in job_dict:
                console.print("\n[bold red]Error:[/bold red]")
                console.print(job_dict["error"])

                if "traceback" in job_dict:
                    console.print("\n[bold]Traceback:[/bold]")
                    syntax = Syntax(
                        job_dict["traceback"],
                        "python",
                        theme="monokai",
                        line_numbers=True,
                    )
                    console.print(syntax)

        finally:
            await job_store.aclose()

    async def _list_jobs(
        self,
        settings_object_path: str,
        status: Optional[str],
        queue: Optional[str],
        function: Optional[str],
        limit: int,
    ) -> None:
        """List jobs with filters.

        Scans all job hashes, applies the optional status/queue/function
        filters, sorts by creation time (newest first), and renders up to
        ``limit`` rows in a table.
        """
        import time  # hoisted out of the per-row loop below

        settings = load_app_settings(settings_object_path)
        job_store = await get_job_store(settings)

        try:
            # Get all job keys. SCAN may yield bytes or str depending on the
            # redis client's decode_responses setting — handle both.
            job_pattern = f"{JOB_KEY_PREFIX}*"
            job_keys = []
            async for key in job_store.redis.scan_iter(match=job_pattern):
                job_keys.append(key.decode() if isinstance(key, bytes) else key)

            if not job_keys:
                print_warning("No jobs found")
                return

            # Create table
            table = create_table("Jobs")
            table.add_column("Job ID", style="cyan")
            table.add_column("Function", style="yellow")
            table.add_column("Queue", style="blue")
            table.add_column("Status", justify="center")
            table.add_column("Created", style="dim")
            table.add_column("Duration", justify="right")

            # Fetch and filter jobs
            jobs = []
            with create_progress() as progress:
                task = progress.add_task("Fetching jobs...", total=len(job_keys))

                for job_key in job_keys:
                    job_id = job_key.replace(JOB_KEY_PREFIX, "")
                    job_dict = await job_store.get_job_data_dict(job_id)

                    # Apply all filters in one predicate; the progress bar
                    # advances exactly once per key regardless of outcome.
                    if (
                        job_dict
                        and (not status or job_dict.get("status") == status)
                        and (not queue or job_dict.get("queue_name") == queue)
                        and (not function or job_dict.get("function_name") == function)
                    ):
                        jobs.append((job_id, job_dict))

                    progress.update(task, advance=1)

            # Sort by created_at, newest first
            jobs.sort(key=lambda x: float(x[1].get("created_at", 0)), reverse=True)

            # Limit results
            jobs = jobs[:limit]

            # Add rows to table
            for job_id, job_dict in jobs:
                # Duration: completed jobs use completed-started; active jobs
                # show elapsed time so far.
                duration = None
                if job_dict.get("completed_at") and job_dict.get("started_at"):
                    duration = float(job_dict["completed_at"]) - float(
                        job_dict["started_at"]
                    )
                elif job_dict.get("started_at") and job_dict.get("status") == "active":
                    duration = time.time() - float(job_dict["started_at"])

                table.add_row(
                    job_id[:8] + "...",
                    truncate_string(job_dict.get("function_name", "unknown"), 30),
                    job_dict.get("queue_name", "unknown"),
                    format_status(job_dict.get("status", "unknown")),
                    format_timestamp(float(job_dict.get("created_at", 0))),
                    # `is not None` so a genuine 0-second duration renders
                    # as "0s"-style output instead of "N/A".
                    format_duration(duration) if duration is not None else "N/A",
                )

            console.print(table)
            console.print(
                f"\n[dim]Showing {len(jobs)} of {len(job_keys)} total jobs[/dim]"
            )

        finally:
            await job_store.aclose()

    async def _replay_job(
        self, job_id: str, settings_object_path: str, queue: Optional[str]
    ) -> None:
        """Replay a job.

        Re-enqueues a new job with the original's function, args, and
        kwargs. ``queue`` overrides the original queue when given.
        """
        settings = load_app_settings(settings_object_path)
        job_store = await get_job_store(settings)

        try:
            # Get original job data
            job_dict = await job_store.get_job_data_dict(job_id)

            if not job_dict:
                print_error(f"Job '{job_id}' not found")
                return

            # Create new job with same parameters. Imported lazily to avoid
            # a circular import at module load time.
            from rrq.client import RRQClient

            client = RRQClient(settings=settings)

            # Parse arguments (stored JSON-encoded; default to empty)
            args = json.loads(job_dict.get("args", "[]"))
            kwargs = json.loads(job_dict.get("kwargs", "{}"))

            # Enqueue new job
            new_job_id = await client.enqueue(
                function_name=job_dict["function_name"],
                args=args,
                kwargs=kwargs,
                queue_name=queue
                or job_dict.get("queue_name", settings.default_queue_name),
            )

            print_success(f"Job replayed with new ID: {new_job_id}")

        finally:
            await job_store.aclose()

    async def _cancel_job(self, job_id: str, settings_object_path: str) -> None:
        """Cancel a pending job.

        Only jobs still in the "pending" state can be cancelled: the job is
        removed from its queue's sorted set, then its status is flipped to
        "cancelled".
        """
        settings = load_app_settings(settings_object_path)
        job_store = await get_job_store(settings)

        try:
            # Get job data
            job_dict = await job_store.get_job_data_dict(job_id)

            if not job_dict:
                print_error(f"Job '{job_id}' not found")
                return

            # Check status — active/completed jobs cannot be cancelled
            status = job_dict.get("status", "")
            if status != "pending":
                print_error(f"Can only cancel pending jobs. Job is currently: {status}")
                return

            # Remove from queue
            queue_name = job_dict.get("queue_name", "")
            if queue_name:
                queue_key = f"{QUEUE_KEY_PREFIX}{queue_name}"
                removed = await job_store.redis.zrem(queue_key, job_id)

                if removed:
                    # Update job status only after successful queue removal
                    job_key = f"{JOB_KEY_PREFIX}{job_id}"
                    await job_store.redis.hset(job_key, "status", "cancelled")
                    print_success(f"Job '{job_id}' cancelled successfully")
                else:
                    print_error("Failed to remove job from queue")
            else:
                print_error("Job has no associated queue")

        finally:
            await job_store.aclose()

    async def _trace_job(self, job_id: str, settings_object_path: str) -> None:
        """Show job execution timeline.

        Builds a chronological table of lifecycle events (created, started,
        retries, completed/failed) with inter-event deltas, plus the total
        duration.
        """
        settings = load_app_settings(settings_object_path)
        job_store = await get_job_store(settings)

        try:
            # Get job data
            job_dict = await job_store.get_job_data_dict(job_id)

            if not job_dict:
                print_error(f"Job '{job_id}' not found")
                return

            # Create timeline table
            table = create_table(f"Job Timeline: {job_id[:8]}...")
            table.add_column("Event", style="cyan")
            table.add_column("Timestamp", style="dim")
            table.add_column("Details", style="yellow")

            # Collect (label, timestamp, details) events
            events = []

            # Job created
            if "created_at" in job_dict:
                events.append(
                    (
                        "Created",
                        float(job_dict["created_at"]),
                        f"Function: {job_dict.get('function_name', 'unknown')}",
                    )
                )

            # Job started
            if "started_at" in job_dict:
                events.append(
                    (
                        "Started",
                        float(job_dict["started_at"]),
                        f"Worker: {job_dict.get('worker_id', 'unknown')}",
                    )
                )

            # Retries — stored under per-attempt keys "retry_<i>_at"
            retries = int(job_dict.get("retries", 0))
            if retries > 0:
                for i in range(retries):
                    retry_key = f"retry_{i}_at"
                    if retry_key in job_dict:
                        events.append(
                            (
                                f"Retry {i + 1}",
                                float(job_dict[retry_key]),
                                f"Attempt {i + 1} of {job_dict.get('max_retries', 3)}",
                            )
                        )

            # Job completed/failed
            if "completed_at" in job_dict:
                status = job_dict.get("status", "unknown")
                if status == "completed":
                    events.append(
                        ("Completed", float(job_dict["completed_at"]), "Success")
                    )
                else:
                    error_msg = job_dict.get("error", "Unknown error")
                    events.append(
                        (
                            "Failed",
                            float(job_dict["completed_at"]),
                            truncate_string(error_msg, 50),
                        )
                    )

            # Sort events chronologically
            events.sort(key=lambda x: x[1])

            # Add rows, annotating each with the delta from the previous event
            prev_timestamp = None
            for event, timestamp, details in events:
                time_diff = ""
                if prev_timestamp:
                    diff = timestamp - prev_timestamp
                    time_diff = f" (+{format_duration(diff)})"

                table.add_row(event, format_timestamp(timestamp) + time_diff, details)
                prev_timestamp = timestamp

            console.print(table)

            # Show total duration (first event to last)
            if events:
                total_duration = events[-1][1] - events[0][1]
                console.print(
                    f"\n[bold]Total Duration:[/bold] {format_duration(total_duration)}"
                )

        finally:
            await job_store.aclose()
|