pydocket 0.15.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
docket/cli.py ADDED
@@ -0,0 +1,1185 @@
1
+ import asyncio
2
+ import enum
3
+ import importlib
4
+ import logging
5
+ import os
6
+ import socket
7
+ import sys
8
+ import time
9
+ from datetime import datetime, timedelta, timezone
10
+ from functools import partial
11
+ from typing import Annotated, Any, AsyncIterator, Collection
12
+
13
+ import typer
14
+ from rich.console import Console
15
+ from rich.layout import Layout
16
+ from rich.live import Live
17
+ from rich.progress import (
18
+ BarColumn,
19
+ Progress,
20
+ TaskProgressColumn,
21
+ TextColumn,
22
+ TimeElapsedColumn,
23
+ TaskID,
24
+ )
25
+ from rich.table import Table
26
+
27
+ from . import __version__, tasks
28
+ from .docket import Docket, DocketSnapshot, WorkerInfo
29
+ from .execution import ExecutionState, Operator
30
+ from .worker import Worker
31
+
32
+
33
+ async def iterate_with_timeout(
34
+ iterator: AsyncIterator[dict[str, Any]], timeout: float
35
+ ) -> AsyncIterator[dict[str, Any] | None]:
36
+ """Iterate over an async iterator with timeout, ensuring proper cleanup.
37
+
38
+ Wraps an async iterator to add timeout support and guaranteed cleanup.
39
+ On timeout, yields None to allow the caller to handle polling fallback.
40
+
41
+ Args:
42
+ iterator: An async iterator (must have __anext__ and aclose methods)
43
+ timeout: Timeout in seconds for each iteration
44
+
45
+ Yields:
46
+ Items from the iterator, or None if timeout expires
47
+ """
48
+ try:
49
+ while True:
50
+ try:
51
+ yield await asyncio.wait_for(iterator.__anext__(), timeout=timeout)
52
+ except asyncio.TimeoutError:
53
+ # Yield None to signal timeout, allowing caller to handle polling
54
+ yield None
55
+ except StopAsyncIteration:
56
+ break
57
+ finally:
58
+ await iterator.aclose()
59
+
60
+
61
# Root CLI application for docket.
app: typer.Typer = typer.Typer(
    help="Docket - A distributed background task system for Python functions",
    no_args_is_help=True,
    add_completion=True,
)
66
+
67
+
68
class LogLevel(str, enum.Enum):
    """Names of the standard logging levels accepted on the CLI."""

    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
74
+
75
+
76
class LogFormat(str, enum.Enum):
    """Output styles for worker log lines."""

    RICH = "rich"
    PLAIN = "plain"
    JSON = "json"
80
+
81
+
82
def local_time(when: datetime) -> str:
    """Render *when* in the local timezone as 'YYYY-MM-DD HH:MM:SS +zzzz'."""
    localized = when.astimezone()
    return localized.strftime("%Y-%m-%d %H:%M:%S %z")
84
+
85
+
86
def default_worker_name() -> str:
    """Build a host-unique default worker name of the form 'hostname#pid'."""
    return "{}#{}".format(socket.gethostname(), os.getpid())
88
+
89
+
90
+ def duration(duration_str: str | timedelta) -> timedelta:
91
+ """
92
+ Parse a duration string into a timedelta.
93
+
94
+ Supported formats:
95
+ - 123 = 123 seconds
96
+ - 123s = 123 seconds
97
+ - 123m = 123 minutes
98
+ - 123h = 123 hours
99
+ - 00:00 = mm:ss
100
+ - 00:00:00 = hh:mm:ss
101
+ """
102
+ if isinstance(duration_str, timedelta):
103
+ return duration_str
104
+
105
+ if ":" in duration_str:
106
+ parts = duration_str.split(":")
107
+ if len(parts) == 2: # mm:ss
108
+ minutes, seconds = map(int, parts)
109
+ return timedelta(minutes=minutes, seconds=seconds)
110
+ elif len(parts) == 3: # hh:mm:ss
111
+ hours, minutes, seconds = map(int, parts)
112
+ return timedelta(hours=hours, minutes=minutes, seconds=seconds)
113
+ else:
114
+ raise ValueError(f"Invalid duration string: {duration_str}")
115
+ elif duration_str.endswith("s"):
116
+ return timedelta(seconds=int(duration_str[:-1]))
117
+ elif duration_str.endswith("m"):
118
+ return timedelta(minutes=int(duration_str[:-1]))
119
+ elif duration_str.endswith("h"):
120
+ return timedelta(hours=int(duration_str[:-1]))
121
+ else:
122
+ return timedelta(seconds=int(duration_str))
123
+
124
+
125
def set_logging_format(format: LogFormat) -> None:
    """Install a root-logger handler matching the requested output format.

    JSON and plain formats write to stdout; rich uses RichHandler for
    colorized terminal output.
    """
    root_logger = logging.getLogger()
    if format == LogFormat.JSON:
        # Imported lazily so the JSON logging dependency is only needed when used.
        from pythonjsonlogger.json import JsonFormatter

        json_handler = logging.StreamHandler(stream=sys.stdout)
        json_handler.setFormatter(
            JsonFormatter("{name}{asctime}{levelname}{message}{exc_info}", style="{")
        )
        root_logger.addHandler(json_handler)
    elif format == LogFormat.PLAIN:
        plain_handler = logging.StreamHandler(stream=sys.stdout)
        plain_handler.setFormatter(
            logging.Formatter(
                "[%(asctime)s] %(levelname)s - %(name)s - %(message)s",
                datefmt="%Y-%m-%d %H:%M:%S",
            )
        )
        root_logger.addHandler(plain_handler)
    else:
        from rich.logging import RichHandler

        rich_handler = RichHandler()
        rich_handler.setFormatter(logging.Formatter("%(message)s", datefmt="[%X]"))
        root_logger.addHandler(rich_handler)
151
+
152
+
153
def set_logging_level(level: LogLevel) -> None:
    """Set the root logger's threshold to the chosen level."""
    root = logging.getLogger()
    root.setLevel(level.value)
155
+
156
+
157
def validate_url(url: str) -> str:
    """
    Validate that the provided URL is compatible with the CLI.

    The memory:// backend is not compatible with the CLI as it doesn't persist
    across processes.
    """
    if not url.startswith("memory://"):
        return url
    raise typer.BadParameter(
        "The memory:// URL scheme is not supported by the CLI.\n"
        "The memory backend does not persist across processes.\n"
        "Please use a persistent backend like Redis or Valkey."
    )
171
+
172
+
173
+ def handle_strike_wildcard(value: str) -> str | None:
174
+ if value in ("", "*"):
175
+ return None
176
+ return value
177
+
178
+
179
+ def interpret_python_value(value: str | None) -> Any:
180
+ if value is None:
181
+ return None
182
+
183
+ type, _, value = value.rpartition(":")
184
+ if not type:
185
+ # without a type hint, we assume the value is a string
186
+ return value
187
+
188
+ module_name, _, member_name = type.rpartition(".")
189
+ module = importlib.import_module(module_name or "builtins")
190
+ member = getattr(module, member_name)
191
+
192
+ # special cases for common useful types
193
+ if member is timedelta:
194
+ return timedelta(seconds=int(value))
195
+ elif member is bool:
196
+ return value.lower() == "true"
197
+ else:
198
+ return member(value)
199
+
200
+
201
@app.command(
    help="Print the version of docket",
)
def version() -> None:
    """Print docket's package version to stdout."""
    print(__version__)
206
+
207
+
208
@app.command(
    help="Start a worker to process tasks",
)
def worker(
    tasks: Annotated[
        list[str],
        typer.Option(
            "--tasks",
            help=(
                "The dotted path of a task collection to register with the docket. "
                "This can be specified multiple times. A task collection is any "
                "iterable of async functions."
            ),
            envvar="DOCKET_TASKS",
        ),
    ] = ["docket.tasks:standard_tasks"],
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
    name: Annotated[
        str | None,
        typer.Option(
            help="The name of the worker",
            envvar="DOCKET_WORKER_NAME",
        ),
    ] = default_worker_name(),
    logging_level: Annotated[
        LogLevel,
        typer.Option(
            help="The logging level",
            envvar="DOCKET_LOGGING_LEVEL",
            callback=set_logging_level,
        ),
    ] = LogLevel.INFO,
    logging_format: Annotated[
        LogFormat,
        typer.Option(
            help="The logging format",
            envvar="DOCKET_LOGGING_FORMAT",
            callback=set_logging_format,
        ),
    ] = LogFormat.RICH if sys.stdout.isatty() else LogFormat.PLAIN,
    concurrency: Annotated[
        int,
        typer.Option(
            help="The maximum number of tasks to process concurrently",
            envvar="DOCKET_WORKER_CONCURRENCY",
        ),
    ] = 10,
    redelivery_timeout: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help="How long to wait before redelivering a task to another worker",
            envvar="DOCKET_WORKER_REDELIVERY_TIMEOUT",
        ),
    ] = timedelta(minutes=5),
    reconnection_delay: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help=(
                "How long to wait before reconnecting to the Redis server after "
                "a connection error"
            ),
            envvar="DOCKET_WORKER_RECONNECTION_DELAY",
        ),
    ] = timedelta(seconds=5),
    minimum_check_interval: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help="The minimum interval to check for tasks",
            envvar="DOCKET_WORKER_MINIMUM_CHECK_INTERVAL",
        ),
    ] = timedelta(milliseconds=100),
    scheduling_resolution: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help="How frequently to check for future tasks to be scheduled",
            envvar="DOCKET_WORKER_SCHEDULING_RESOLUTION",
        ),
    ] = timedelta(milliseconds=250),
    schedule_automatic_tasks: Annotated[
        bool,
        typer.Option(
            "--schedule-automatic-tasks",
            help="Schedule automatic tasks",
        ),
    ] = True,
    until_finished: Annotated[
        bool,
        typer.Option(
            "--until-finished",
            help="Exit after the current docket is finished",
        ),
    ] = False,
    healthcheck_port: Annotated[
        int | None,
        typer.Option(
            "--healthcheck-port",
            help="The port to serve a healthcheck on",
            envvar="DOCKET_WORKER_HEALTHCHECK_PORT",
        ),
    ] = None,
    metrics_port: Annotated[
        int | None,
        typer.Option(
            "--metrics-port",
            help="The port to serve Prometheus metrics on",
            envvar="DOCKET_WORKER_METRICS_PORT",
        ),
    ] = None,
) -> None:
    """Run a worker process until stopped (or until the docket is drained).

    All configuration is forwarded verbatim to Worker.run; logging level and
    format are applied eagerly via the option callbacks above.
    """
    asyncio.run(
        Worker.run(
            docket_name=docket_,
            url=url,
            name=name,
            tasks=tasks,
            concurrency=concurrency,
            redelivery_timeout=redelivery_timeout,
            reconnection_delay=reconnection_delay,
            minimum_check_interval=minimum_check_interval,
            scheduling_resolution=scheduling_resolution,
            schedule_automatic_tasks=schedule_automatic_tasks,
            until_finished=until_finished,
            healthcheck_port=healthcheck_port,
            metrics_port=metrics_port,
        )
    )
353
+
354
+
355
@app.command(help="Strikes a task or parameters from the docket")
def strike(
    function: Annotated[
        str,
        typer.Argument(
            help="The function to strike",
            callback=handle_strike_wildcard,
        ),
    ] = "*",
    parameter: Annotated[
        str,
        typer.Argument(
            help="The parameter to strike",
            callback=handle_strike_wildcard,
        ),
    ] = "*",
    operator: Annotated[
        Operator,
        typer.Argument(
            help="The operator to compare the value against",
        ),
    ] = Operator.EQUAL,
    value: Annotated[
        str | None,
        typer.Argument(
            help="The value to strike from the docket",
        ),
    ] = None,
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
) -> None:
    """Strike a task and/or parameter condition so workers skip matching work."""
    # The wildcard callback maps '*' to None; at least one selector is required.
    if not function and not parameter:
        raise typer.BadParameter(
            message="Must provide either a function and/or a parameter",
        )

    value_ = interpret_python_value(value)
    if parameter:
        function_name = f"{function or '(all tasks)'}"
        print(f"Striking {function_name} {parameter} {operator.value} {value_!r}")
    else:
        print(f"Striking {function}")

    async def run() -> None:
        async with Docket(name=docket_, url=url) as docket:
            await docket.strike(function, parameter, operator, value_)

    asyncio.run(run())
417
+
418
+
419
@app.command(help="Clear all queued and scheduled tasks from the docket")
def clear(
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
) -> None:
    """Remove every queued and scheduled task and report how many were cleared."""

    async def run() -> None:
        async with Docket(name=docket_, url=url) as docket:
            cleared_count = await docket.clear()
            print(f"Cleared {cleared_count} tasks from docket '{docket_}'")

    asyncio.run(run())
444
+
445
+
446
@app.command(help="Restores a task or parameters to the Docket")
def restore(
    function: Annotated[
        str,
        typer.Argument(
            help="The function to restore",
            callback=handle_strike_wildcard,
        ),
    ] = "*",
    parameter: Annotated[
        str,
        typer.Argument(
            help="The parameter to restore",
            callback=handle_strike_wildcard,
        ),
    ] = "*",
    operator: Annotated[
        Operator,
        typer.Argument(
            help="The operator to compare the value against",
        ),
    ] = Operator.EQUAL,
    value: Annotated[
        str | None,
        typer.Argument(
            help="The value to restore to the docket",
        ),
    ] = None,
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
) -> None:
    """Undo a previous strike so matching tasks are processed again."""
    # The wildcard callback maps '*' to None; at least one selector is required.
    if not function and not parameter:
        raise typer.BadParameter(
            message="Must provide either a function and/or a parameter",
        )

    value_ = interpret_python_value(value)
    if parameter:
        function_name = f"{function or '(all tasks)'}"
        print(f"Restoring {function_name} {parameter} {operator.value} {value_!r}")
    else:
        print(f"Restoring {function}")

    async def run() -> None:
        async with Docket(name=docket_, url=url) as docket:
            await docket.restore(function, parameter, operator, value_)

    asyncio.run(run())
508
+
509
+
510
# Sub-application grouping docket's built-in demonstration tasks.
tasks_app: typer.Typer = typer.Typer(
    help="Run docket's built-in tasks", no_args_is_help=True
)
app.add_typer(tasks_app, name="tasks")
514
+
515
+
516
@tasks_app.command(help="Adds a trace task to the Docket")
def trace(
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
    message: Annotated[
        str,
        typer.Argument(
            help="The message to print",
        ),
    ] = "Howdy!",
    delay: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help="The delay before the task is added to the docket",
        ),
    ] = timedelta(seconds=0),
) -> None:
    """Schedule the built-in trace task, optionally delayed into the future."""

    async def run() -> None:
        async with Docket(name=docket_, url=url) as docket:
            when = datetime.now(timezone.utc) + delay
            execution = await docket.add(tasks.trace, when)(message)
            print(f"Added trace task {execution.key!r} to the docket {docket.name!r}")

    asyncio.run(run())
555
+
556
+
557
@tasks_app.command(help="Adds a fail task to the Docket")
def fail(
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
    message: Annotated[
        str,
        typer.Argument(
            help="The message to print",
        ),
    ] = "Howdy!",
    delay: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help="The delay before the task is added to the docket",
        ),
    ] = timedelta(seconds=0),
) -> None:
    """Schedule the built-in always-failing task (useful for testing retries)."""

    async def run() -> None:
        async with Docket(name=docket_, url=url) as docket:
            when = datetime.now(timezone.utc) + delay
            execution = await docket.add(tasks.fail, when)(message)
            print(f"Added fail task {execution.key!r} to the docket {docket.name!r}")

    asyncio.run(run())
596
+
597
+
598
@tasks_app.command(help="Adds a sleep task to the Docket")
def sleep(
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
    seconds: Annotated[
        float,
        typer.Argument(
            help="The number of seconds to sleep",
        ),
    ] = 1,
    delay: Annotated[
        timedelta,
        typer.Option(
            parser=duration,
            help="The delay before the task is added to the docket",
        ),
    ] = timedelta(seconds=0),
) -> None:
    """Schedule the built-in sleep task (useful for testing concurrency)."""

    async def run() -> None:
        async with Docket(name=docket_, url=url) as docket:
            when = datetime.now(timezone.utc) + delay
            execution = await docket.add(tasks.sleep, when)(seconds)
            print(f"Added sleep task {execution.key!r} to the docket {docket.name!r}")

    asyncio.run(run())
637
+
638
+
639
def relative_time(now: datetime, when: datetime) -> str:
    """Describe *when* relative to *now*.

    Times within 30 minutes of *now* read as 'in H:MM:SS' / 'H:MM:SS ago';
    anything further out falls back to an absolute local timestamp.
    """
    delta = now - when
    within_window = -timedelta(minutes=30) <= delta < timedelta(minutes=30)
    if not within_window:
        return f"at {local_time(when)}"
    if delta < timedelta(0):
        return f"in {-delta}"
    return f"{delta} ago"
649
+
650
+
651
+ def get_task_stats(
652
+ snapshot: DocketSnapshot,
653
+ ) -> dict[str, dict[str, int | datetime | None]]:
654
+ """Get task count statistics by function name with timestamp data."""
655
+ stats: dict[str, dict[str, int | datetime | None]] = {}
656
+
657
+ # Count running tasks by function
658
+ for execution in snapshot.running:
659
+ func_name = execution.function.__name__
660
+ if func_name not in stats:
661
+ stats[func_name] = {
662
+ "running": 0,
663
+ "queued": 0,
664
+ "total": 0,
665
+ "oldest_queued": None,
666
+ "latest_queued": None,
667
+ "oldest_started": None,
668
+ "latest_started": None,
669
+ }
670
+ stats[func_name]["running"] += 1
671
+ stats[func_name]["total"] += 1
672
+
673
+ # Track oldest/latest started times for running tasks
674
+ started = execution.started
675
+ if (
676
+ stats[func_name]["oldest_started"] is None
677
+ or started < stats[func_name]["oldest_started"]
678
+ ):
679
+ stats[func_name]["oldest_started"] = started
680
+ if (
681
+ stats[func_name]["latest_started"] is None
682
+ or started > stats[func_name]["latest_started"]
683
+ ):
684
+ stats[func_name]["latest_started"] = started
685
+
686
+ # Count future tasks by function
687
+ for execution in snapshot.future:
688
+ func_name = execution.function.__name__
689
+ if func_name not in stats:
690
+ stats[func_name] = {
691
+ "running": 0,
692
+ "queued": 0,
693
+ "total": 0,
694
+ "oldest_queued": None,
695
+ "latest_queued": None,
696
+ "oldest_started": None,
697
+ "latest_started": None,
698
+ }
699
+ stats[func_name]["queued"] += 1
700
+ stats[func_name]["total"] += 1
701
+
702
+ # Track oldest/latest queued times for future tasks
703
+ when = execution.when
704
+ if (
705
+ stats[func_name]["oldest_queued"] is None
706
+ or when < stats[func_name]["oldest_queued"]
707
+ ):
708
+ stats[func_name]["oldest_queued"] = when
709
+ if (
710
+ stats[func_name]["latest_queued"] is None
711
+ or when > stats[func_name]["latest_queued"]
712
+ ):
713
+ stats[func_name]["latest_queued"] = when
714
+
715
+ return stats
716
+
717
+
718
@app.command(help="Shows a snapshot of what's on the docket right now")
def snapshot(
    tasks: Annotated[
        list[str],
        typer.Option(
            "--tasks",
            help=(
                "The dotted path of a task collection to register with the docket. "
                "This can be specified multiple times. A task collection is any "
                "iterable of async functions."
            ),
            envvar="DOCKET_TASKS",
        ),
    ] = ["docket.tasks:standard_tasks"],
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
    stats: Annotated[
        bool,
        typer.Option(
            "--stats",
            help="Show task count statistics by function name",
        ),
    ] = False,
) -> None:
    """Print a point-in-time table of running and future tasks on the docket."""

    async def run() -> DocketSnapshot:
        async with Docket(name=docket_, url=url) as docket:
            for task_path in tasks:
                docket.register_collection(task_path)
            return await docket.snapshot()

    snapshot = asyncio.run(run())

    # All timestamps are rendered relative to the moment the snapshot was taken.
    relative = partial(relative_time, snapshot.taken)
    console = Console()

    summary_lines = [
        f"Docket: {docket_!r}",
        f"as of {local_time(snapshot.taken)}",
        (
            f"{len(snapshot.workers)} workers, "
            f"{len(snapshot.running)}/{snapshot.total_tasks} running"
        ),
    ]
    table = Table(title="\n".join(summary_lines))
    table.add_column("When", style="green")
    table.add_column("Function", style="cyan")
    table.add_column("Key", style="cyan")
    table.add_column("Worker", style="yellow")
    table.add_column("Started", style="green")

    for execution in snapshot.running:
        table.add_row(
            relative(execution.when),
            execution.function.__name__,
            execution.key,
            execution.worker,
            relative(execution.started),
        )

    for execution in snapshot.future:
        table.add_row(
            relative(execution.when),
            execution.function.__name__,
            execution.key,
            "",
            "",
        )

    console.print(table)

    # Optional statistics table. On Linux the Click runner executes this CLI
    # in a subprocess, so coverage cannot observe it; mark as no cover.
    if stats:  # pragma: no cover
        task_stats = get_task_stats(snapshot)
        if task_stats:  # pragma: no cover
            console.print()  # Add spacing between tables
            stats_table = Table(title="Task Count Statistics by Function")
            stats_table.add_column("Function", style="cyan")
            stats_table.add_column("Total", style="bold magenta", justify="right")
            stats_table.add_column("Running", style="green", justify="right")
            stats_table.add_column("Queued", style="yellow", justify="right")
            stats_table.add_column("Oldest Queued", style="dim yellow", justify="right")
            stats_table.add_column("Latest Queued", style="dim yellow", justify="right")

            # Largest totals first, to surface potential runaway tasks.
            for func_name in sorted(
                task_stats.keys(), key=lambda x: task_stats[x]["total"], reverse=True
            ):
                counts = task_stats[func_name]

                oldest_queued = ""
                latest_queued = ""
                if counts["oldest_queued"] is not None:
                    oldest_queued = relative(counts["oldest_queued"])
                if counts["latest_queued"] is not None:
                    latest_queued = relative(counts["latest_queued"])

                stats_table.add_row(
                    func_name,
                    str(counts["total"]),
                    str(counts["running"]),
                    str(counts["queued"]),
                    oldest_queued,
                    latest_queued,
                )

            console.print(stats_table)
842
+
843
+
844
@app.command(help="Monitor progress of a specific task execution")
def watch(
    key: Annotated[str, typer.Argument(help="The task execution key to monitor")],
    url: Annotated[
        str,
        typer.Option(
            "--url",
            "-u",
            envvar="DOCKET_REDIS_URL",
            help="Redis URL (e.g., redis://localhost:6379/0)",
        ),
    ] = "redis://localhost:6379/0",
    docket_name: Annotated[
        str,
        typer.Option(
            "--docket",
            "-d",
            envvar="DOCKET_NAME",
            help="Docket name",
        ),
    ] = "docket",
) -> None:
    """Monitor the progress of a specific task execution in real-time using event-driven updates."""

    async def monitor() -> None:
        async with Docket(docket_name, url) as docket:
            execution = await docket.get_execution(key)
            if not execution:
                console = Console()
                console.print(
                    f"[red]Error:[/red] Task with key '{key}' not found or function not registered",
                    style="bold",
                )
                return

            console = Console()

            # Display colors per execution state.
            state_colors = {
                ExecutionState.SCHEDULED: "yellow",
                ExecutionState.QUEUED: "cyan",
                ExecutionState.RUNNING: "blue",
                ExecutionState.COMPLETED: "green",
                ExecutionState.FAILED: "red",
            }

            # Load the initial state/progress snapshot.
            await execution.sync()

            # Mutable display state, updated by events below and read by
            # create_display_layout via closure.
            current_state = execution.state
            worker_name: str | None = execution.worker
            error_message: str | None = execution.error

            current_val = (
                execution.progress.current
                if execution.progress.current is not None
                else 0
            )
            total_val = execution.progress.total
            progress_message = execution.progress.message

            active_progress = Progress(
                TextColumn("[bold blue]{task.description}"),
                BarColumn(bar_width=None),  # Auto width
                TaskProgressColumn(),
                TimeElapsedColumn(),
                expand=True,
            )

            progress_task_id = None

            def set_progress_start_time(task_id: TaskID, started_at: datetime) -> None:
                """Set progress bar start time based on execution start time."""
                elapsed_since_start = datetime.now(timezone.utc) - started_at
                monotonic_start = time.monotonic() - elapsed_since_start.total_seconds()
                active_progress.tasks[task_id].start_time = monotonic_start

            # Seed the progress bar when the task already has progress data.
            if current_val > 0 and total_val > 0:
                progress_task_id = active_progress.add_task(  # pragma: no cover
                    progress_message or "Processing...",
                    total=total_val,
                    completed=current_val,
                )
                if execution.started_at is not None:  # pragma: no cover
                    set_progress_start_time(progress_task_id, execution.started_at)

            def create_display_layout() -> Layout:
                """Create the layout for watch display."""
                layout = Layout()

                info_lines = [
                    f"[bold]Task:[/bold] {key}",
                    f"[bold]Docket:[/bold] {docket_name}",
                ]

                state_color = state_colors.get(current_state, "white")
                info_lines.append(
                    f"[bold]State:[/bold] [{state_color}]{current_state.value.upper()}[/{state_color}]"
                )

                if worker_name:  # pragma: no branch
                    info_lines.append(f"[bold]Worker:[/bold] {worker_name}")

                if error_message:
                    info_lines.append(f"[red bold]Error:[/red bold] {error_message}")

                if current_state == ExecutionState.COMPLETED:
                    info_lines.append(
                        "[green bold]✓ Task completed successfully[/green bold]"
                    )
                elif current_state == ExecutionState.FAILED:
                    info_lines.append("[red bold]✗ Task failed[/red bold]")

                info_section = "\n".join(info_lines)

                # Keep info and progress adjacent, without big gaps.
                if progress_task_id is not None:
                    layout.split_column(
                        Layout(info_section, name="info", size=len(info_lines)),
                        Layout(active_progress, name="progress", size=2),
                    )
                else:
                    layout.update(Layout(info_section, name="info"))

                return layout

            layout = create_display_layout()

            # Already in a terminal state? Render once and exit.
            if current_state in (ExecutionState.COMPLETED, ExecutionState.FAILED):
                console.print(layout)
                return

            with Live(layout, console=console, refresh_per_second=4) as live:
                # Event-driven updates, with a 1-second polling fallback to
                # cover missed pub/sub events.
                poll_interval = 1.0

                async for event in iterate_with_timeout(
                    execution.subscribe(), poll_interval
                ):  # pragma: no cover
                    if event is None:
                        # Timeout - poll state directly as fallback.
                        await execution.sync()
                        if execution.state != current_state:
                            # State changed; synthesize a state event.
                            event = {
                                "type": "state",
                                "state": execution.state.value,
                                "worker": execution.worker,
                                "error": execution.error,
                                "started_at": (
                                    execution.started_at.isoformat()
                                    if execution.started_at
                                    else None
                                ),
                            }
                        else:
                            # No state change, keep waiting.
                            continue

                    # Process the event (real or synthesized).
                    if event["type"] == "state":
                        current_state = ExecutionState(event["state"])
                        if worker := event.get("worker"):
                            worker_name = worker
                        if error := event.get("error"):
                            error_message = error
                        if started_at := event.get("started_at"):
                            execution.started_at = datetime.fromisoformat(started_at)
                            if progress_task_id is not None:
                                set_progress_start_time(
                                    progress_task_id, execution.started_at
                                )

                        layout = create_display_layout()
                        live.update(layout)

                        # Stop once the task reaches a terminal state.
                        if current_state in (
                            ExecutionState.COMPLETED,
                            ExecutionState.FAILED,
                        ):
                            break

                    elif event["type"] == "progress":
                        current_val = event["current"]
                        total_val = event.get("total", execution.progress.total)
                        progress_message = event.get(
                            "message", execution.progress.message
                        )

                        if total_val > 0 and execution.started_at is not None:
                            if progress_task_id is None:
                                # Create the progress bar on first progress event.
                                progress_task_id = active_progress.add_task(
                                    progress_message or "Processing...",
                                    total=total_val,
                                    completed=current_val or 0,
                                )
                                if started_at := execution.started_at:
                                    set_progress_start_time(
                                        progress_task_id, execution.started_at
                                    )
                            else:
                                active_progress.update(
                                    progress_task_id,
                                    completed=current_val,
                                    total=total_val,
                                    description=progress_message or "Processing...",
                                )

                        layout = create_display_layout()
                        live.update(layout)

    asyncio.run(monitor())
1082
+
1083
+
1084
# Sub-application for inspecting workers attached to a docket.
workers_app: typer.Typer = typer.Typer(
    help="Look at the workers on a docket", no_args_is_help=True
)
app.add_typer(workers_app, name="workers")
1088
+
1089
+
1090
def print_workers(
    docket_name: str,
    workers: Collection[WorkerInfo],
    highlight_task: str | None = None,
) -> None:
    """Render a table of workers, most recently seen first.

    When highlight_task is given, that task name is bolded in each
    worker's task list.
    """
    table = Table(title=f"Workers in Docket: {docket_name}")
    table.add_column("Name", style="cyan")
    table.add_column("Last Seen", style="green")
    table.add_column("Tasks", style="yellow")

    now = datetime.now(timezone.utc)

    for worker in sorted(workers, key=lambda w: w.last_seen, reverse=True):
        time_ago = now - worker.last_seen
        task_labels = [
            f"[bold]{task}[/bold]" if task == highlight_task else task
            for task in sorted(worker.tasks)
        ]
        table.add_row(
            worker.name,
            f"{time_ago} ago",
            "\n".join(task_labels) if task_labels else "(none)",
        )

    Console().print(table)
1121
+
1122
+
1123
@workers_app.command(name="ls", help="List all workers on the docket")
def list_workers(
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
) -> None:
    """Fetch and display every worker currently registered on the docket."""

    async def run() -> Collection[WorkerInfo]:
        async with Docket(name=docket_, url=url) as docket:
            return await docket.workers()

    workers = asyncio.run(run())

    print_workers(docket_, workers)
1149
+
1150
+
1151
@workers_app.command(
    name="for-task",
    help="List the workers on the docket that can process a certain task",
)
def workers_for_task(
    task: Annotated[
        str,
        typer.Argument(
            help="The name of the task",
        ),
    ],
    docket_: Annotated[
        str,
        typer.Option(
            "--docket",
            help="The name of the docket",
            envvar="DOCKET_NAME",
        ),
    ] = "docket",
    url: Annotated[
        str,
        typer.Option(
            help="The URL of the Redis server",
            envvar="DOCKET_URL",
            callback=validate_url,
        ),
    ] = "redis://localhost:6379/0",
) -> None:
    """Display the workers able to run *task*, with that task highlighted."""

    async def run() -> Collection[WorkerInfo]:
        async with Docket(name=docket_, url=url) as docket:
            return await docket.task_workers(task)

    workers = asyncio.run(run())

    print_workers(docket_, workers, highlight_task=task)