pydocket 0.6.3__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pydocket might be problematic.
- docket/annotations.py +23 -1
- docket/cli.py +10 -0
- docket/dependencies.py +181 -10
- docket/docket.py +105 -5
- docket/instrumentation.py +30 -4
- docket/worker.py +18 -1
- pydocket-0.7.0.dist-info/METADATA +139 -0
- pydocket-0.7.0.dist-info/RECORD +16 -0
- pydocket-0.6.3.dist-info/METADATA +0 -389
- pydocket-0.6.3.dist-info/RECORD +0 -16
- {pydocket-0.6.3.dist-info → pydocket-0.7.0.dist-info}/WHEEL +0 -0
- {pydocket-0.6.3.dist-info → pydocket-0.7.0.dist-info}/entry_points.txt +0 -0
- {pydocket-0.6.3.dist-info → pydocket-0.7.0.dist-info}/licenses/LICENSE +0 -0
docket/annotations.py
CHANGED
@@ -34,7 +34,29 @@ class Annotation(abc.ABC):
 
 
 class Logged(Annotation):
-    """Instructs docket to include arguments to this parameter in the log.
+    """Instructs docket to include arguments to this parameter in the log.
+
+    If `length_only` is `True`, only the length of the argument will be included in
+    the log.
+
+    Example:
+
+    ```python
+    @task
+    def setup_new_customer(
+        customer_id: Annotated[int, Logged],
+        addresses: Annotated[list[Address], Logged(length_only=True)],
+        password: str,
+    ) -> None:
+        ...
+    ```
+
+    In the logs, you'd see the task referenced as:
+
+    ```
+    setup_new_customer(customer_id=123, addresses[len 2], password=...)
+    ```
+    """
 
     length_only: bool = False
 
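For readers who want to try the new `Logged` annotation outside the docstring above, here is a minimal, hypothetical sketch (not taken from the package); it assumes `Logged` is imported from `docket.annotations`, the module this diff changes:

```python
from typing import Annotated

from docket.annotations import Logged


async def reset_password(
    user_id: Annotated[int, Logged],  # the actual value appears in the log line
    recovery_codes: Annotated[list[str], Logged(length_only=True)],  # only the length is logged
    new_password: str,  # unannotated parameters are shown as "..." per the docstring above
) -> None:
    ...
```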
docket/cli.py
CHANGED
@@ -259,11 +259,20 @@ def worker(
             help="Exit after the current docket is finished",
         ),
     ] = False,
+    healthcheck_port: Annotated[
+        int | None,
+        typer.Option(
+            "--healthcheck-port",
+            help="The port to serve a healthcheck on",
+            envvar="DOCKET_WORKER_HEALTHCHECK_PORT",
+        ),
+    ] = None,
     metrics_port: Annotated[
         int | None,
         typer.Option(
             "--metrics-port",
             help="The port to serve Prometheus metrics on",
+            envvar="DOCKET_WORKER_METRICS_PORT",
         ),
     ] = None,
 ) -> None:

@@ -279,6 +288,7 @@ def worker(
             scheduling_resolution=scheduling_resolution,
             schedule_automatic_tasks=schedule_automatic_tasks,
             until_finished=until_finished,
+            healthcheck_port=healthcheck_port,
             metrics_port=metrics_port,
             tasks=tasks,
         )
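To show how the new option is exercised, here is a hedged sketch (not from the package) of launching a worker with the new healthcheck port; the `docket worker` command and `--tasks` flag come from the project README, and the task module path is a hypothetical placeholder:

```python
import subprocess

# Start a worker with the new healthcheck port alongside the existing metrics port.
# DOCKET_WORKER_HEALTHCHECK_PORT could be set in the environment instead of the flag.
subprocess.run(
    [
        "docket", "worker",
        "--tasks", "my_tasks:my_task_collection",  # hypothetical task collection
        "--healthcheck-port", "8080",
        "--metrics-port", "9090",
    ],
    check=True,
)
```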
docket/dependencies.py
CHANGED
@@ -3,7 +3,7 @@ import logging
 import time
 from contextlib import AsyncExitStack, asynccontextmanager
 from contextvars import ContextVar
-from datetime import timedelta
+from datetime import datetime, timedelta, timezone
 from types import TracebackType
 from typing import (
     TYPE_CHECKING,

@@ -14,6 +14,7 @@ from typing import (
     Callable,
     Counter,
     Generic,
+    NoReturn,
     TypeVar,
     cast,
 )

@@ -49,6 +50,16 @@ class _CurrentWorker(Dependency):
 
 
 def CurrentWorker() -> "Worker":
+    """A dependency to access the current Worker.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(worker: Worker = CurrentWorker()) -> None:
+        assert isinstance(worker, Worker)
+    ```
+    """
     return cast("Worker", _CurrentWorker())
 
 

@@ -58,6 +69,16 @@ class _CurrentDocket(Dependency):
 
 
 def CurrentDocket() -> Docket:
+    """A dependency to access the current Docket.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(docket: Docket = CurrentDocket()) -> None:
+        assert isinstance(docket, Docket)
+    ```
+    """
     return cast(Docket, _CurrentDocket())
 
 

@@ -67,6 +88,16 @@ class _CurrentExecution(Dependency):
 
 
 def CurrentExecution() -> Execution:
+    """A dependency to access the current Execution.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(execution: Execution = CurrentExecution()) -> None:
+        assert isinstance(execution, Execution)
+    ```
+    """
     return cast(Execution, _CurrentExecution())
 
 

@@ -76,6 +107,16 @@ class _TaskKey(Dependency):
 
 
 def TaskKey() -> str:
+    """A dependency to access the key of the currently executing task.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(key: str = TaskKey()) -> None:
+        assert isinstance(key, str)
+    ```
+    """
     return cast(str, _TaskKey())
 
 

@@ -99,6 +140,22 @@ class _TaskArgument(Dependency):
 
 
 def TaskArgument(parameter: str | None = None, optional: bool = False) -> Any:
+    """A dependency to access an argument of the currently executing task. This is
+    often useful in dependency functions so they can access the arguments of the
+    task they are injected into.
+
+    Example:
+
+    ```python
+    async def customer_name(customer_id: int = TaskArgument()) -> str:
+        ...look up the customer's name by ID...
+        return "John Doe"
+
+    @task
+    async def greet_customer(customer_id: int, name: str = Depends(customer_name)) -> None:
+        print(f"Hello, {name}!")
+    ```
+    """
     return cast(Any, _TaskArgument(parameter, optional))
 
 

@@ -117,15 +174,49 @@ class _TaskLogger(Dependency):
 
 
 def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
+    """A dependency to access a logger for the currently executing task. The logger
+    will automatically inject contextual information such as the worker and docket
+    name, the task key, and the current execution attempt number.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(logger: LoggerAdapter[Logger] = TaskLogger()) -> None:
+        logger.info("Hello, world!")
+    ```
+    """
     return cast(logging.LoggerAdapter[logging.Logger], _TaskLogger())
 
 
+class ForcedRetry(Exception):
+    """Raised when a task requests a retry via `in_` or `at`"""
+
+
 class Retry(Dependency):
+    """Configures linear retries for a task. You can specify the total number of
+    attempts (or `None` to retry indefinitely), and the delay between attempts.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(retry: Retry = Retry(attempts=3)) -> None:
+        ...
+    ```
+    """
+
     single: bool = True
 
     def __init__(
         self, attempts: int | None = 1, delay: timedelta = timedelta(0)
     ) -> None:
+        """
+        Args:
+            attempts: The total number of attempts to make. If `None`, the task will
+                be retried indefinitely.
+            delay: The delay between attempts.
+        """
         self.attempts = attempts
         self.delay = delay
         self.attempt = 1

@@ -136,18 +227,46 @@ class Retry(Dependency):
         retry.attempt = execution.attempt
         return retry
 
+    def at(self, when: datetime) -> NoReturn:
+        now = datetime.now(timezone.utc)
+        diff = when - now
+        diff = diff if diff.total_seconds() >= 0 else timedelta(0)
+
+        self.in_(diff)
+
+    def in_(self, when: timedelta) -> NoReturn:
+        self.delay: timedelta = when
+        raise ForcedRetry()
+
 
 class ExponentialRetry(Retry):
-
+    """Configures exponential retries for a task. You can specify the total number
+    of attempts (or `None` to retry indefinitely), and the minimum and maximum delays
+    between attempts.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(retry: ExponentialRetry = ExponentialRetry(attempts=3)) -> None:
+        ...
+    ```
+    """
 
     def __init__(
         self,
-        attempts: int = 1,
+        attempts: int | None = 1,
         minimum_delay: timedelta = timedelta(seconds=1),
         maximum_delay: timedelta = timedelta(seconds=64),
     ) -> None:
+        """
+        Args:
+            attempts: The total number of attempts to make. If `None`, the task will
+                be retried indefinitely.
+            minimum_delay: The minimum delay between attempts.
+            maximum_delay: The maximum delay between attempts.
+        """
         super().__init__(attempts=attempts, delay=minimum_delay)
-        self.minimum_delay = minimum_delay
         self.maximum_delay = maximum_delay
 
     async def __aenter__(self) -> "ExponentialRetry":

@@ -155,14 +274,14 @@ class ExponentialRetry(Retry):
 
         retry = ExponentialRetry(
             attempts=self.attempts,
-            minimum_delay=self.minimum_delay,
+            minimum_delay=self.delay,
             maximum_delay=self.maximum_delay,
         )
         retry.attempt = execution.attempt
 
         if execution.attempt > 1:
             backoff_factor = 2 ** (execution.attempt - 1)
-            calculated_delay = self.minimum_delay * backoff_factor
+            calculated_delay = self.delay * backoff_factor
 
             if calculated_delay > self.maximum_delay:
                 retry.delay = self.maximum_delay

@@ -173,6 +292,19 @@ class ExponentialRetry(Retry):
 
 
 class Perpetual(Dependency):
+    """Declare a task that should be run perpetually. Perpetual tasks are automatically
+    rescheduled for the future after they finish (whether they succeed or fail). A
+    perpetual task can be scheduled at worker startup with `automatic=True`.
+
+    Example:
+
+    ```python
+    @task
+    async def my_task(perpetual: Perpetual = Perpetual()) -> None:
+        ...
+    ```
+    """
+
     single = True
 
     every: timedelta

@@ -188,8 +320,7 @@ class Perpetual(Dependency):
         every: timedelta = timedelta(0),
         automatic: bool = False,
     ) -> None:
-        """
-
+        """
         Args:
             every: The target interval between task executions.
             automatic: If set, this task will be automatically scheduled during worker

@@ -217,13 +348,29 @@ class Perpetual(Dependency):
 
 
 class Timeout(Dependency):
-
+    """Configures a timeout for a task. You can specify the base timeout, and the
+    task will be cancelled if it exceeds this duration. The timeout may be extended
+    within the context of a single running task.
 
-
+    Example:
+
+    ```python
+    @task
+    async def my_task(timeout: Timeout = Timeout(timedelta(seconds=10))) -> None:
+        ...
+    ```
+    """
 
+    single: bool = True
+
+    base: timedelta
     _deadline: float
 
     def __init__(self, base: timedelta) -> None:
+        """
+        Args:
+            base: The base timeout duration.
+        """
         self.base = base
 
     async def __aenter__(self) -> "Timeout":

@@ -238,9 +385,16 @@ class Timeout(Dependency):
         return time.monotonic() >= self._deadline
 
     def remaining(self) -> timedelta:
+        """Get the remaining time until the timeout expires."""
         return timedelta(seconds=self._deadline - time.monotonic())
 
     def extend(self, by: timedelta | None = None) -> None:
+        """Extend the timeout by a given duration. If no duration is provided, the
+        base timeout will be used.
+
+        Args:
+            by: The duration to extend the timeout by.
+        """
         if by is None:
             by = self.base
         self._deadline += by.total_seconds()

@@ -328,6 +482,23 @@ class _Depends(Dependency, Generic[R]):
 
 
 def Depends(dependency: DependencyFunction[R]) -> R:
+    """Include a user-defined function as a dependency. Dependencies may either return
+    a value or an async context manager. If it returns a context manager, the
+    dependency will be entered and exited around the task, giving an opportunity to
+    control the lifetime of a resource, like a database connection.
+
+    Example:
+
+    ```python
+
+    async def my_dependency() -> str:
+        return "Hello, world!"
+
+    @task async def my_task(dependency: str = Depends(my_dependency)) -> None:
+        print(dependency)
+
+    ```
+    """
     return cast(R, _Depends(dependency))
 
 
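As a quick illustration of the new forced-retry API added above, here is a hedged sketch (not from the package) of a task asking to be retried at a specific time; it assumes `Retry` is importable from `docket`, as the 0.6.3 README shows, and that the worker reschedules the task when `ForcedRetry` is raised:

```python
from datetime import datetime, timedelta, timezone

from docket import Retry


async def publish_report(ready_at: datetime, retry: Retry = Retry(attempts=None)) -> None:
    # Not due yet: ask docket to try again at `ready_at` (raises ForcedRetry internally,
    # which presumably the worker turns into a rescheduled execution with the new delay).
    if datetime.now(timezone.utc) < ready_at:
        retry.at(ready_at)

    # A relative variant would be: retry.in_(timedelta(minutes=5))
    print("publishing the report")
```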
docket/docket.py
CHANGED
@@ -112,6 +112,20 @@ class DocketSnapshot:
 
 
 class Docket:
+    """A Docket represents a collection of tasks that may be scheduled for later
+    execution. With a Docket, you can add, replace, and cancel tasks.
+    Example:
+
+    ```python
+    @task
+    async def my_task(greeting: str, recipient: str) -> None:
+        print(f"{greeting}, {recipient}!")
+
+    async with Docket() as docket:
+        docket.add(my_task)("Hello", recipient="world")
+    ```
+    """
+
     tasks: dict[str, TaskFunction]
     strike_list: StrikeList
 

@@ -199,6 +213,11 @@ class Docket:
         await asyncio.shield(r.__aexit__(None, None, None))
 
     def register(self, function: TaskFunction) -> None:
+        """Register a task with the Docket.
+
+        Args:
+            function: The task to register.
+        """
         from .dependencies import validate_dependencies
 
         validate_dependencies(function)

@@ -229,7 +248,14 @@ class Docket:
         function: Callable[P, Awaitable[R]],
         when: datetime | None = None,
         key: str | None = None,
-    ) -> Callable[P, Awaitable[Execution]]:
+    ) -> Callable[P, Awaitable[Execution]]:
+        """Add a task to the Docket.
+
+        Args:
+            function: The task function to add.
+            when: The time to schedule the task.
+            key: The key to schedule the task under.
+        """
 
     @overload
     def add(

@@ -237,7 +263,14 @@ class Docket:
         function: str,
         when: datetime | None = None,
         key: str | None = None,
-    ) -> Callable[..., Awaitable[Execution]]:
+    ) -> Callable[..., Awaitable[Execution]]:
+        """Add a task to the Docket.
+
+        Args:
+            function: The name of a task to add.
+            when: The time to schedule the task.
+            key: The key to schedule the task under.
+        """
 
     def add(
         self,

@@ -245,6 +278,13 @@ class Docket:
         when: datetime | None = None,
         key: str | None = None,
     ) -> Callable[..., Awaitable[Execution]]:
+        """Add a task to the Docket.
+
+        Args:
+            function: The task to add.
+            when: The time to schedule the task.
+            key: The key to schedule the task under.
+        """
         if isinstance(function, str):
             function = self.tasks[function]
         else:

@@ -277,7 +317,14 @@ class Docket:
         function: Callable[P, Awaitable[R]],
         when: datetime,
         key: str,
-    ) -> Callable[P, Awaitable[Execution]]:
+    ) -> Callable[P, Awaitable[Execution]]:
+        """Replace a previously scheduled task on the Docket.
+
+        Args:
+            function: The task function to replace.
+            when: The time to schedule the task.
+            key: The key to schedule the task under.
+        """
 
     @overload
     def replace(

@@ -285,7 +332,14 @@ class Docket:
         function: str,
         when: datetime,
         key: str,
-    ) -> Callable[..., Awaitable[Execution]]:
+    ) -> Callable[..., Awaitable[Execution]]:
+        """Replace a previously scheduled task on the Docket.
+
+        Args:
+            function: The name of a task to replace.
+            when: The time to schedule the task.
+            key: The key to schedule the task under.
+        """
 
     def replace(
         self,

@@ -293,6 +347,13 @@ class Docket:
         when: datetime,
         key: str,
     ) -> Callable[..., Awaitable[Execution]]:
+        """Replace a previously scheduled task on the Docket.
+
+        Args:
+            function: The task to replace.
+            when: The time to schedule the task.
+            key: The key to schedule the task under.
+        """
         if isinstance(function, str):
             function = self.tasks[function]
 

@@ -329,6 +390,11 @@ class Docket:
         TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
 
     async def cancel(self, key: str) -> None:
+        """Cancel a previously scheduled task on the Docket.
+
+        Args:
+            key: The key of the task to cancel.
+        """
         with tracer.start_as_current_span(
             "docket.cancel",
             attributes={**self.labels(), "docket.key": key},

@@ -421,6 +487,14 @@ class Docket:
         operator: Operator | LiteralOperator = "==",
         value: Hashable | None = None,
     ) -> None:
+        """Strike a task from the Docket.
+
+        Args:
+            function: The task to strike.
+            parameter: The parameter to strike on.
+            operator: The operator to use.
+            value: The value to strike on.
+        """
         if not isinstance(function, (str, type(None))):
             function = function.__name__
 

@@ -436,6 +510,14 @@ class Docket:
         operator: Operator | LiteralOperator = "==",
         value: Hashable | None = None,
     ) -> None:
+        """Restore a previously stricken task to the Docket.
+
+        Args:
+            function: The task to restore.
+            parameter: The parameter to restore on.
+            operator: The operator to use.
+            value: The value to restore on.
+        """
         if not isinstance(function, (str, type(None))):
             function = function.__name__
 

@@ -501,6 +583,12 @@ class Docket:
         await asyncio.sleep(1)
 
     async def snapshot(self) -> DocketSnapshot:
+        """Get a snapshot of the Docket, including which tasks are scheduled or currently
+        running, as well as which workers are active.
+
+        Returns:
+            A snapshot of the Docket.
+        """
         running: list[RunningExecution] = []
         future: list[Execution] = []
 

@@ -585,6 +673,11 @@ class Docket:
         return f"{self.name}:task-workers:{task_name}"
 
     async def workers(self) -> Collection[WorkerInfo]:
+        """Get a list of all workers that have sent heartbeats to the Docket.
+
+        Returns:
+            A list of all workers that have sent heartbeats to the Docket.
+        """
         workers: list[WorkerInfo] = []
 
         oldest = datetime.now(timezone.utc).timestamp() - (

@@ -615,8 +708,15 @@ class Docket:
         return workers
 
     async def task_workers(self, task_name: str) -> Collection[WorkerInfo]:
-
+        """Get a list of all workers that are able to execute a given task.
+
+        Args:
+            task_name: The name of the task.
 
+        Returns:
+            A list of all workers that are able to execute the given task.
+        """
+        workers: list[WorkerInfo] = []
         oldest = datetime.now(timezone.utc).timestamp() - (
             self.heartbeat_interval.total_seconds() * self.missed_heartbeats
         )
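To make the newly documented introspection methods concrete, here is a hedged sketch (not from the package) that calls `snapshot()` and `workers()` as described above; it assumes the default Redis URL and docket name:

```python
import asyncio

from docket import Docket


async def main() -> None:
    async with Docket() as docket:  # defaults to the local Redis and the "docket" name
        snapshot = await docket.snapshot()  # scheduled and currently running work
        workers = await docket.workers()    # workers that have recently sent heartbeats
        print(snapshot)
        print(f"{len(workers)} active worker(s)")


asyncio.run(main())
```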
docket/instrumentation.py
CHANGED
@@ -1,5 +1,5 @@
-import threading
 from contextlib import contextmanager
+from threading import Thread
 from typing import Generator, cast
 
 from opentelemetry import metrics

@@ -145,6 +145,34 @@ message_getter: MessageGetter = MessageGetter()
 message_setter: MessageSetter = MessageSetter()
 
 
+@contextmanager
+def healthcheck_server(
+    host: str = "0.0.0.0", port: int | None = None
+) -> Generator[None, None, None]:
+    if port is None:
+        yield
+        return
+
+    from http.server import BaseHTTPRequestHandler, HTTPServer
+
+    class HealthcheckHandler(BaseHTTPRequestHandler):
+        def do_GET(self):
+            self.send_response(200)
+            self.send_header("Content-type", "text/plain")
+            self.end_headers()
+            self.wfile.write(b"OK")
+
+        def log_message(self, format: str, *args: object) -> None:
+            # Suppress access logs from the webserver
+            pass
+
+    server = HTTPServer((host, port), HealthcheckHandler)
+    with server:
+        Thread(target=server.serve_forever, daemon=True).start()
+
+        yield
+
+
 @contextmanager
 def metrics_server(
     host: str = "0.0.0.0", port: int | None = None

@@ -173,8 +201,6 @@ def metrics_server(
         handler_class=_SilentHandler,
     )
     with server:
-
-        t.daemon = True
-        t.start()
+        Thread(target=server.serve_forever, daemon=True).start()
 
         yield
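As a hedged illustration of what the new healthcheck server returns (assuming a worker was started with the healthcheck port set to 8080), any GET request should receive a plain-text 200 response:

```python
from urllib.request import urlopen

# The handler above answers every GET with status 200 and the body "OK".
with urlopen("http://localhost:8080/") as response:
    print(response.status, response.read().decode())  # expected output: "200 OK"
```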
docket/worker.py
CHANGED
@@ -51,6 +51,7 @@ from .instrumentation import (
     TASKS_STARTED,
     TASKS_STRICKEN,
     TASKS_SUCCEEDED,
+    healthcheck_server,
     metrics_server,
 )
 

@@ -65,6 +66,18 @@ class _stream_due_tasks(Protocol):
 
 
 class Worker:
+    """A Worker executes tasks on a Docket. You may run as many workers as you like
+    to work a single Docket.
+
+    Example:
+
+    ```python
+    async with Docket() as docket:
+        async with Worker(docket) as worker:
+            await worker.run_forever()
+    ```
+    """
+
     docket: Docket
     name: str
     concurrency: int

@@ -140,10 +153,14 @@ class Worker:
         scheduling_resolution: timedelta = timedelta(milliseconds=250),
         schedule_automatic_tasks: bool = True,
         until_finished: bool = False,
+        healthcheck_port: int | None = None,
         metrics_port: int | None = None,
         tasks: list[str] = ["docket.tasks:standard_tasks"],
     ) -> None:
-        with metrics_server(port=metrics_port):
+        with (
+            healthcheck_server(port=healthcheck_port),
+            metrics_server(port=metrics_port),
+        ):
             async with Docket(name=docket_name, url=url) as docket:
                 for task_path in tasks:
                     docket.register_collection(task_path)
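For a programmatic equivalent of the CLI wiring above, here is a hedged sketch (not from the package); it assumes `healthcheck_server` can be imported from `docket.instrumentation`, where this diff defines it, and uses the `Docket`/`Worker` pattern from the README:

```python
import asyncio

from docket import Docket, Worker
from docket.instrumentation import healthcheck_server


async def main() -> None:
    # Serve "OK" on port 8080 for liveness probes while the worker drains the docket.
    with healthcheck_server(port=8080):
        async with Docket() as docket:
            async with Worker(docket) as worker:
                await worker.run_until_finished()


asyncio.run(main())
```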
pydocket-0.7.0.dist-info/METADATA
ADDED

@@ -0,0 +1,139 @@
Metadata-Version: 2.4
Name: pydocket
Version: 0.7.0
Summary: A distributed background task system for Python functions
Project-URL: Homepage, https://github.com/chrisguidry/docket
Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
Author-email: Chris Guidry <guid@omg.lol>
License: # Released under MIT License

Copyright (c) 2025 Chris Guidry.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Typing :: Typed
Requires-Python: >=3.12
Requires-Dist: cloudpickle>=3.1.1
Requires-Dist: opentelemetry-api>=1.30.0
Requires-Dist: opentelemetry-exporter-prometheus>=0.51b0
Requires-Dist: prometheus-client>=0.21.1
Requires-Dist: python-json-logger>=3.2.1
Requires-Dist: redis>=4.6
Requires-Dist: rich>=13.9.4
Requires-Dist: typer>=0.15.1
Requires-Dist: uuid7>=0.1.0
Description-Content-Type: text/markdown

Docket is a distributed background task system for Python functions with a focus
on the scheduling of future work as seamlessly and efficiently as immediate work.

[](https://pypi.org/project/pydocket/)
[](https://pypi.org/project/pydocket/)
[](https://github.com/chrisguidry/docket/actions/workflows/ci.yml)
[](https://app.codecov.io/gh/chrisguidry/docket)
[](https://github.com/chrisguidry/docket/blob/main/LICENSE)
[](https://chrisguidry.github.io/docket/)

## At a glance

```python
from datetime import datetime, timedelta, timezone

from docket import Docket


async def greet(name: str, greeting="Hello") -> None:
    print(f"{greeting}, {name} at {datetime.now()}!")


async with Docket() as docket:
    await docket.add(greet)("Jane")

    now = datetime.now(timezone.utc)
    soon = now + timedelta(seconds=3)
    await docket.add(greet, when=soon)("John", greeting="Howdy")
```

```python
from docket import Docket, Worker

async with Docket() as docket:
    async with Worker(docket) as worker:
        await worker.run_until_finished()
```

```
Hello, Jane at 2025-03-05 13:58:21.552644!
Howdy, John at 2025-03-05 13:58:24.550773!
```

Check out our docs for more [details](http://chrisguidry.github.io/docket/),
[examples](https://chrisguidry.github.io/docket/getting-started/), and the [API
reference](https://chrisguidry.github.io/docket/api-reference/).

## Why `docket`?

⚡️ Snappy one-way background task processing without any bloat

📅 Schedule immediate or future work seamlessly with the same interface

⏭️ Skip problematic tasks or parameters without redeploying

🌊 Purpose-built for Redis streams

🧩 Fully type-complete and type-aware for your background task functions

## Installing `docket`

Docket is [available on PyPI](https://pypi.org/project/pydocket/) under the package name
`pydocket`. It targets Python 3.12 or above.

With [`uv`](https://docs.astral.sh/uv/):

```bash
uv pip install pydocket

or

uv add pydocket
```

With `pip`:

```bash
pip install pydocket
```

Docket requires a [Redis](http://redis.io/) server with Streams support (which was
introduced in Redis 5.0.0). Docket is tested with Redis 6 and 7.

# Hacking on `docket`

We use [`uv`](https://docs.astral.sh/uv/) for project management, so getting set up
should be as simple as cloning the repo and running:

```bash
uv sync
```

The to run the test suite:

```bash
pytest
```

We aim to maintain 100% test coverage, which is required for all PRs to `docket`. We
believe that `docket` should stay small, simple, understandable, and reliable, and that
begins with testing all the dusty branches and corners. This will give us the
confidence to upgrade dependencies quickly and to adapt to new versions of Redis over
time.
pydocket-0.7.0.dist-info/RECORD
ADDED

@@ -0,0 +1,16 @@
docket/__init__.py,sha256=sY1T_NVsXQNOmOhOnfYmZ95dcE_52Ov6DSIVIMZp-1w,869
docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
docket/annotations.py,sha256=SFBrOMbpAh7P67u8fRTH-u3MVvJQxe0qYi92WAShAsw,2173
docket/cli.py,sha256=WPm_URZ54h8gHjrsHKP8SXpRzdeepmyH_FhQHai-Qus,20899
docket/dependencies.py,sha256=fX4vafGjQf7s4x0YROaw7fzQPlYW7TZtCqNhu7Kxj40,16831
docket/docket.py,sha256=5e101CGLZ2tWNcADo4cdewapmXab47ieMCeQr6d92YQ,24478
docket/execution.py,sha256=6KozjnS96byvyCMTQ2-IkcIrPsqaPIVu2HZU0U4Be9E,14813
docket/instrumentation.py,sha256=f-GG5VS6EdS2It30qxjVpzWUBOZQcTnat-3KzPwwDgQ,5367
docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
docket/worker.py,sha256=tJfk2rlHODzHaWBzpBXT8h-Lo7RDQ6gb6HU8b3T9gFA,27878
pydocket-0.7.0.dist-info/METADATA,sha256=soXf7ybhgvSykxRDH56pMJX2DaXf3SJfDFUFLbebAvM,5335
pydocket-0.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
pydocket-0.7.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
pydocket-0.7.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
pydocket-0.7.0.dist-info/RECORD,,
pydocket-0.6.3.dist-info/METADATA
DELETED

@@ -1,389 +0,0 @@
Metadata-Version: 2.4
Name: pydocket
Version: 0.6.3
Summary: A distributed background task system for Python functions
Project-URL: Homepage, https://github.com/chrisguidry/docket
Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
Author-email: Chris Guidry <guid@omg.lol>
License: # Released under MIT License

Copyright (c) 2025 Chris Guidry.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Typing :: Typed
Requires-Python: >=3.12
Requires-Dist: cloudpickle>=3.1.1
Requires-Dist: opentelemetry-api>=1.30.0
Requires-Dist: opentelemetry-exporter-prometheus>=0.51b0
Requires-Dist: prometheus-client>=0.21.1
Requires-Dist: python-json-logger>=3.2.1
Requires-Dist: redis>=4.6
Requires-Dist: rich>=13.9.4
Requires-Dist: typer>=0.15.1
Requires-Dist: uuid7>=0.1.0
Description-Content-Type: text/markdown

Docket is a distributed background task system for Python functions with a focus
on the scheduling of future work as seamlessly and efficiency as immediate work.

[](https://pypi.org/project/pydocket/)
[](https://pypi.org/project/pydocket/)
[](https://github.com/chrisguidry/docket/actions/workflows/ci.yml)
[](https://app.codecov.io/gh/chrisguidry/docket)
[](https://github.com/chrisguidry/docket/blob/main/LICENSE)

## At a glance

```python
from datetime import datetime, timedelta, timezone

from docket import Docket


async def greet(name: str, greeting="Hello") -> None:
    print(f"{greeting}, {name} at {datetime.now()}!")


async with Docket() as docket:
    await docket.add(greet)("Jane")

    now = datetime.now(timezone.utc)
    soon = now + timedelta(seconds=3)
    await docket.add(greet, when=soon)("John", greeting="Howdy")
```

```python
from docket import Docket, Worker

async with Docket() as docket:
    async with Worker(docket) as worker:
        await worker.run_until_finished()
```

```
Hello, Jane at 2025-03-05 13:58:21.552644!
Howdy, John at 2025-03-05 13:58:24.550773!
```

## Why `docket`?

⚡️ Snappy one-way background task processing without any bloat

📅 Schedule immediate or future work seamlessly with the same interface

⏭️ Skip problematic tasks or parameters without redeploying

🌊 Purpose-built for Redis streams

🧩 Fully type-complete and type-aware for your background task functions


## Installing `docket`

Docket is [available on PyPI](https://pypi.org/project/pydocket/) under the package name
`pydocket`. It targets Python 3.12 or above.

With [`uv`](https://docs.astral.sh/uv/):

```bash
uv pip install pydocket

or

uv add pydocket
```

With `pip`:

```bash
pip install pydocket
```

Docket requires a [Redis](http://redis.io/) server with Streams support (which was
introduced in Redis 5.0.0). Docket is tested with Redis 7.


## Creating a `Docket`

Each `Docket` should have a name that will be shared across your system, like the name
of a topic or queue. By default this is `"docket"`. You can support many separate
dockets on a single Redis server as long as they have different names.

Docket accepts a URL to connect to the Redis server (defaulting to the local
server), and you can pass any additional connection configuration you need on that
connection URL.

```python
async with Docket(name="orders", url="redis://my-redis:6379/0") as docket:
    ...
```

The `name` and `url` together represent a single shared docket of work across all your
system.


## Scheduling work

A `Docket` is the entrypoint to scheduling immediate and future work. You define work
in the form of `async` functions that return `None`. These task functions can accept
any parameter types, so long as they can be serialized with
[`cloudpickle`](https://github.com/cloudpipe/cloudpickle).

```python
def now() -> datetime:
    return datetime.now(timezone.utc)

async def send_welcome_email(customer_id: int, name: str) -> None:
    ...

async def send_followup_email(customer_id: int, name: str) -> None:
    ...

async with Docket() as docket:
    await docket.add(send_welcome_email)(12345, "Jane Smith")

    tomorrow = now() + timedelta(days=1)
    await docket.add(send_followup_email, when=tomorrow)(12345, "Jane Smith")
```

`docket.add` schedules both immediate work (the default) or future work (with the
`when: datetime` parameter).

All task executions are identified with a `key` that captures the unique essence of that
piece of work. By default they are randomly assigned UUIDs, but assigning your own keys
unlocks many powerful capabilities.

```python
async with Docket() as docket:
    await docket.add(send_welcome_email)(12345, "Jane Smith")

    tomorrow = now() + timedelta(days=1)
    key = "welcome-email-for-12345"
    await docket.add(send_followup_email, when=tomorrow, key=key)(12345, "Jane Smith")
```

If you've given your future work a `key`, then only one unique instance of that
execution will exist in the future:

```python
key = "welcome-email-for-12345"
await docket.add(send_followup_email, when=tomorrow, key=key)(12345, "Jane Smith")
```

Calling `.add` a second time with the same key won't do anything, so luckily your
customer won't get two emails!

However, at any time later you can replace that task execution to alter _when_ it will
happen:

```python
key = "welcome-email-for-12345"
next_week = now() + timedelta(days=7)
await docket.replace(send_followup_email, when=next_week, key=key)(12345, "Jane Smith")
```

_what arguments_ will be passed:

```python
key = "welcome-email-for-12345"
await docket.replace(send_followup_email, when=tomorrow, key=key)(12345, "Jane Q. Smith")
```

Or just cancel it outright:

```python
await docket.cancel("welcome-email-for-12345")
```

Tasks may also be called by name, in cases where you can't or don't want to import the
module that has your tasks. This may be common in a distributed environment where the
code of your task system just isn't available, or it requires heavyweight libraries that
you wouldn't want to import into your web server. In this case, you will lose the
type-checking for `.add` and `.replace` calls, but otherwise everything will work as
it does with the actual function:

```python
await docket.add("send_followup_email", when=tomorrow)(12345, "Jane Smith")
```

These primitives of `.add`, `.replace`, and `.cancel` are sufficient to build a
large-scale and robust system of background tasks for your application.

## Writing tasks

Tasks are any `async` function that takes `cloudpickle`-able parameters, and returns
`None`. Returning `None` is a strong signal that these are _fire-and-forget_ tasks
whose results aren't used or waited-on by your application. These are the only kinds of
tasks that Docket supports.

Docket uses a parameter-based dependency and configuration pattern, which has become
common in frameworks like [FastAPI](https://fastapi.tiangolo.com/),
[Typer](https://typer.tiangolo.com/), or [FastMCP](https://github.com/jlowin/fastmcp).
As such, there is no decorator for tasks.

A very common requirement for tasks is that they have access to schedule further work
on their own docket, especially for chains of self-perpetuating tasks to implement
distributed polling and other periodic systems. One of the first dependencies you may
look for is the `CurrentDocket`:

```python
from docket import Docket, CurrentDocket

POLLING_INTERVAL = timedelta(seconds=10)

async def poll_for_changes(file: Path, docket: Docket = CurrentDocket()) -> None:
    if file.exists():
        ...do something interesting...
        return
    else:
        await docket.add(poll_for_changes, when=now() + POLLING_INTERVAL)(file)
```

Here the argument to `docket` is an instance of `Docket` with the same name and URL as
the worker it's running on. You can ask for the `CurrentWorker` and `CurrentExecution`
as well. Many times it could be useful to have your own task `key` available in order
to idempotently schedule future work:

```python
from docket import Docket, CurrentDocket, TaskKey

async def poll_for_changes(
    file: Path,
    key: str = TaskKey(),
    docket: Docket = CurrentDocket()
) -> None:
    if file.exists():
        ...do something interesting...
        return
    else:
        await docket.add(poll_for_changes, when=now() + POLLING_INTERVAL, key=key)(file)
```

This helps to ensure that there is one continuous "chain" of these future tasks, as they
all use the same key.

Configuring the retry behavior for a task is also done with a dependency:

```python
from datetime import timedelta
from docket import Retry

async def faily(retry: Retry = Retry(attempts=5, delay=timedelta(seconds=3))):
    if retry.attempt == 4:
        print("whew!")
        return

    raise ValueError("whoops!")
```

In this case, the task `faily` will run 4 times with a delay of 3 seconds between each
attempt. If it were to get to 5 attempts, no more would be attempted. This is a
linear retry, and an `ExponentialRetry` is also available:

```python
from datetime import timedelta
from docket import Retry, ExponentialRetry


async def faily(
    retry: Retry = Retry(
        attempts=5,
        minimum_delay=timedelta(seconds=2),
        maximum_delay=timedelta(seconds=32),
    ),
):
    if retry.attempt == 4:
        print("whew!")
        return

    raise ValueError("whoops!")
```

This would retry in 2, 4, 8, then 16 seconds before that fourth attempt succeeded.


## Running workers

You can run as many workers as you like to process the tasks on your docket. You can
either run a worker programmatically in Python, or via the CLI. Clients using docket
have the advantage that they are usually passing the task functions, but workers don't
necessarily know which tasks they are supposed to run. Docket solves this by allowing
you to explicitly register tasks.

In `my_tasks.py`:

```python
async def my_first_task():
    ...

async def my_second_task():
    ...

my_task_collection = [
    my_first_task,
    my_second_task,
]
```

From Python:

```python
from my_tasks import my_task_collection

async with Docket() as docket:
    for task in my_task_collection:
        docket.register(task)

    async with Worker(docket) as worker:
        await worker.run_forever()
```

From the CLI:

```bash
docket worker --tasks my_tasks:my_task_collection
```

By default, workers will process up to 10 tasks concurrently, but you can adjust this
to your needs with the `concurrency=` keyword argument or the `--concurrency` CLI
option.

When a worker crashes ungracefully, any tasks it was currently executing will be held
for a period of time before being redelivered to other workers. You can control this
time period with `redelivery_timeout=` or `--redelivery-timeout`. You'd want to set
this to a value higher than the longest task you expect to run. For queues of very fast
tasks, a few seconds may be ideal; for long data-processing steps involving large
amount of data, you may need minutes.


# Hacking on `docket`

We use [`uv`](https://docs.astral.sh/uv/) for project management, so getting set up
should be as simple as cloning the repo and running:

```bash
uv sync
```

The to run the test suite:

```bash
pytest
```

We aim to main 100% test coverage, which is required for all PRs to `docket`. We
believe that `docket` should stay small, simple, understandable, and reliable, and that
begins with testing all the dusty branches and corners. This will give us the
confidence to upgrade dependencies quickly and to adapt to new versions of Redis over
time.
pydocket-0.6.3.dist-info/RECORD
DELETED
@@ -1,16 +0,0 @@
docket/__init__.py,sha256=sY1T_NVsXQNOmOhOnfYmZ95dcE_52Ov6DSIVIMZp-1w,869
docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
docket/annotations.py,sha256=6sCgQxsgOjBN6ithFdXulXq4CPNSdyFocwyJ1gK9v2Q,1688
docket/cli.py,sha256=znHN7eqaD_PFpSFn7iXa_uZlKzVWDrKkrmOd1CNuZRk,20561
docket/dependencies.py,sha256=-gruEho5jf07Jx9fEh2YBFg4gDSJFm7X5qhQjArVXjU,11910
docket/docket.py,sha256=r5TNcGmaQuxST56OVKNjFXDsrU5-Ioz3Y_I38PkLqRM,21411
docket/execution.py,sha256=6KozjnS96byvyCMTQ2-IkcIrPsqaPIVu2HZU0U4Be9E,14813
docket/instrumentation.py,sha256=bZlGA02JoJcY0J1WGm5_qXDfY0AXKr0ZLAYu67wkeKY,4611
docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
docket/worker.py,sha256=Xf6_7GyrIUNq1jG8YjbJk5KkRQdvxs0CniF9XW8kdJg,27450
pydocket-0.6.3.dist-info/METADATA,sha256=LRtykRFP2dcauKjzQDoNpC_xe6aVjvleAN1xS5cSIUY,13120
pydocket-0.6.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
pydocket-0.6.3.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
pydocket-0.6.3.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
pydocket-0.6.3.dist-info/RECORD,,
{pydocket-0.6.3.dist-info → pydocket-0.7.0.dist-info}/WHEEL
File without changes

{pydocket-0.6.3.dist-info → pydocket-0.7.0.dist-info}/entry_points.txt
File without changes

{pydocket-0.6.3.dist-info → pydocket-0.7.0.dist-info}/licenses/LICENSE
File without changes