pydocket 0.1.4__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release.
- docket/cli.py +8 -0
- docket/dependencies.py +8 -9
- docket/docket.py +37 -40
- docket/execution.py +19 -8
- docket/instrumentation.py +53 -0
- docket/tasks.py +11 -9
- docket/worker.py +112 -56
- {pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/METADATA +1 -1
- pydocket-0.2.1.dist-info/RECORD +16 -0
- pydocket-0.1.4.dist-info/RECORD +0 -16
- {pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/WHEEL +0 -0
- {pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/entry_points.txt +0 -0
- {pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/licenses/LICENSE +0 -0
docket/cli.py
CHANGED
@@ -243,6 +243,13 @@ def worker(
             help="Exit after the current docket is finished",
         ),
     ] = False,
+    metrics_port: Annotated[
+        int | None,
+        typer.Option(
+            "--metrics-port",
+            help="The port to serve Prometheus metrics on",
+        ),
+    ] = None,
 ) -> None:
     asyncio.run(
         Worker.run(
@@ -254,6 +261,7 @@ def worker(
             reconnection_delay=reconnection_delay,
             minimum_check_interval=minimum_check_interval,
             until_finished=until_finished,
+            metrics_port=metrics_port,
             tasks=tasks,
         )
    )
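
For reference, a sketch of the programmatic equivalent of the new flag. It assumes the `docket_name` and `url` keyword names visible in Worker.run's body in the worker.py diff below, and leaves the remaining parameters at their defaults.

import asyncio

from docket.worker import Worker

# Roughly what `--metrics-port 9090` wires up (parameter names hedged as
# above); metrics_port=None, the default, disables the metrics server.
asyncio.run(
    Worker.run(
        docket_name="docket",
        url="redis://localhost:6379/0",
        until_finished=True,
        metrics_port=9090,
    )
)
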
docket/dependencies.py
CHANGED
@@ -61,15 +61,14 @@ class _TaskLogger(Dependency):
         self, docket: Docket, worker: Worker, execution: Execution
     ) -> logging.LoggerAdapter[logging.Logger]:
         logger = logging.getLogger(f"docket.task.{execution.function.__name__}")
-        …
-        return logging.LoggerAdapter(logger, extra)
+        return logging.LoggerAdapter(
+            logger,
+            {
+                **docket.labels(),
+                **worker.labels(),
+                **execution.specific_labels(),
+            },
+        )
 
 
 def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
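
The result is that every task log record carries one flat mapping of docket.* labels. A standalone sketch with placeholder values standing in for Docket.labels(), Worker.labels(), and Execution.specific_labels():

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("docket.task.greet")

# Placeholder label values; the real ones come from the dependencies above.
labels = {
    "docket.name": "docket",
    "docket.worker": "worker-1",
    "docket.task": "greet",
    "docket.key": "abc123",
    "docket.attempt": 1,
}
adapter = logging.LoggerAdapter(logger, labels)
adapter.info("starting")  # each label is copied into the record's __dict__
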
docket/docket.py
CHANGED
@@ -13,6 +13,7 @@ from typing import (
     Collection,
     Hashable,
     Iterable,
+    Mapping,
     NoReturn,
     ParamSpec,
     Self,
@@ -26,7 +27,7 @@
 
 import redis.exceptions
 from opentelemetry import propagate, trace
-from redis.asyncio import Redis
+from redis.asyncio import ConnectionPool, Redis
 
 from .execution import (
     Execution,
@@ -112,11 +113,14 @@ class Docket:
     tasks: dict[str, Callable[..., Awaitable[Any]]]
     strike_list: StrikeList
 
+    _monitor_strikes_task: asyncio.Task[None]
+    _connection_pool: ConnectionPool
+
     def __init__(
         self,
         name: str = "docket",
         url: str = "redis://localhost:6379/0",
-        heartbeat_interval: timedelta = timedelta(seconds=
+        heartbeat_interval: timedelta = timedelta(seconds=2),
         missed_heartbeats: int = 5,
     ) -> None:
         """
@@ -144,6 +148,7 @@ class Docket:
         self.tasks = {fn.__name__: fn for fn in standard_tasks}
         self.strike_list = StrikeList()
 
+        self._connection_pool = ConnectionPool.from_url(self.url)  # type: ignore
         self._monitor_strikes_task = asyncio.create_task(self._monitor_strikes())
 
         # Ensure that the stream and worker group exist
@@ -176,23 +181,17 @@ class Docket:
         except asyncio.CancelledError:
             pass
 
+        await asyncio.shield(self._connection_pool.disconnect())
+        del self._connection_pool
+
     @asynccontextmanager
     async def redis(self) -> AsyncGenerator[Redis, None]:
-        …
+        r = Redis(connection_pool=self._connection_pool)
+        await r.__aenter__()
         try:
-            redis = Redis.from_url(
-                self.url,
-                single_connection_client=True,
-            )
-            await redis.__aenter__()
-            try:
-                yield redis
-            finally:
-                await asyncio.shield(redis.__aexit__(None, None, None))
+            yield r
         finally:
-            …
-            if redis:
-                await asyncio.shield(redis.connection_pool.disconnect())
+            await asyncio.shield(r.__aexit__(None, None, None))
 
     def register(self, function: Callable[..., Awaitable[Any]]) -> None:
         from .dependencies import validate_dependencies
@@ -214,6 +213,11 @@ class Docket:
         for function in collection:
             self.register(function)
 
+    def labels(self) -> Mapping[str, str]:
+        return {
+            "docket.name": self.name,
+        }
+
     @overload
     def add(
         self,
@@ -251,7 +255,7 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)
         await self.schedule(execution)
 
-        TASKS_ADDED.add(1, {
+        TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
 
         return execution
@@ -287,7 +291,7 @@ class Docket:
         await self.cancel(key)
         await self.schedule(execution)
 
-        TASKS_REPLACED.add(1, {
+        TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
 
         return execution
@@ -314,9 +318,9 @@ class Docket:
             TASKS_STRICKEN.add(
                 1,
                 {
-                    …
-                    …
-                    "where": "docket",
+                    **self.labels(),
+                    **execution.specific_labels(),
+                    "docket.where": "docket",
                 },
             )
             return
@@ -327,10 +331,8 @@ class Docket:
         with tracer.start_as_current_span(
             "docket.schedule",
             attributes={
-                …
-                …
-                "docket.execution.key": execution.key,
-                "docket.execution.attempt": execution.attempt,
+                **self.labels(),
+                **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
         ):
@@ -350,16 +352,14 @@ class Docket:
                 pipe.zadd(self.queue_key, {key: when.timestamp()})
                 await pipe.execute()
 
-        TASKS_SCHEDULED.add(
-            1, {"docket": self.name, "task": execution.function.__name__}
-        )
+        TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
 
     async def cancel(self, key: str) -> None:
         with tracer.start_as_current_span(
             "docket.cancel",
             attributes={
-                …
-                "docket.
+                **self.labels(),
+                "docket.key": key,
             },
         ):
             async with self.redis() as redis:
@@ -368,7 +368,7 @@ class Docket:
                 pipe.zrem(self.queue_key, key)
                 await pipe.execute()
 
-        TASKS_CANCELLED.add(1,
+        TASKS_CANCELLED.add(1, self.labels())
 
     @property
     def strike_key(self) -> str:
@@ -408,8 +408,8 @@ class Docket:
         with tracer.start_as_current_span(
             f"docket.{instruction.direction}",
             attributes={
-                …
-                **instruction.
+                **self.labels(),
+                **instruction.labels(),
             },
         ):
             async with self.redis() as redis:
@@ -441,18 +441,15 @@ class Docket:
                         else "Restoring"
                     ),
                     instruction.call_repr(),
-                    extra=
+                    extra=self.labels(),
                 )
 
-                counter_labels = {"docket": self.name}
-                if instruction.function:
-                    counter_labels["task"] = instruction.function
-                if instruction.parameter:
-                    counter_labels["parameter"] = instruction.parameter
-
                 STRIKES_IN_EFFECT.add(
                     1 if instruction.direction == "strike" else -1,
-                    …
+                    {
+                        **self.labels(),
+                        **instruction.labels(),
+                    },
                 )
 
            except redis.exceptions.ConnectionError:  # pragma: no cover
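
The connection change above replaces a fresh single-connection client per call with one shared pool per Docket. A minimal standalone sketch of that pattern, using only redis-py's public API:

import asyncio

from redis.asyncio import ConnectionPool, Redis

async def main() -> None:
    # One pool for the lifetime of the Docket...
    pool = ConnectionPool.from_url("redis://localhost:6379/0")
    try:
        # ...with cheap, short-lived clients borrowing connections from it.
        async with Redis(connection_pool=pool) as r:
            await r.ping()
    finally:
        await pool.disconnect()

asyncio.run(main())
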
docket/execution.py
CHANGED
@@ -3,7 +3,7 @@ import enum
 import inspect
 import logging
 from datetime import datetime
-from typing import Any, Awaitable, Callable, Hashable, Literal, Self, cast
+from typing import Any, Awaitable, Callable, Hashable, Literal, Mapping, Self, cast
 
 import cloudpickle  # type: ignore[import]
 
@@ -55,6 +55,17 @@ class Execution:
             attempt=int(message[b"attempt"].decode()),
         )
 
+    def general_labels(self) -> Mapping[str, str]:
+        return {"docket.task": self.function.__name__}
+
+    def specific_labels(self) -> Mapping[str, str | int]:
+        return {
+            "docket.task": self.function.__name__,
+            "docket.key": self.key,
+            "docket.when": self.when.isoformat(),
+            "docket.attempt": self.attempt,
+        }
+
     def call_repr(self) -> str:
         arguments: list[str] = []
         signature = inspect.signature(self.function)
@@ -131,17 +142,17 @@ class StrikeInstruction(abc.ABC):
         else:
             return Restore(function, parameter, operator, value)
 
-    def
-    …
+    def labels(self) -> Mapping[str, str]:
+        labels: dict[str, str] = {}
         if self.function:
-            …
+            labels["docket.task"] = self.function
 
         if self.parameter:
-            …
-            …
-            …
+            labels["docket.parameter"] = self.parameter
+            labels["docket.operator"] = self.operator
+            labels["docket.value"] = repr(self.value)
 
-        return
+        return labels
 
     def call_repr(self) -> str:
         return (
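
The two methods encode a cardinality split: general_labels() stays low-cardinality (safe for metric counters), while specific_labels() adds per-execution detail (for spans and log records). A standalone sketch with illustrative values:

from datetime import datetime, timezone

function_name, key, attempt = "greet", "abc123", 1
when = datetime.now(timezone.utc)

general = {"docket.task": function_name}  # counters: one series per task
specific = {                              # spans/logs: per-execution detail
    "docket.task": function_name,
    "docket.key": key,
    "docket.when": when.isoformat(),
    "docket.attempt": attempt,
}
print(general, specific, sep="\n")
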
docket/instrumentation.py
CHANGED
@@ -1,5 +1,12 @@
+import threading
+from contextlib import contextmanager
+from typing import Generator, cast
+
 from opentelemetry import metrics
+from opentelemetry.exporter.prometheus import PrometheusMetricReader
+from opentelemetry.metrics import set_meter_provider
 from opentelemetry.propagators.textmap import Getter, Setter
+from opentelemetry.sdk.metrics import MeterProvider
 
 meter: metrics.Meter = metrics.get_meter("docket")
 
@@ -93,6 +100,17 @@ STRIKES_IN_EFFECT = meter.create_up_down_counter(
     unit="1",
 )
 
+QUEUE_DEPTH = meter.create_gauge(
+    "docket_queue_depth",
+    description="How many tasks are due to be executed now",
+    unit="1",
+)
+SCHEDULE_DEPTH = meter.create_gauge(
+    "docket_schedule_depth",
+    description="How many tasks are scheduled to be executed in the future",
+    unit="1",
+)
+
 Message = dict[bytes, bytes]
 
 
@@ -119,3 +137,38 @@ class MessageSetter(Setter[Message]):
 
 message_getter: MessageGetter = MessageGetter()
 message_setter: MessageSetter = MessageSetter()
+
+
+@contextmanager
+def metrics_server(
+    host: str = "0.0.0.0", port: int | None = None
+) -> Generator[None, None, None]:
+    if port is None:
+        yield
+        return
+
+    from wsgiref.types import WSGIApplication
+
+    from prometheus_client import REGISTRY
+    from prometheus_client.exposition import (
+        ThreadingWSGIServer,
+        _SilentHandler,  # type: ignore[member-access]
+        make_server,  # type: ignore[import]
+        make_wsgi_app,  # type: ignore[import]
+    )
+
+    set_meter_provider(MeterProvider(metric_readers=[PrometheusMetricReader()]))
+
+    server = make_server(
+        host,
+        port,
+        cast(WSGIApplication, make_wsgi_app(registry=REGISTRY)),
+        ThreadingWSGIServer,
+        handler_class=_SilentHandler,
+    )
+    with server:
+        t = threading.Thread(target=server.serve_forever)
+        t.daemon = True
+        t.start()
+
+        yield
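
A usage sketch for the new context manager (it requires prometheus_client and the opentelemetry Prometheus exporter; with port=None it is a no-op). The server runs on a daemon thread for the life of the block:

from docket.instrumentation import metrics_server

with metrics_server(port=9090):
    ...  # run a worker here; scrape http://localhost:9090/metrics
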
docket/tasks.py
CHANGED
@@ -2,16 +2,21 @@ import asyncio
 import logging
 from datetime import datetime, timezone
 
-from .dependencies import
+from .dependencies import (
+    CurrentDocket,
+    CurrentExecution,
+    CurrentWorker,
+    Retry,
+    TaskLogger,
+)
 from .docket import Docket, TaskCollection
 from .execution import Execution
 from .worker import Worker
 
-logger: logging.Logger = logging.getLogger(__name__)
-
 
 async def trace(
     message: str,
+    logger: logging.LoggerAdapter[logging.Logger] = TaskLogger(),
     docket: Docket = CurrentDocket(),
     worker: Worker = CurrentWorker(),
     execution: Execution = CurrentExecution(),
@@ -23,11 +28,6 @@ async def trace(
         docket.name,
         (datetime.now(timezone.utc) - execution.when),
         worker.name,
-        extra={
-            "docket.name": docket.name,
-            "worker.name": worker.name,
-            "execution.key": execution.key,
-        },
     )
 
 
@@ -45,7 +45,9 @@ async def fail(
     )
 
 
-async def sleep(
+async def sleep(
+    seconds: float, logger: logging.LoggerAdapter[logging.Logger] = TaskLogger()
+) -> None:
     logger.info("Sleeping for %s seconds", seconds)
     await asyncio.sleep(seconds)
 
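
The built-in tasks now receive their logger as a dependency default instead of using a module-level logger. A sketch of the same pattern in a user task (the task name and body are illustrative):

import logging

from docket.dependencies import TaskLogger

async def greet(
    name: str,
    logger: logging.LoggerAdapter[logging.Logger] = TaskLogger(),
) -> None:
    # Records emitted here carry the docket/worker/execution labels
    # assembled in dependencies.py above.
    logger.info("Hello, %s!", name)
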
docket/worker.py
CHANGED
@@ -7,6 +7,8 @@ from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
+    Callable,
+    Mapping,
     Protocol,
     Self,
     TypeVar,
@@ -27,7 +29,9 @@ from .docket import (
     RedisReadGroupResponse,
 )
 from .instrumentation import (
+    QUEUE_DEPTH,
     REDIS_DISRUPTIONS,
+    SCHEDULE_DEPTH,
     TASK_DURATION,
     TASK_PUNCTUALITY,
     TASKS_COMPLETED,
@@ -38,6 +42,7 @@ from .instrumentation import (
     TASKS_STRICKEN,
     TASKS_SUCCEEDED,
     message_getter,
+    metrics_server,
 )
 
 logger: logging.Logger = logging.getLogger(__name__)
@@ -59,6 +64,11 @@ class _stream_due_tasks(Protocol):
 class Worker:
     docket: Docket
     name: str
+    concurrency: int
+    redelivery_timeout: timedelta
+    reconnection_delay: timedelta
+    minimum_check_interval: timedelta
+    _strike_conditions: list[Callable[[Execution], bool]] = []
 
     def __init__(
         self,
@@ -76,6 +86,10 @@ class Worker:
         self.reconnection_delay = reconnection_delay
         self.minimum_check_interval = minimum_check_interval
 
+        self._strike_conditions = [
+            docket.strike_list.is_stricken,
+        ]
+
     async def __aenter__(self) -> Self:
         self._heartbeat_task = asyncio.create_task(self._heartbeat())
 
@@ -94,11 +108,17 @@ class Worker:
             pass
         del self._heartbeat_task
 
-    …
-    def _log_context(self) -> dict[str, str]:
+    def labels(self) -> Mapping[str, str]:
         return {
-            …
-            "
+            **self.docket.labels(),
+            "docket.worker": self.name,
+        }
+
+    def _log_context(self) -> Mapping[str, str]:
+        return {
+            **self.labels(),
+            "docket.queue_key": self.docket.queue_key,
+            "docket.stream_key": self.docket.stream_key,
         }
 
     @classmethod
@@ -112,24 +132,26 @@ class Worker:
         reconnection_delay: timedelta = timedelta(seconds=5),
         minimum_check_interval: timedelta = timedelta(milliseconds=100),
         until_finished: bool = False,
+        metrics_port: int | None = None,
         tasks: list[str] = ["docket.tasks:standard_tasks"],
     ) -> None:
-        …
+        with metrics_server(port=metrics_port):
+            async with Docket(name=docket_name, url=url) as docket:
+                for task_path in tasks:
+                    docket.register_collection(task_path)
+
+                async with Worker(
+                    docket=docket,
+                    name=name,
+                    concurrency=concurrency,
+                    redelivery_timeout=redelivery_timeout,
+                    reconnection_delay=reconnection_delay,
+                    minimum_check_interval=minimum_check_interval,
+                ) as worker:
+                    if until_finished:
+                        await worker.run_until_finished()
+                    else:
+                        await worker.run_forever()  # pragma: no cover
 
     async def run_until_finished(self) -> None:
         """Run the worker until there are no more tasks to process."""
@@ -139,6 +161,35 @@ class Worker:
         """Run the worker indefinitely."""
         return await self._run(forever=True)  # pragma: no cover
 
+    async def run_at_most(self, iterations_by_key: Mapping[str, int]) -> None:
+        """
+        Run the worker until there are no more tasks to process, but limit specified
+        task keys to a maximum number of iterations.
+
+        This is particularly useful for testing self-perpetuating tasks that would
+        otherwise run indefinitely.
+
+        Args:
+            iterations_by_key: Maps task keys to their maximum allowed executions
+        """
+        execution_counts: dict[str, int] = {key: 0 for key in iterations_by_key}
+
+        def has_reached_max_iterations(execution: Execution) -> bool:
+            if execution.key not in iterations_by_key:
+                return False
+
+            if execution_counts[execution.key] >= iterations_by_key[execution.key]:
+                return True
+
+            execution_counts[execution.key] += 1
+            return False
+
+        self._strike_conditions.insert(0, has_reached_max_iterations)
+        try:
+            await self.run_until_finished()
+        finally:
+            self._strike_conditions.remove(has_reached_max_iterations)
+
     async def _run(self, forever: bool = False) -> None:
         logger.info("Starting worker %r with the following tasks:", self.name)
         for task_name, task in self.docket.tasks.items():
@@ -149,9 +200,7 @@ class Worker:
         try:
             return await self._worker_loop(forever=forever)
         except redis.exceptions.ConnectionError:
-            REDIS_DISRUPTIONS.add(
-                1, {"docket": self.docket.name, "worker": self.name}
-            )
+            REDIS_DISRUPTIONS.add(1, self.labels())
             logger.warning(
                 "Error connecting to redis, retrying in %s...",
                 self.reconnection_delay,
@@ -263,7 +312,7 @@ class Worker:
                 future_work,
                 self.docket.queue_key,
                 self.docket.stream_key,
-                extra=self._log_context,
+                extra=self._log_context(),
             )
 
         redeliveries: RedisMessages
@@ -304,7 +353,7 @@ class Worker:
             logger.info(
                 "Shutdown requested, finishing %d active tasks...",
                 len(active_tasks),
-                extra=self._log_context,
+                extra=self._log_context(),
             )
         finally:
             if active_tasks:
@@ -312,36 +361,28 @@ class Worker:
                 await process_completed_tasks()
 
     async def _execute(self, message: RedisMessage) -> None:
+        log_context: Mapping[str, str | float] = self._log_context()
+
         function_name = message[b"function"].decode()
         function = self.docket.tasks.get(function_name)
         if function is None:
             logger.warning(
-                "Task function %r not found", function_name, extra=
+                "Task function %r not found", function_name, extra=log_context
             )
             return
 
         execution = Execution.from_message(function, message)
-        name = execution.function.__name__
-        key = execution.key
 
-        log_context
-        …
-            "task": name,
-            "key": key,
-        }
-        counter_labels = {
-            "docket": self.docket.name,
-            "worker": self.name,
-            "task": name,
-        }
+        log_context = {**log_context, **execution.specific_labels()}
+        counter_labels = {**self.labels(), **execution.general_labels()}
 
         arrow = "↬" if execution.attempt > 1 else "↪"
         call = execution.call_repr()
 
-        if
+        if any(condition(execution) for condition in self._strike_conditions):
             arrow = "🗙"
             logger.warning("%s %s", arrow, call, extra=log_context)
-            TASKS_STRICKEN.add(1, counter_labels | {"where": "worker"})
+            TASKS_STRICKEN.add(1, counter_labels | {"docket.where": "worker"})
            return
 
         dependencies = self._get_dependencies(execution)
@@ -352,7 +393,7 @@ class Worker:
 
         start = datetime.now(timezone.utc)
         punctuality = start - execution.when
-        log_context
+        log_context = {**log_context, "punctuality": punctuality.total_seconds()}
         duration = timedelta(0)
 
         TASKS_STARTED.add(1, counter_labels)
@@ -366,11 +407,8 @@ class Worker:
             execution.function.__name__,
             kind=trace.SpanKind.CONSUMER,
             attributes={
-                …
-                …
-                "docket.execution.key": execution.key,
-                "docket.execution.attempt": execution.attempt,
-                "docket.execution.punctuality": punctuality.total_seconds(),
+                **self.labels(),
+                **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
             links=links,
@@ -438,12 +476,7 @@ class Worker:
             execution.attempt += 1
             await self.docket.schedule(execution)
 
-            counter_labels = {
-                "docket": self.docket.name,
-                "worker": self.name,
-                "task": execution.function.__name__,
-            }
-            TASKS_RETRIED.add(1, counter_labels)
+            TASKS_RETRIED.add(1, {**self.labels(), **execution.specific_labels()})
             return True
 
         return False
@@ -487,12 +520,35 @@ class Worker:
                     )
 
                     await pipeline.execute()
+
+                async with r.pipeline() as pipeline:
+                    pipeline.xlen(self.docket.stream_key)
+                    pipeline.zcount(self.docket.queue_key, 0, now)
+                    pipeline.zcount(self.docket.queue_key, now, "+inf")
+
+                    (
+                        stream_depth,
+                        overdue_depth,
+                        schedule_depth,
+                    ) = await pipeline.execute()
+
+                    QUEUE_DEPTH.set(
+                        stream_depth + overdue_depth, self.docket.labels()
+                    )
+                    SCHEDULE_DEPTH.set(schedule_depth, self.docket.labels())
+
         except asyncio.CancelledError:  # pragma: no cover
             return
         except redis.exceptions.ConnectionError:
-            REDIS_DISRUPTIONS.add(
-                …
+            REDIS_DISRUPTIONS.add(1, self.labels())
+            logger.exception(
+                "Error sending worker heartbeat",
+                exc_info=True,
+                extra=self._log_context(),
             )
-            logger.exception("Error sending worker heartbeat", exc_info=True)
         except Exception:
-            logger.exception(
+            logger.exception(
+                "Error sending worker heartbeat",
+                exc_info=True,
+                extra=self._log_context(),
+            )
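
A usage sketch for the new run_at_most(), aimed at testing self-perpetuating tasks: a task key listed in the mapping is struck after its allowed number of executions, then the worker drains as run_until_finished() would. It assumes Docket and Worker are importable from the package root and that Worker's other constructor arguments have defaults; "ticker" is an illustrative key.

import asyncio

from docket import Docket, Worker  # assumed top-level exports

async def main() -> None:
    async with Docket(name="docket", url="redis://localhost:6379/0") as docket:
        async with Worker(docket=docket) as worker:
            # Let the task scheduled under key "ticker" run at most 3 times.
            await worker.run_at_most({"ticker": 3})

asyncio.run(main())
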
{pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.1.4
+Version: 0.2.1
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
pydocket-0.2.1.dist-info/RECORD
ADDED
@@ -0,0 +1,16 @@
+docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
+docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
+docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
+docket/cli.py,sha256=EseF0Sj7IEgd9QDC-FSbHSffvF7DNsrmDGYGgZBdJc8,19413
+docket/dependencies.py,sha256=gIDwcBUhrLk7xGh0ZxdqpsnSeX-hZzGMNvUrVFfqbJI,4281
+docket/docket.py,sha256=zva6ofTm7i5hRwAaAnNtlgIqoMPaNLqCTs2PXGka_8s,19723
+docket/execution.py,sha256=ShP8MoLmxEslk2pAuhKi6KEEKbHdneyQukR9oQwXdjQ,11732
+docket/instrumentation.py,sha256=SUVhVFf8AX2HAfmi0HPTT_QvQezlGPJEKs_1YAmrCbA,4454
+docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
+docket/worker.py,sha256=DH15hW8QBGHaZdOdkpH7bjYtLEydi4sGh-Ei8lEXGOo,20556
+pydocket-0.2.1.dist-info/METADATA,sha256=9DxwXrPzeTCOlxDGn9JUOzQN-k6OjhAJbiRPeMhcNNo,13092
+pydocket-0.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.2.1.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.2.1.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.2.1.dist-info/RECORD,,
pydocket-0.1.4.dist-info/RECORD
DELETED
@@ -1,16 +0,0 @@
-docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
-docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
-docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
-docket/cli.py,sha256=N0vp1zO5Wau4nBDMJOU34hYn11HR3PaYY3Ybk1gS8XY,19188
-docket/dependencies.py,sha256=Vht3qKbik-HQ7jsAU5k-eig4_yuru56-ZewjBVVu4yM,4325
-docket/docket.py,sha256=TWeZ63NfN6Eq4lFzKoQTJz88ECZsH3-gqYszhQl-bXs,20124
-docket/execution.py,sha256=rHsQ60BbNREzcpUC_RvbGUctdLaprYp1x46sT6jTrdc,11416
-docket/instrumentation.py,sha256=USo8ptCFcwQj_YaUpJvsUHPb0QfQr50i9dF4tYgYde4,2992
-docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-docket/tasks.py,sha256=K1f_W1z4m9RVz1GJ1ymWY5ZaRmqHO1SebNBVENlkelU,1471
-docket/worker.py,sha256=8wnWxHj7ctHPxEGSRxPTsHksZ6OWRoG5dKpSkvIZP88,18479
-pydocket-0.1.4.dist-info/METADATA,sha256=y8PTR9Xwh8MeMr7ZhPJzUQGtQUjQXN3QRpYTvxtKfv0,13092
-pydocket-0.1.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.1.4.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.1.4.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.1.4.dist-info/RECORD,,
{pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/WHEEL
File without changes

{pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/entry_points.txt
File without changes

{pydocket-0.1.4.dist-info → pydocket-0.2.1.dist-info}/licenses/LICENSE
File without changes