pydocket 0.1.3-py3-none-any.whl → 0.2.0-py3-none-any.whl
(In the diffs below, an ellipsis … marks line content that the diff viewer truncated or did not render.)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- docket/cli.py +8 -0
- docket/dependencies.py +8 -9
- docket/docket.py +37 -37
- docket/execution.py +19 -8
- docket/instrumentation.py +53 -0
- docket/tasks.py +11 -9
- docket/worker.py +72 -58
- {pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/METADATA +1 -1
- pydocket-0.2.0.dist-info/RECORD +16 -0
- pydocket-0.1.3.dist-info/RECORD +0 -16
- {pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/WHEEL +0 -0
- {pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/entry_points.txt +0 -0
- {pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/licenses/LICENSE +0 -0
docket/cli.py
CHANGED
@@ -243,6 +243,13 @@ def worker(
             help="Exit after the current docket is finished",
         ),
     ] = False,
+    metrics_port: Annotated[
+        int | None,
+        typer.Option(
+            "--metrics-port",
+            help="The port to serve Prometheus metrics on",
+        ),
+    ] = None,
 ) -> None:
     asyncio.run(
         Worker.run(
@@ -254,6 +261,7 @@ def worker(
             reconnection_delay=reconnection_delay,
             minimum_check_interval=minimum_check_interval,
             until_finished=until_finished,
+            metrics_port=metrics_port,
             tasks=tasks,
         )
     )
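The new option lets a worker expose Prometheus metrics at startup. A hypothetical invocation, assuming the package installs a `docket` console script (its entry_points.txt is unchanged in this release) and leaving the other options at their defaults:

    docket worker --metrics-port 9464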
docket/dependencies.py
CHANGED
@@ -61,15 +61,14 @@ class _TaskLogger(Dependency):
         self, docket: Docket, worker: Worker, execution: Execution
     ) -> logging.LoggerAdapter[logging.Logger]:
         logger = logging.getLogger(f"docket.task.{execution.function.__name__}")
-        …
-        return logging.LoggerAdapter(logger, extra)
+        return logging.LoggerAdapter(
+            logger,
+            {
+                **docket.labels(),
+                **worker.labels(),
+                **execution.specific_labels(),
+            },
+        )
 
 
 def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
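The rewritten dependency builds the adapter's extra mapping from the docket, worker, and execution label helpers instead of a hand-rolled dict, so every record a task logs carries the same labels. A minimal standalone sketch of the stdlib mechanism involved, with made-up label values:

    import logging

    # LoggerAdapter attaches a fixed mapping to every record it emits, so a
    # format string can reference those keys directly.
    logging.basicConfig(format="%(message)s [docket=%(docket.name)s]", level=logging.INFO)

    logger = logging.getLogger("docket.task.example")
    adapter = logging.LoggerAdapter(logger, {"docket.name": "orders"})
    adapter.info("task started")  # -> task started [docket=orders]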
docket/docket.py
CHANGED
@@ -13,6 +13,7 @@ from typing import (
     Collection,
     Hashable,
     Iterable,
+    Mapping,
     NoReturn,
     ParamSpec,
     Self,
@@ -26,7 +27,7 @@ from uuid import uuid4
 
 import redis.exceptions
 from opentelemetry import propagate, trace
-from redis.asyncio import Redis
+from redis.asyncio import ConnectionPool, Redis
 
 from .execution import (
     Execution,
@@ -112,11 +113,14 @@ class Docket:
     tasks: dict[str, Callable[..., Awaitable[Any]]]
     strike_list: StrikeList
 
+    _monitor_strikes_task: asyncio.Task[None]
+    _connection_pool: ConnectionPool
+
     def __init__(
         self,
         name: str = "docket",
         url: str = "redis://localhost:6379/0",
-        heartbeat_interval: timedelta = timedelta(seconds=…
+        heartbeat_interval: timedelta = timedelta(seconds=2),
         missed_heartbeats: int = 5,
     ) -> None:
         """
@@ -144,6 +148,7 @@ class Docket:
         self.tasks = {fn.__name__: fn for fn in standard_tasks}
         self.strike_list = StrikeList()
 
+        self._connection_pool = ConnectionPool.from_url(self.url)  # type: ignore
        self._monitor_strikes_task = asyncio.create_task(self._monitor_strikes())
 
         # Ensure that the stream and worker group exist
@@ -176,20 +181,17 @@ class Docket:
         except asyncio.CancelledError:
             pass
 
+        await asyncio.shield(self._connection_pool.disconnect())
+        del self._connection_pool
+
     @asynccontextmanager
     async def redis(self) -> AsyncGenerator[Redis, None]:
-        …
+        r = Redis(connection_pool=self._connection_pool)
+        await r.__aenter__()
         try:
-            …
-                self.url,
-                single_connection_client=True,
-            )
-            async with redis:
-                yield redis
+            yield r
         finally:
-            …
-            if redis:
-                await redis.connection_pool.disconnect()
+            await asyncio.shield(r.__aexit__(None, None, None))
 
     def register(self, function: Callable[..., Awaitable[Any]]) -> None:
         from .dependencies import validate_dependencies
@@ -211,6 +213,11 @@ class Docket:
         for function in collection:
             self.register(function)
 
+    def labels(self) -> Mapping[str, str]:
+        return {
+            "docket.name": self.name,
+        }
+
     @overload
     def add(
         self,
@@ -248,7 +255,7 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)
         await self.schedule(execution)
 
-        TASKS_ADDED.add(1, {…
+        TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
 
         return execution
 
@@ -284,7 +291,7 @@ class Docket:
         await self.cancel(key)
         await self.schedule(execution)
 
-        TASKS_REPLACED.add(1, {…
+        TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
 
         return execution
 
@@ -311,9 +318,9 @@
             TASKS_STRICKEN.add(
                 1,
                 {
-                    …
-                    …
-                    "where": "docket",
+                    **self.labels(),
+                    **execution.specific_labels(),
+                    "docket.where": "docket",
                 },
             )
             return
@@ -324,10 +331,8 @@
         with tracer.start_as_current_span(
             "docket.schedule",
             attributes={
-                …
-                …
-                "docket.execution.key": execution.key,
-                "docket.execution.attempt": execution.attempt,
+                **self.labels(),
+                **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
         ):
@@ -347,16 +352,14 @@
                 pipe.zadd(self.queue_key, {key: when.timestamp()})
                 await pipe.execute()
 
-        TASKS_SCHEDULED.add(
-            1, {"docket": self.name, "task": execution.function.__name__}
-        )
+        TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
 
     async def cancel(self, key: str) -> None:
         with tracer.start_as_current_span(
             "docket.cancel",
             attributes={
-                …
-                "docket.…
+                **self.labels(),
+                "docket.key": key,
             },
         ):
             async with self.redis() as redis:
@@ -365,7 +368,7 @@
                 pipe.zrem(self.queue_key, key)
                 await pipe.execute()
 
-        TASKS_CANCELLED.add(1, …
+        TASKS_CANCELLED.add(1, self.labels())
 
     @property
     def strike_key(self) -> str:
@@ -405,8 +408,8 @@
         with tracer.start_as_current_span(
             f"docket.{instruction.direction}",
             attributes={
-                …
-                **instruction.…
+                **self.labels(),
+                **instruction.labels(),
             },
         ):
             async with self.redis() as redis:
@@ -438,18 +441,15 @@
                     else "Restoring"
                 ),
                 instruction.call_repr(),
-                extra=…
+                extra=self.labels(),
             )
 
-            counter_labels = {"docket": self.name}
-            if instruction.function:
-                counter_labels["task"] = instruction.function
-            if instruction.parameter:
-                counter_labels["parameter"] = instruction.parameter
-
             STRIKES_IN_EFFECT.add(
                 1 if instruction.direction == "strike" else -1,
-                …
+                {
+                    **self.labels(),
+                    **instruction.labels(),
+                },
             )
 
         except redis.exceptions.ConnectionError:  # pragma: no cover
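The Docket now owns a single ConnectionPool created in __init__ and torn down on exit (behind asyncio.shield, so cancellation cannot interrupt the cleanup), replacing the previous single_connection_client opened per redis() call. A minimal sketch of the same pattern outside Docket, assuming redis-py's asyncio API and an illustrative URL:

    import asyncio
    from contextlib import asynccontextmanager
    from typing import AsyncGenerator

    from redis.asyncio import ConnectionPool, Redis

    pool = ConnectionPool.from_url("redis://localhost:6379/0")


    @asynccontextmanager
    async def connection() -> AsyncGenerator[Redis, None]:
        # Each caller borrows from the shared pool rather than opening its
        # own dedicated connection per call.
        r = Redis(connection_pool=pool)
        await r.__aenter__()
        try:
            yield r
        finally:
            # shield() lets the connection return to the pool even if the
            # surrounding task is cancelled mid-exit.
            await asyncio.shield(r.__aexit__(None, None, None))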
docket/execution.py
CHANGED
@@ -3,7 +3,7 @@ import enum
 import inspect
 import logging
 from datetime import datetime
-from typing import Any, Awaitable, Callable, Hashable, Literal, Self, cast
+from typing import Any, Awaitable, Callable, Hashable, Literal, Mapping, Self, cast
 
 import cloudpickle  # type: ignore[import]
 
@@ -55,6 +55,17 @@ class Execution:
             attempt=int(message[b"attempt"].decode()),
         )
 
+    def general_labels(self) -> Mapping[str, str]:
+        return {"docket.task": self.function.__name__}
+
+    def specific_labels(self) -> Mapping[str, str | int]:
+        return {
+            "docket.task": self.function.__name__,
+            "docket.key": self.key,
+            "docket.when": self.when.isoformat(),
+            "docket.attempt": self.attempt,
+        }
+
     def call_repr(self) -> str:
         arguments: list[str] = []
         signature = inspect.signature(self.function)
@@ -131,17 +142,17 @@ class StrikeInstruction(abc.ABC):
         else:
             return Restore(function, parameter, operator, value)
 
-    def …
-    …
+    def labels(self) -> Mapping[str, str]:
+        labels: dict[str, str] = {}
         if self.function:
-            …
+            labels["docket.task"] = self.function
 
         if self.parameter:
-            …
-            …
-            …
+            labels["docket.parameter"] = self.parameter
+            labels["docket.operator"] = self.operator
+            labels["docket.value"] = repr(self.value)
 
-        return …
+        return labels
 
     def call_repr(self) -> str:
         return (
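The two helpers split labels by cardinality: general_labels() is bounded (one value per task name) and suits counters, while specific_labels() adds per-execution detail meant for spans and log records. An illustration of the resulting attribute sets, with made-up values:

    # Bounded attributes, safe for a counter:
    general = {"docket.name": "orders", "docket.task": "send_email"}

    # Per-execution attributes, for spans and logs only:
    specific = {
        "docket.name": "orders",
        "docket.task": "send_email",
        "docket.key": "send_email:42",
        "docket.when": "2025-01-01T00:00:00+00:00",
        "docket.attempt": 1,
    }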
docket/instrumentation.py
CHANGED
@@ -1,5 +1,12 @@
+import threading
+from contextlib import contextmanager
+from typing import Generator, cast
+
 from opentelemetry import metrics
+from opentelemetry.exporter.prometheus import PrometheusMetricReader
+from opentelemetry.metrics import set_meter_provider
 from opentelemetry.propagators.textmap import Getter, Setter
+from opentelemetry.sdk.metrics import MeterProvider
 
 meter: metrics.Meter = metrics.get_meter("docket")
 
@@ -93,6 +100,17 @@ STRIKES_IN_EFFECT = meter.create_up_down_counter(
     unit="1",
 )
 
+QUEUE_DEPTH = meter.create_gauge(
+    "docket_queue_depth",
+    description="How many tasks are due to be executed now",
+    unit="1",
+)
+SCHEDULE_DEPTH = meter.create_gauge(
+    "docket_schedule_depth",
+    description="How many tasks are scheduled to be executed in the future",
+    unit="1",
+)
+
 Message = dict[bytes, bytes]
 
 
@@ -119,3 +137,38 @@ class MessageSetter(Setter[Message]):
 
 message_getter: MessageGetter = MessageGetter()
 message_setter: MessageSetter = MessageSetter()
+
+
+@contextmanager
+def metrics_server(
+    host: str = "0.0.0.0", port: int | None = None
+) -> Generator[None, None, None]:
+    if port is None:
+        yield
+        return
+
+    from wsgiref.types import WSGIApplication
+
+    from prometheus_client import REGISTRY
+    from prometheus_client.exposition import (
+        ThreadingWSGIServer,
+        _SilentHandler,  # type: ignore[member-access]
+        make_server,  # type: ignore[import]
+        make_wsgi_app,  # type: ignore[import]
+    )
+
+    set_meter_provider(MeterProvider(metric_readers=[PrometheusMetricReader()]))
+
+    server = make_server(
+        host,
+        port,
+        cast(WSGIApplication, make_wsgi_app(registry=REGISTRY)),
+        ThreadingWSGIServer,
+        handler_class=_SilentHandler,
+    )
+    with server:
+        t = threading.Thread(target=server.serve_forever)
+        t.daemon = True
+        t.start()
+
+        yield
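metrics_server degrades to a no-op when port is None, so callers can wrap themselves unconditionally. A usage sketch (the port number is illustrative):

    import time

    from docket.instrumentation import metrics_server

    with metrics_server(port=9464):
        # While this block runs, a daemon thread serves the Prometheus
        # registry; scrape http://localhost:9464/metrics for the docket_*
        # instruments.
        time.sleep(60)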
docket/tasks.py
CHANGED
@@ -2,16 +2,21 @@ import asyncio
 import logging
 from datetime import datetime, timezone
 
-from .dependencies import …
+from .dependencies import (
+    CurrentDocket,
+    CurrentExecution,
+    CurrentWorker,
+    Retry,
+    TaskLogger,
+)
 from .docket import Docket, TaskCollection
 from .execution import Execution
 from .worker import Worker
 
-logger: logging.Logger = logging.getLogger(__name__)
-
 
 async def trace(
     message: str,
+    logger: logging.LoggerAdapter[logging.Logger] = TaskLogger(),
     docket: Docket = CurrentDocket(),
     worker: Worker = CurrentWorker(),
     execution: Execution = CurrentExecution(),
@@ -23,11 +28,6 @@ async def trace(
         docket.name,
         (datetime.now(timezone.utc) - execution.when),
         worker.name,
-        extra={
-            "docket.name": docket.name,
-            "worker.name": worker.name,
-            "execution.key": execution.key,
-        },
     )
 
 
@@ -45,7 +45,9 @@ async def fail(
     )
 
 
-async def sleep(…
+async def sleep(
+    seconds: float, logger: logging.LoggerAdapter[logging.Logger] = TaskLogger()
+) -> None:
     logger.info("Sleeping for %s seconds", seconds)
     await asyncio.sleep(seconds)
 
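With the module-level logger gone, the built-in tasks receive their logger as a TaskLogger() default parameter, resolved per execution with the full label set. User-defined tasks can opt in the same way; a sketch with a hypothetical task:

    import logging

    from docket.dependencies import TaskLogger


    async def greet(
        name: str,
        logger: logging.LoggerAdapter[logging.Logger] = TaskLogger(),
    ) -> None:
        # The worker replaces the TaskLogger() marker with an adapter that
        # already carries the docket, worker, and execution labels.
        logger.info("Hello, %s!", name)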
docket/worker.py
CHANGED
@@ -7,6 +7,7 @@ from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
+    Mapping,
     Protocol,
     Self,
     TypeVar,
@@ -27,7 +28,9 @@ from .docket import (
     RedisReadGroupResponse,
 )
 from .instrumentation import (
+    QUEUE_DEPTH,
     REDIS_DISRUPTIONS,
+    SCHEDULE_DEPTH,
     TASK_DURATION,
     TASK_PUNCTUALITY,
     TASKS_COMPLETED,
@@ -38,6 +41,7 @@ from .instrumentation import (
     TASKS_STRICKEN,
     TASKS_SUCCEEDED,
     message_getter,
+    metrics_server,
 )
 
 logger: logging.Logger = logging.getLogger(__name__)
@@ -94,11 +98,17 @@ class Worker:
             pass
         del self._heartbeat_task
 
-    …
-    …
+    def labels(self) -> Mapping[str, str]:
+        return {
+            **self.docket.labels(),
+            "docket.worker": self.name,
+        }
+
+    def _log_context(self) -> Mapping[str, str]:
         return {
-            …
-            "…
+            **self.labels(),
+            "docket.queue_key": self.docket.queue_key,
+            "docket.stream_key": self.docket.stream_key,
         }
 
     @classmethod
@@ -112,24 +122,26 @@ class Worker:
         reconnection_delay: timedelta = timedelta(seconds=5),
         minimum_check_interval: timedelta = timedelta(milliseconds=100),
         until_finished: bool = False,
+        metrics_port: int | None = None,
         tasks: list[str] = ["docket.tasks:standard_tasks"],
     ) -> None:
-        …
+        with metrics_server(port=metrics_port):
+            async with Docket(name=docket_name, url=url) as docket:
+                for task_path in tasks:
+                    docket.register_collection(task_path)
+
+                async with Worker(
+                    docket=docket,
+                    name=name,
+                    concurrency=concurrency,
+                    redelivery_timeout=redelivery_timeout,
+                    reconnection_delay=reconnection_delay,
+                    minimum_check_interval=minimum_check_interval,
+                ) as worker:
+                    if until_finished:
+                        await worker.run_until_finished()
+                    else:
+                        await worker.run_forever()  # pragma: no cover
 
     async def run_until_finished(self) -> None:
         """Run the worker until there are no more tasks to process."""
@@ -149,9 +161,7 @@
             try:
                 return await self._worker_loop(forever=forever)
             except redis.exceptions.ConnectionError:
-                REDIS_DISRUPTIONS.add(
-                    1, {"docket": self.docket.name, "worker": self.name}
-                )
+                REDIS_DISRUPTIONS.add(1, self.labels())
                 logger.warning(
                     "Error connecting to redis, retrying in %s...",
                     self.reconnection_delay,
@@ -263,7 +273,7 @@
                     future_work,
                     self.docket.queue_key,
                     self.docket.stream_key,
-                    extra=self._log_context,
+                    extra=self._log_context(),
                )
 
                 redeliveries: RedisMessages
@@ -280,8 +290,6 @@
 
                 for message_id, message in redeliveries:
                     start_task(message_id, message)
-                    if available_slots <= 0:
-                        break
 
                 if available_slots <= 0:
                     continue
@@ -300,14 +308,13 @@
                 for _, messages in new_deliveries:
                     for message_id, message in messages:
                         start_task(message_id, message)
-
-                    break
+
         except asyncio.CancelledError:
             if active_tasks:  # pragma: no cover
                 logger.info(
                     "Shutdown requested, finishing %d active tasks...",
                     len(active_tasks),
-                    extra=self._log_context,
+                    extra=self._log_context(),
                 )
         finally:
             if active_tasks:
@@ -315,28 +322,20 @@
                 await process_completed_tasks()
 
     async def _execute(self, message: RedisMessage) -> None:
+        log_context: dict[str, str | float] = self._log_context()
+
         function_name = message[b"function"].decode()
         function = self.docket.tasks.get(function_name)
         if function is None:
             logger.warning(
-                "Task function %r not found", function_name, extra=…
+                "Task function %r not found", function_name, extra=log_context
             )
             return
 
         execution = Execution.from_message(function, message)
-        name = execution.function.__name__
-        key = execution.key
 
-        log_context…
-        …
-            "task": name,
-            "key": key,
-        }
-        counter_labels = {
-            "docket": self.docket.name,
-            "worker": self.name,
-            "task": name,
-        }
+        log_context |= execution.specific_labels()
+        counter_labels = {**self.labels(), **execution.general_labels()}
 
         arrow = "↬" if execution.attempt > 1 else "↪"
         call = execution.call_repr()
@@ -344,7 +343,7 @@
         if self.docket.strike_list.is_stricken(execution):
             arrow = "🗙"
             logger.warning("%s %s", arrow, call, extra=log_context)
-            TASKS_STRICKEN.add(1, counter_labels | {"where": "worker"})
+            TASKS_STRICKEN.add(1, counter_labels | {"docket.where": "worker"})
             return
 
         dependencies = self._get_dependencies(execution)
@@ -369,11 +368,8 @@
             execution.function.__name__,
             kind=trace.SpanKind.CONSUMER,
             attributes={
-                …
-                …
-                "docket.execution.key": execution.key,
-                "docket.execution.attempt": execution.attempt,
-                "docket.execution.punctuality": punctuality.total_seconds(),
+                **self.labels(),
+                **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
             links=links,
@@ -441,12 +437,7 @@
             execution.attempt += 1
             await self.docket.schedule(execution)
 
-            …
-                "docket": self.docket.name,
-                "worker": self.name,
-                "task": execution.function.__name__,
-            }
-            TASKS_RETRIED.add(1, counter_labels)
+            TASKS_RETRIED.add(1, {**self.labels(), **execution.specific_labels()})
             return True
 
         return False
@@ -490,12 +481,35 @@
                 )
 
                 await pipeline.execute()
+
+            async with r.pipeline() as pipeline:
+                pipeline.xlen(self.docket.stream_key)
+                pipeline.zcount(self.docket.queue_key, 0, now)
+                pipeline.zcount(self.docket.queue_key, now, "+inf")
+
+                (
+                    stream_depth,
+                    overdue_depth,
+                    schedule_depth,
+                ) = await pipeline.execute()
+
+                QUEUE_DEPTH.set(
+                    stream_depth + overdue_depth, self.docket.labels()
+                )
+                SCHEDULE_DEPTH.set(schedule_depth, self.docket.labels())
+
         except asyncio.CancelledError:  # pragma: no cover
             return
         except redis.exceptions.ConnectionError:
-            REDIS_DISRUPTIONS.add(
-                …
+            REDIS_DISRUPTIONS.add(1, self.labels())
+            logger.exception(
+                "Error sending worker heartbeat",
+                exc_info=True,
+                extra=self._log_context(),
             )
-            logger.exception("Error sending worker heartbeat", exc_info=True)
         except Exception:
-            logger.exception(
+            logger.exception(
+                "Error sending worker heartbeat",
+                exc_info=True,
+                extra=self._log_context(),
+            )
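The heartbeat now derives both gauges from one pipeline: stream entries plus overdue queue members form the queue depth, and future-dated members form the schedule depth. A standalone sketch of that arithmetic, assuming redis-py's asyncio client and the same two keys:

    import time

    from redis.asyncio import Redis


    async def measure_depths(r: Redis, stream_key: str, queue_key: str) -> tuple[int, int]:
        now = time.time()
        async with r.pipeline() as pipe:
            pipe.xlen(stream_key)                # tasks already delivered to the stream
            pipe.zcount(queue_key, 0, now)       # scheduled tasks that are now overdue
            pipe.zcount(queue_key, now, "+inf")  # tasks still scheduled for later
            stream_depth, overdue, future = await pipe.execute()
        # Queue depth is everything runnable now; schedule depth is the rest.
        return stream_depth + overdue, future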
{pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.1.3
+Version: 0.2.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
pydocket-0.2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,16 @@
+docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
+docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
+docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
+docket/cli.py,sha256=EseF0Sj7IEgd9QDC-FSbHSffvF7DNsrmDGYGgZBdJc8,19413
+docket/dependencies.py,sha256=gIDwcBUhrLk7xGh0ZxdqpsnSeX-hZzGMNvUrVFfqbJI,4281
+docket/docket.py,sha256=zva6ofTm7i5hRwAaAnNtlgIqoMPaNLqCTs2PXGka_8s,19723
+docket/execution.py,sha256=ShP8MoLmxEslk2pAuhKi6KEEKbHdneyQukR9oQwXdjQ,11732
+docket/instrumentation.py,sha256=SUVhVFf8AX2HAfmi0HPTT_QvQezlGPJEKs_1YAmrCbA,4454
+docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
+docket/worker.py,sha256=UZIPfAsIhsBsr2tBCgGGkLKU1mJs_nnP8-Retwl3218,19104
+pydocket-0.2.0.dist-info/METADATA,sha256=X8Yqvi_cqCqYaGu6ZGr4dMvxqcvy6otYvt-J2jwCHOs,13092
+pydocket-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.2.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.2.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.2.0.dist-info/RECORD,,
pydocket-0.1.3.dist-info/RECORD
DELETED
@@ -1,16 +0,0 @@
-docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
-docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
-docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
-docket/cli.py,sha256=N0vp1zO5Wau4nBDMJOU34hYn11HR3PaYY3Ybk1gS8XY,19188
-docket/dependencies.py,sha256=Vht3qKbik-HQ7jsAU5k-eig4_yuru56-ZewjBVVu4yM,4325
-docket/docket.py,sha256=p0bKDkOiEWh2_L-sv1iie1Y2QVu3R7F7IU0K2AulE08,19991
-docket/execution.py,sha256=rHsQ60BbNREzcpUC_RvbGUctdLaprYp1x46sT6jTrdc,11416
-docket/instrumentation.py,sha256=USo8ptCFcwQj_YaUpJvsUHPb0QfQr50i9dF4tYgYde4,2992
-docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-docket/tasks.py,sha256=K1f_W1z4m9RVz1GJ1ymWY5ZaRmqHO1SebNBVENlkelU,1471
-docket/worker.py,sha256=FlBvfdaQHS-F5mtIGOLGIJsyawkjsUEu-E8bek3vCxQ,18652
-pydocket-0.1.3.dist-info/METADATA,sha256=i-EtdoGrCe7D105-gNAQfug1DEDrTjzIVdR78vF0njk,13092
-pydocket-0.1.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.1.3.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.1.3.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.1.3.dist-info/RECORD,,
{pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/WHEEL
File without changes

{pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/entry_points.txt
File without changes

{pydocket-0.1.3.dist-info → pydocket-0.2.0.dist-info}/licenses/LICENSE
File without changes