pydocket-0.8.0-py3-none-any.whl → pydocket-0.9.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pydocket might be problematic.
- docket/docket.py +151 -35
- docket/worker.py +6 -3
- {pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/METADATA +1 -1
- {pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/RECORD +7 -7
- {pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/WHEEL +0 -0
- {pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/entry_points.txt +0 -0
- {pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/licenses/LICENSE +0 -0
docket/docket.py
CHANGED
@@ -16,6 +16,7 @@ from typing import (
     Mapping,
     NoReturn,
     ParamSpec,
+    Protocol,
     Self,
     Sequence,
     TypedDict,
@@ -27,7 +28,6 @@ from typing import (
 import redis.exceptions
 from opentelemetry import propagate, trace
 from redis.asyncio import ConnectionPool, Redis
-from redis.asyncio.client import Pipeline
 from uuid_extensions import uuid7

 from .execution import (
@@ -55,6 +55,18 @@ logger: logging.Logger = logging.getLogger(__name__)
 tracer: trace.Tracer = trace.get_tracer(__name__)


+class _schedule_task(Protocol):
+    async def __call__(
+        self, keys: list[str], args: list[str | float | bytes]
+    ) -> str: ... # pragma: no cover
+
+
+class _cancel_task(Protocol):
+    async def __call__(
+        self, keys: list[str], args: list[str]
+    ) -> str: ... # pragma: no cover
+
+
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -131,6 +143,8 @@ class Docket:

     _monitor_strikes_task: asyncio.Task[None]
     _connection_pool: ConnectionPool
+    _schedule_task_script: _schedule_task | None
+    _cancel_task_script: _cancel_task | None

     def __init__(
         self,
@@ -156,6 +170,8 @@ class Docket:
         self.url = url
         self.heartbeat_interval = heartbeat_interval
         self.missed_heartbeats = missed_heartbeats
+        self._schedule_task_script = None
+        self._cancel_task_script = None

     @property
     def worker_group_name(self) -> str:
@@ -300,9 +316,7 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)

         async with self.redis() as redis:
-
-            await self._schedule(redis, pipeline, execution, replace=False)
-            await pipeline.execute()
+            await self._schedule(redis, execution, replace=False)

         TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
         TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
@@ -361,9 +375,7 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)

         async with self.redis() as redis:
-
-            await self._schedule(redis, pipeline, execution, replace=True)
-            await pipeline.execute()
+            await self._schedule(redis, execution, replace=True)

         TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
         TASKS_CANCELLED.add(1, {**self.labels(), **execution.general_labels()})
@@ -383,9 +395,7 @@ class Docket:
             },
         ):
             async with self.redis() as redis:
-
-                await self._schedule(redis, pipeline, execution, replace=False)
-                await pipeline.execute()
+                await self._schedule(redis, execution, replace=False)

             TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
@@ -400,9 +410,7 @@ class Docket:
             attributes={**self.labels(), "docket.key": key},
         ):
             async with self.redis() as redis:
-
-                await self._cancel(pipeline, key)
-                await pipeline.execute()
+                await self._cancel(redis, key)

             TASKS_CANCELLED.add(1, self.labels())
@@ -423,10 +431,17 @@ class Docket:
     async def _schedule(
         self,
         redis: Redis,
-        pipeline: Pipeline,
         execution: Execution,
         replace: bool = False,
     ) -> None:
+        """Schedule a task atomically.
+
+        Handles:
+        - Checking for task existence
+        - Cancelling existing tasks when replacing
+        - Adding tasks to stream (immediate) or queue (future)
+        - Tracking stream message IDs for later cancellation
+        """
         if self.strike_list.is_stricken(execution):
             logger.warning(
                 "%r is stricken, skipping schedule of %r",
@@ -449,32 +464,133 @@
         key = execution.key
         when = execution.when
         known_task_key = self.known_task_key(key)
+        is_immediate = when <= datetime.now(timezone.utc)

+        # Lock per task key to prevent race conditions between concurrent operations
         async with redis.lock(f"{known_task_key}:lock", timeout=10):
-            if
-
-
-
-
-
-                "
-
-
-
-
+            if self._schedule_task_script is None:
+                self._schedule_task_script = cast(
+                    _schedule_task,
+                    redis.register_script(
+                        # KEYS: stream_key, known_key, parked_key, queue_key
+                        # ARGV: task_key, when_timestamp, is_immediate, replace, ...message_fields
+                        """
+                        local stream_key = KEYS[1]
+                        local known_key = KEYS[2]
+                        local parked_key = KEYS[3]
+                        local queue_key = KEYS[4]
+
+                        local task_key = ARGV[1]
+                        local when_timestamp = ARGV[2]
+                        local is_immediate = ARGV[3] == '1'
+                        local replace = ARGV[4] == '1'
+
+                        -- Extract message fields from ARGV[5] onwards
+                        local message = {}
+                        for i = 5, #ARGV, 2 do
+                            message[#message + 1] = ARGV[i] -- field name
+                            message[#message + 1] = ARGV[i + 1] -- field value
+                        end
+
+                        -- Handle replacement: cancel existing task if needed
+                        if replace then
+                            local existing_message_id = redis.call('HGET', known_key, 'stream_message_id')
+                            if existing_message_id then
+                                redis.call('XDEL', stream_key, existing_message_id)
+                            end
+                            redis.call('DEL', known_key, parked_key)
+                            redis.call('ZREM', queue_key, task_key)
+                        else
+                            -- Check if task already exists
+                            if redis.call('EXISTS', known_key) == 1 then
+                                return 'EXISTS'
+                            end
+                        end
+
+                        if is_immediate then
+                            -- Add to stream and store message ID for later cancellation
+                            local message_id = redis.call('XADD', stream_key, '*', unpack(message))
+                            redis.call('HSET', known_key, 'when', when_timestamp, 'stream_message_id', message_id)
+                            return message_id
+                        else
+                            -- Add to queue with task data in parked hash
+                            redis.call('HSET', known_key, 'when', when_timestamp)
+                            redis.call('HSET', parked_key, unpack(message))
+                            redis.call('ZADD', queue_key, when_timestamp, task_key)
+                            return 'QUEUED'
+                        end
+                        """
+                    ),
+                )
+            schedule_task = self._schedule_task_script

-
+            await schedule_task(
+                keys=[
+                    self.stream_key,
+                    known_task_key,
+                    self.parked_task_key(key),
+                    self.queue_key,
+                ],
+                args=[
+                    key,
+                    str(when.timestamp()),
+                    "1" if is_immediate else "0",
+                    "1" if replace else "0",
+                    *[
+                        item
+                        for field, value in message.items()
+                        for item in (field, value)
+                    ],
+                ],
+            )

-
-
-            else:
-                pipeline.hset(self.parked_task_key(key), mapping=message) # type: ignore[arg-type]
-                pipeline.zadd(self.queue_key, {key: when.timestamp()})
+    async def _cancel(self, redis: Redis, key: str) -> None:
+        """Cancel a task atomically.

-
-
-
-
+        Handles cancellation regardless of task location:
+        - From the stream (using stored message ID)
+        - From the queue (scheduled tasks)
+        - Cleans up all associated metadata keys
+        """
+        if self._cancel_task_script is None:
+            self._cancel_task_script = cast(
+                _cancel_task,
+                redis.register_script(
+                    # KEYS: stream_key, known_key, parked_key, queue_key
+                    # ARGV: task_key
+                    """
+                    local stream_key = KEYS[1]
+                    local known_key = KEYS[2]
+                    local parked_key = KEYS[3]
+                    local queue_key = KEYS[4]
+                    local task_key = ARGV[1]
+
+                    -- Delete from stream if message ID exists
+                    local message_id = redis.call('HGET', known_key, 'stream_message_id')
+                    if message_id then
+                        redis.call('XDEL', stream_key, message_id)
+                    end
+
+                    -- Clean up all task-related keys
+                    redis.call('DEL', known_key, parked_key)
+                    redis.call('ZREM', queue_key, task_key)
+
+                    return 'OK'
+                    """
+                ),
+            )
+        cancel_task = self._cancel_task_script
+
+        # Execute the cancellation script
+        await cancel_task(
+            keys=[
+                self.stream_key,
+                self.known_task_key(key),
+                self.parked_task_key(key),
+                self.queue_key,
+            ],
+            args=[key],
+        )

     @property
     def strike_key(self) -> str:
docket/worker.py
CHANGED
@@ -286,7 +286,7 @@ class Worker:
                 count=available_slots,
             )

-
+            def start_task(message_id: RedisMessageID, message: RedisMessage) -> bool:
                 function_name = message[b"function"].decode()
                 if not (function := self.docket.tasks.get(function_name)):
                     logger.warning(
@@ -347,7 +347,7 @@
                 if not message: # pragma: no cover
                     continue

-                task_started =
+                task_started = start_task(message_id, message)
                 if not task_started:
                     # Other errors - delete and ack
                     await self._delete_known_task(redis, message)
@@ -406,7 +406,7 @@
                 task[task_data[j]] = task_data[j+1]
             end

-            redis.call('XADD', KEYS[2], '*',
+            local message_id = redis.call('XADD', KEYS[2], '*',
                 'key', task['key'],
                 'when', task['when'],
                 'function', task['function'],
@@ -414,6 +414,9 @@
                 'kwargs', task['kwargs'],
                 'attempt', task['attempt']
             )
+            -- Store the message ID in the known task key
+            local known_key = ARGV[2] .. ":known:" .. key
+            redis.call('HSET', known_key, 'stream_message_id', message_id)
             redis.call('DEL', hash_key)
             due_work = due_work + 1
         end
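
The worker change complements the scheduler change: when the Lua mover promotes a due task from the parked queue into the stream, it now captures the ID returned by XADD and records it as `stream_message_id` on the task's known-task hash, so `Docket._cancel` can later XDEL the exact stream entry. A hedged sketch of that lookup-then-delete flow, written client-side for readability (key names are illustrative; docket keeps these steps inside a Lua script precisely so they cannot interleave):

    from redis.asyncio import Redis

    async def cancel_with_stored_id(
        redis: Redis, stream_key: str, known_key: str
    ) -> None:
        # Read back the stream entry ID recorded at XADD time.
        message_id = await redis.hget(known_key, "stream_message_id")
        if message_id is not None:
            # Remove that exact entry from the stream.
            await redis.xdel(stream_key, message_id)
        # Drop the tracking hash itself.
        await redis.delete(known_key)

Without the stored ID there is no way to remove a single pending entry from a Redis stream short of scanning it, so recording the XADD result is what makes precise cancellation of already-enqueued work possible.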
{pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.8.0
+Version: 0.9.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
{pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/RECORD
CHANGED

@@ -3,14 +3,14 @@ docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
 docket/annotations.py,sha256=wttix9UOeMFMAWXAIJUfUw5GjESJZsACb4YXJCozP7Q,2348
 docket/cli.py,sha256=rTfri2--u4Q5PlXyh7Ub_F5uh3-TtZOWLUp9WY_TvAE,25750
 docket/dependencies.py,sha256=BC0bnt10cr9_S1p5JAP_bnC9RwZkTr9ulPBrxC7eZnA,20247
-docket/docket.py,sha256=
+docket/docket.py,sha256=0nQCHDDHy7trv2a0eYygGgIKiA7fWq5GcOXye3_CPWM,30847
 docket/execution.py,sha256=r_2RGC1qhtAcBUg7E6wewLEgftrf3hIxNbH0HnYPbek,14961
 docket/instrumentation.py,sha256=ogvzrfKbWsdPGfdg4hByH3_r5d3b5AwwQkSrmXw0hRg,5492
 docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
-docket/worker.py,sha256=
-pydocket-0.
-pydocket-0.
-pydocket-0.
-pydocket-0.
-pydocket-0.
+docket/worker.py,sha256=jqVYqtQyxbk-BIy3shY8haX-amVT9Np97VhJuaQTfpM,35174
+pydocket-0.9.0.dist-info/METADATA,sha256=kymp9PKG7UwMj0i0qGSSCHKu-g-tS__qydr6mYuMLtg,5418
+pydocket-0.9.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.9.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.9.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.9.0.dist-info/RECORD,,
{pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/WHEEL
File without changes

{pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/entry_points.txt
File without changes

{pydocket-0.8.0.dist-info → pydocket-0.9.0.dist-info}/licenses/LICENSE
File without changes