pydocket 0.9.1__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- docket/docket.py +35 -151
- {pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/METADATA +1 -1
- {pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/RECORD +6 -6
- {pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/WHEEL +0 -0
- {pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/entry_points.txt +0 -0
- {pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/licenses/LICENSE +0 -0
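The substantive change is confined to docket/docket.py: 0.9.1 scheduled and cancelled tasks through two Lua scripts registered with redis.register_script, while 0.10.0 replaces both scripts with ordinary Redis commands buffered on a redis-py pipeline under the existing per-key lock. As a reference point for reading the hunks below, here is a minimal sketch of the redis.asyncio pipeline pattern the new code relies on; the URL and key names are illustrative, not docket's:

import asyncio

from redis.asyncio import Redis


async def main() -> None:
    # Placeholder connection URL for this sketch
    redis = Redis.from_url("redis://localhost:6379/0")
    async with redis.pipeline() as pipe:
        # Pipeline commands buffer client-side and return the pipeline
        # itself, so the individual calls are not awaited.
        pipe.set("example:known:my-task", 1718000000.0)
        pipe.zadd("example:queue", {"my-task": 1718000000.0})
        # One round trip sends the whole batch (a MULTI/EXEC
        # transaction by default).
        results = await pipe.execute()
    print(results)  # e.g. [True, 1]
    await redis.aclose()  # redis-py 5+


asyncio.run(main())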
docket/docket.py
CHANGED
@@ -16,7 +16,6 @@ from typing import (
     Mapping,
     NoReturn,
     ParamSpec,
-    Protocol,
     Self,
     Sequence,
     TypedDict,
@@ -28,6 +27,7 @@ from typing import (
 import redis.exceptions
 from opentelemetry import propagate, trace
 from redis.asyncio import ConnectionPool, Redis
+from redis.asyncio.client import Pipeline
 from uuid_extensions import uuid7

 from .execution import (
@@ -55,18 +55,6 @@ logger: logging.Logger = logging.getLogger(__name__)
 tracer: trace.Tracer = trace.get_tracer(__name__)


-class _schedule_task(Protocol):
-    async def __call__(
-        self, keys: list[str], args: list[str | float | bytes]
-    ) -> str: ... # pragma: no cover
-
-
-class _cancel_task(Protocol):
-    async def __call__(
-        self, keys: list[str], args: list[str]
-    ) -> str: ... # pragma: no cover
-
-
 P = ParamSpec("P")
 R = TypeVar("R")

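The deleted _schedule_task and _cancel_task Protocol classes existed only to pin a static call signature onto the loosely typed script objects that redis.register_script returns; with the scripts gone, they go too. For context, a self-contained sketch of that typing pattern, with an invented one-line script and class name for illustration:

from typing import Protocol, cast

from redis.asyncio import Redis


class _echo_script(Protocol):
    async def __call__(self, keys: list[str], args: list[str]) -> str: ...


async def demo(redis: Redis) -> str:
    # register_script returns an AsyncScript; the cast (mirroring the
    # removed code) tells the type checker exactly how it may be called.
    script = cast(_echo_script, redis.register_script("return ARGV[1]"))
    return await script(keys=[], args=["hello"])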
@@ -143,8 +131,6 @@ class Docket:

     _monitor_strikes_task: asyncio.Task[None]
     _connection_pool: ConnectionPool
-    _schedule_task_script: _schedule_task | None
-    _cancel_task_script: _cancel_task | None

     def __init__(
         self,
@@ -170,8 +156,6 @@ class Docket:
         self.url = url
         self.heartbeat_interval = heartbeat_interval
         self.missed_heartbeats = missed_heartbeats
-        self._schedule_task_script = None
-        self._cancel_task_script = None

     @property
     def worker_group_name(self) -> str:
@@ -316,7 +300,9 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)

         async with self.redis() as redis:
-            await self._schedule(redis, execution, replace=False)
+            async with redis.pipeline() as pipeline:
+                await self._schedule(redis, pipeline, execution, replace=False)
+                await pipeline.execute()

         TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
         TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
@@ -375,7 +361,9 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)

         async with self.redis() as redis:
-            await self._schedule(redis, execution, replace=True)
+            async with redis.pipeline() as pipeline:
+                await self._schedule(redis, pipeline, execution, replace=True)
+                await pipeline.execute()

         TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
         TASKS_CANCELLED.add(1, {**self.labels(), **execution.general_labels()})
@@ -395,7 +383,9 @@ class Docket:
             },
         ):
             async with self.redis() as redis:
-                await self._schedule(redis, execution, replace=False)
+                async with redis.pipeline() as pipeline:
+                    await self._schedule(redis, pipeline, execution, replace=False)
+                    await pipeline.execute()

             TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})

@@ -410,7 +400,9 @@ class Docket:
             attributes={**self.labels(), "docket.key": key},
         ):
             async with self.redis() as redis:
-                await self._cancel(redis, key)
+                async with redis.pipeline() as pipeline:
+                    await self._cancel(pipeline, key)
+                    await pipeline.execute()

             TASKS_CANCELLED.add(1, self.labels())

@@ -431,17 +423,10 @@ class Docket:
     async def _schedule(
         self,
         redis: Redis,
+        pipeline: Pipeline,
         execution: Execution,
         replace: bool = False,
     ) -> None:
-        """Schedule a task atomically.
-
-        Handles:
-        - Checking for task existence
-        - Cancelling existing tasks when replacing
-        - Adding tasks to stream (immediate) or queue (future)
-        - Tracking stream message IDs for later cancellation
-        """
         if self.strike_list.is_stricken(execution):
             logger.warning(
                 "%r is stricken, skipping schedule of %r",
@@ -464,133 +449,32 @@ class Docket:
         key = execution.key
         when = execution.when
         known_task_key = self.known_task_key(key)
-        is_immediate = when <= datetime.now(timezone.utc)

-        # Lock per task key to prevent race conditions between concurrent operations
         async with redis.lock(f"{known_task_key}:lock", timeout=10):
-            if self._schedule_task_script is None:
-                self._schedule_task_script = cast(
-                    _schedule_task,
-                    redis.register_script(
-                        # KEYS: stream_key, known_key, parked_key, queue_key
-                        # ARGV: task_key, when_timestamp, is_immediate, replace, *message
-                        """
-                        local stream_key = KEYS[1]
-                        local known_key = KEYS[2]
-                        local parked_key = KEYS[3]
-                        local queue_key = KEYS[4]
-
-                        local task_key = ARGV[1]
-                        local when_timestamp = ARGV[2]
-                        local is_immediate = ARGV[3] == '1'
-                        local replace = ARGV[4] == '1'
-
-                        -- Extract message fields from ARGV[5] onwards
-                        local message = {}
-                        for i = 5, #ARGV, 2 do
-                            message[#message + 1] = ARGV[i] -- field name
-                            message[#message + 1] = ARGV[i + 1] -- field value
-                        end
-
-                        -- Handle replacement: cancel existing task if needed
-                        if replace then
-                            local existing_message_id = redis.call('HGET', known_key, 'stream_message_id')
-                            if existing_message_id then
-                                redis.call('XDEL', stream_key, existing_message_id)
-                            end
-                            redis.call('DEL', known_key, parked_key)
-                            redis.call('ZREM', queue_key, task_key)
-                        else
-                            -- Check if task already exists
-                            if redis.call('EXISTS', known_key) == 1 then
-                                return 'EXISTS'
-                            end
-                        end
-
-                        if is_immediate then
-                            -- Add to stream and store message ID for later cancellation
-                            local message_id = redis.call('XADD', stream_key, '*', unpack(message))
-                            redis.call('HSET', known_key, 'when', when_timestamp, 'stream_message_id', message_id)
-                            return message_id
-                        else
-                            -- Add to queue with task data in parked hash
-                            redis.call('HSET', known_key, 'when', when_timestamp)
-                            redis.call('HSET', parked_key, unpack(message))
-                            redis.call('ZADD', queue_key, when_timestamp, task_key)
-                            return 'QUEUED'
-                        end
-                        """
-                    ),
-                )
-            schedule_task = self._schedule_task_script
+            if replace:
+                await self._cancel(pipeline, key)
+            else:
+                # if the task is already in the queue or stream, retain it
+                if await redis.exists(known_task_key):
+                    logger.debug(
+                        "Task %r is already in the queue or stream, not scheduling",
+                        key,
+                        extra=self.labels(),
+                    )
+                    return

-            await schedule_task(
-                keys=[
-                    self.stream_key,
-                    known_task_key,
-                    self.parked_task_key(key),
-                    self.queue_key,
-                ],
-                args=[
-                    key,
-                    str(when.timestamp()),
-                    "1" if is_immediate else "0",
-                    "1" if replace else "0",
-                    *[
-                        item
-                        for field, value in message.items()
-                        for item in (field, value)
-                    ],
-                ],
-            )
+            pipeline.set(known_task_key, when.timestamp())

-            …
-            …
+            if when <= datetime.now(timezone.utc):
+                pipeline.xadd(self.stream_key, message)  # type: ignore[arg-type]
+            else:
+                pipeline.hset(self.parked_task_key(key), mapping=message)  # type: ignore[arg-type]
+                pipeline.zadd(self.queue_key, {key: when.timestamp()})

-    async def _cancel(self, redis: Redis, key: str) -> None:
-        """…
-        …
-        …
-        """
-        if self._cancel_task_script is None:
-            self._cancel_task_script = cast(
-                _cancel_task,
-                redis.register_script(
-                    # KEYS: stream_key, known_key, parked_key, queue_key
-                    # ARGV: task_key
-                    """
-                    local stream_key = KEYS[1]
-                    local known_key = KEYS[2]
-                    local parked_key = KEYS[3]
-                    local queue_key = KEYS[4]
-                    local task_key = ARGV[1]
-
-                    -- Delete from stream if message ID exists
-                    local message_id = redis.call('HGET', known_key, 'stream_message_id')
-                    if message_id then
-                        redis.call('XDEL', stream_key, message_id)
-                    end
-
-                    -- Clean up all task-related keys
-                    redis.call('DEL', known_key, parked_key)
-                    redis.call('ZREM', queue_key, task_key)
-
-                    return 'OK'
-                    """
-                ),
-            )
-        cancel_task = self._cancel_task_script
-
-        # Execute the cancellation script
-        await cancel_task(
-            keys=[
-                self.stream_key,
-                self.known_task_key(key),
-                self.parked_task_key(key),
-                self.queue_key,
-            ],
-            args=[key],
-        )
+    async def _cancel(self, pipeline: Pipeline, key: str) -> None:
+        pipeline.delete(self.known_task_key(key))
+        pipeline.delete(self.parked_task_key(key))
+        pipeline.zrem(self.queue_key, key)

     @property
     def strike_key(self) -> str:
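Net effect: the existence check, parking, queueing, and stream insertion are now plain Redis commands, with the per-key redis.lock supplying the mutual exclusion the Lua scripts previously got from Redis's single-threaded script execution. Note also that stream message IDs are no longer recorded, so the new _cancel does not XDEL an entry that has already reached the stream. A rough standalone approximation of the new scheduling flow, using invented key names rather than docket's actual layout:

import time

from redis.asyncio import Redis


async def schedule(
    redis: Redis, key: str, when: float, message: dict[str, str]
) -> None:
    # Hypothetical key layout, standing in for docket's known/parked/queue keys
    known_key = f"example:known:{key}"
    async with redis.lock(f"{known_key}:lock", timeout=10):
        if await redis.exists(known_key):
            return  # already queued or streamed; retain the existing task
        async with redis.pipeline() as pipe:
            pipe.set(known_key, when)
            if when <= time.time():
                pipe.xadd("example:stream", message)  # due now: straight to the stream
            else:
                pipe.hset(f"example:parked:{key}", mapping=message)  # park the payload
                pipe.zadd("example:queue", {key: when})  # rank by due timestamp
            await pipe.execute()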
{pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.9.1
+Version: 0.10.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
{pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/RECORD
CHANGED

@@ -3,14 +3,14 @@ docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
 docket/annotations.py,sha256=wttix9UOeMFMAWXAIJUfUw5GjESJZsACb4YXJCozP7Q,2348
 docket/cli.py,sha256=rTfri2--u4Q5PlXyh7Ub_F5uh3-TtZOWLUp9WY_TvAE,25750
 docket/dependencies.py,sha256=BC0bnt10cr9_S1p5JAP_bnC9RwZkTr9ulPBrxC7eZnA,20247
-docket/docket.py,sha256=…
+docket/docket.py,sha256=Cw7QB1d0eDwSgwn0Rj26WjFsXSe7MJtfsUBBHGalL7A,26262
 docket/execution.py,sha256=r_2RGC1qhtAcBUg7E6wewLEgftrf3hIxNbH0HnYPbek,14961
 docket/instrumentation.py,sha256=ogvzrfKbWsdPGfdg4hByH3_r5d3b5AwwQkSrmXw0hRg,5492
 docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
 docket/worker.py,sha256=pOBRoEbakUwAGVKAuCNPSMyHRBSalUxtMc93QZewX7M,34928
-pydocket-0.9.1.dist-info/METADATA,sha256=…
-pydocket-0.9.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.9.1.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.9.1.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.9.1.dist-info/RECORD,,
+pydocket-0.10.0.dist-info/METADATA,sha256=Tsm_S5NTj5yOPmt-q4KAKjdEDjH6ZRzz_ITVapnFk64,5419
+pydocket-0.10.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.10.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.10.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.10.0.dist-info/RECORD,,
{pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/WHEEL
File without changes

{pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/entry_points.txt
File without changes

{pydocket-0.9.1.dist-info → pydocket-0.10.0.dist-info}/licenses/LICENSE
File without changes