pydocket-0.9.2-py3-none-any.whl → pydocket-0.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pydocket might be problematic.

docket/docket.py CHANGED
@@ -16,7 +16,6 @@ from typing import (
     Mapping,
     NoReturn,
     ParamSpec,
-    Protocol,
     Self,
     Sequence,
     TypedDict,
@@ -28,6 +27,7 @@ from typing import (
 import redis.exceptions
 from opentelemetry import propagate, trace
 from redis.asyncio import ConnectionPool, Redis
+from redis.asyncio.client import Pipeline
 from uuid_extensions import uuid7
 
 from .execution import (
@@ -55,18 +55,6 @@ logger: logging.Logger = logging.getLogger(__name__)
 tracer: trace.Tracer = trace.get_tracer(__name__)
 
 
-class _schedule_task(Protocol):
-    async def __call__(
-        self, keys: list[str], args: list[str | float | bytes]
-    ) -> str: ...  # pragma: no cover
-
-
-class _cancel_task(Protocol):
-    async def __call__(
-        self, keys: list[str], args: list[str]
-    ) -> str: ...  # pragma: no cover
-
-
 P = ParamSpec("P")
 R = TypeVar("R")
 
@@ -143,8 +131,6 @@ class Docket:
 
     _monitor_strikes_task: asyncio.Task[None]
     _connection_pool: ConnectionPool
-    _schedule_task_script: _schedule_task | None
-    _cancel_task_script: _cancel_task | None
 
     def __init__(
         self,
@@ -170,8 +156,6 @@ class Docket:
         self.url = url
         self.heartbeat_interval = heartbeat_interval
         self.missed_heartbeats = missed_heartbeats
-        self._schedule_task_script = None
-        self._cancel_task_script = None
 
     @property
     def worker_group_name(self) -> str:
@@ -316,7 +300,9 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)
 
         async with self.redis() as redis:
-            await self._schedule(redis, execution, replace=False)
+            async with redis.pipeline() as pipeline:
+                await self._schedule(redis, pipeline, execution, replace=False)
+                await pipeline.execute()
 
         TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
         TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
@@ -375,7 +361,9 @@ class Docket:
         execution = Execution(function, args, kwargs, when, key, attempt=1)
 
         async with self.redis() as redis:
-            await self._schedule(redis, execution, replace=True)
+            async with redis.pipeline() as pipeline:
+                await self._schedule(redis, pipeline, execution, replace=True)
+                await pipeline.execute()
 
         TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
         TASKS_CANCELLED.add(1, {**self.labels(), **execution.general_labels()})
@@ -395,7 +383,9 @@ class Docket:
             },
         ):
             async with self.redis() as redis:
-                await self._schedule(redis, execution, replace=False)
+                async with redis.pipeline() as pipeline:
+                    await self._schedule(redis, pipeline, execution, replace=False)
+                    await pipeline.execute()
 
             TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
 
@@ -410,7 +400,9 @@ class Docket:
             attributes={**self.labels(), "docket.key": key},
         ):
             async with self.redis() as redis:
-                await self._cancel(redis, key)
+                async with redis.pipeline() as pipeline:
+                    await self._cancel(pipeline, key)
+                    await pipeline.execute()
 
             TASKS_CANCELLED.add(1, self.labels())
 
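Note: the four hunks above all move scheduling and cancellation onto a Redis pipeline that the caller executes. The sketch below is not docket's API; it only illustrates the redis-py asyncio pipeline semantics these hunks rely on, with made-up "example:*" key names. Pipeline commands buffer client-side (which is why the new code never awaits pipeline.set(...) or pipeline.zadd(...)), and nothing reaches Redis until execute() is awaited.

# Sketch only, assuming redis-py 5.x (redis.asyncio); the "example:*" keys
# are hypothetical stand-ins for docket's known-task and queue keys.
import asyncio
from datetime import datetime, timezone

from redis.asyncio import Redis


async def main() -> None:
    redis = Redis.from_url("redis://localhost:6379/0")
    try:
        async with redis.pipeline() as pipeline:
            now = datetime.now(timezone.utc).timestamp()
            # Pipeline commands buffer client-side and return the pipeline
            # itself, so they are not awaited individually.
            pipeline.set("example:known-task:greet", now)
            pipeline.zadd("example:queue", {"greet": now})
            # execute() sends every buffered command in one round trip,
            # wrapped in MULTI/EXEC by default so they apply atomically.
            results = await pipeline.execute()
            print(results)  # e.g. [True, 1]
    finally:
        await redis.aclose()


if __name__ == "__main__":
    asyncio.run(main())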
@@ -428,23 +420,13 @@ class Docket:
     def parked_task_key(self, key: str) -> str:
         return f"{self.name}:{key}"
 
-    def stream_id_key(self, key: str) -> str:
-        return f"{self.name}:stream-id:{key}"
-
     async def _schedule(
         self,
         redis: Redis,
+        pipeline: Pipeline,
         execution: Execution,
         replace: bool = False,
     ) -> None:
-        """Schedule a task atomically.
-
-        Handles:
-        - Checking for task existence
-        - Cancelling existing tasks when replacing
-        - Adding tasks to stream (immediate) or queue (future)
-        - Tracking stream message IDs for later cancellation
-        """
         if self.strike_list.is_stricken(execution):
             logger.warning(
                 "%r is stricken, skipping schedule of %r",
@@ -467,138 +449,32 @@ class Docket:
         key = execution.key
         when = execution.when
         known_task_key = self.known_task_key(key)
-        is_immediate = when <= datetime.now(timezone.utc)
 
-        # Lock per task key to prevent race conditions between concurrent operations
         async with redis.lock(f"{known_task_key}:lock", timeout=10):
-            if self._schedule_task_script is None:
-                self._schedule_task_script = cast(
-                    _schedule_task,
-                    redis.register_script(
-                        # KEYS: stream_key, known_key, parked_key, queue_key, stream_id_key
-                        # ARGV: task_key, when_timestamp, is_immediate, replace, ...message_fields
-                        """
-                        local stream_key = KEYS[1]
-                        local known_key = KEYS[2]
-                        local parked_key = KEYS[3]
-                        local queue_key = KEYS[4]
-                        local stream_id_key = KEYS[5]
-
-                        local task_key = ARGV[1]
-                        local when_timestamp = ARGV[2]
-                        local is_immediate = ARGV[3] == '1'
-                        local replace = ARGV[4] == '1'
-
-                        -- Extract message fields from ARGV[5] onwards
-                        local message = {}
-                        for i = 5, #ARGV, 2 do
-                            message[#message + 1] = ARGV[i] -- field name
-                            message[#message + 1] = ARGV[i + 1] -- field value
-                        end
-
-                        -- Handle replacement: cancel existing task if needed
-                        if replace then
-                            local existing_message_id = redis.call('GET', stream_id_key)
-                            if existing_message_id then
-                                redis.call('XDEL', stream_key, existing_message_id)
-                            end
-                            redis.call('DEL', known_key, parked_key, stream_id_key)
-                            redis.call('ZREM', queue_key, task_key)
-                        else
-                            -- Check if task already exists
-                            if redis.call('EXISTS', known_key) == 1 then
-                                return 'EXISTS'
-                            end
-                        end
-
-                        if is_immediate then
-                            -- Add to stream and store message ID for later cancellation
-                            local message_id = redis.call('XADD', stream_key, '*', unpack(message))
-                            redis.call('SET', known_key, when_timestamp)
-                            redis.call('SET', stream_id_key, message_id)
-                            return message_id
-                        else
-                            -- Add to queue with task data in parked hash
-                            redis.call('SET', known_key, when_timestamp)
-                            redis.call('HSET', parked_key, unpack(message))
-                            redis.call('ZADD', queue_key, when_timestamp, task_key)
-                            return 'QUEUED'
-                        end
-                        """
-                    ),
-                )
-            schedule_task = self._schedule_task_script
+            if replace:
+                await self._cancel(pipeline, key)
+            else:
+                # if the task is already in the queue or stream, retain it
+                if await redis.exists(known_task_key):
+                    logger.debug(
+                        "Task %r is already in the queue or stream, not scheduling",
+                        key,
+                        extra=self.labels(),
+                    )
+                    return
 
-            await schedule_task(
-                keys=[
-                    self.stream_key,
-                    known_task_key,
-                    self.parked_task_key(key),
-                    self.queue_key,
-                    self.stream_id_key(key),
-                ],
-                args=[
-                    key,
-                    str(when.timestamp()),
-                    "1" if is_immediate else "0",
-                    "1" if replace else "0",
-                    *[
-                        item
-                        for field, value in message.items()
-                        for item in (field, value)
-                    ],
-                ],
-            )
+            pipeline.set(known_task_key, when.timestamp())
 
-    async def _cancel(self, redis: Redis, key: str) -> None:
-        """Cancel a task atomically.
+            if when <= datetime.now(timezone.utc):
+                pipeline.xadd(self.stream_key, message)  # type: ignore[arg-type]
+            else:
+                pipeline.hset(self.parked_task_key(key), mapping=message)  # type: ignore[arg-type]
+                pipeline.zadd(self.queue_key, {key: when.timestamp()})
 
-        Handles cancellation regardless of task location:
-        - From the stream (using stored message ID)
-        - From the queue (scheduled tasks)
-        - Cleans up all associated metadata keys
-        """
-        if self._cancel_task_script is None:
-            self._cancel_task_script = cast(
-                _cancel_task,
-                redis.register_script(
-                    # KEYS: stream_key, known_key, parked_key, queue_key, stream_id_key
-                    # ARGV: task_key
-                    """
-                    local stream_key = KEYS[1]
-                    local known_key = KEYS[2]
-                    local parked_key = KEYS[3]
-                    local queue_key = KEYS[4]
-                    local stream_id_key = KEYS[5]
-                    local task_key = ARGV[1]
-
-                    -- Delete from stream if message ID exists
-                    local message_id = redis.call('GET', stream_id_key)
-                    if message_id then
-                        redis.call('XDEL', stream_key, message_id)
-                    end
-
-                    -- Clean up all task-related keys
-                    redis.call('DEL', known_key, parked_key, stream_id_key)
-                    redis.call('ZREM', queue_key, task_key)
-
-                    return 'OK'
-                    """
-                ),
-            )
-        cancel_task = self._cancel_task_script
-
-        # Execute the cancellation script
-        await cancel_task(
-            keys=[
-                self.stream_key,
-                self.known_task_key(key),
-                self.parked_task_key(key),
-                self.queue_key,
-                self.stream_id_key(key),
-            ],
-            args=[key],
-        )
+    async def _cancel(self, pipeline: Pipeline, key: str) -> None:
+        pipeline.delete(self.known_task_key(key))
+        pipeline.delete(self.parked_task_key(key))
+        pipeline.zrem(self.queue_key, key)
 
     @property
     def strike_key(self) -> str:
@@ -905,7 +781,6 @@ class Docket:
             key = key_bytes.decode()
             pipeline.delete(self.parked_task_key(key))
             pipeline.delete(self.known_task_key(key))
-            pipeline.delete(self.stream_id_key(key))
 
         await pipeline.execute()
 
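Note: with the Lua scripts gone, _schedule splits its atomicity between a per-key distributed lock (guarding the EXISTS check) and the buffered pipeline writes, which the caller flushes with execute() after _schedule returns. Below is a minimal sketch of that lock-then-check shape, assuming redis-py's asyncio client and hypothetical "example:*" key names; it is not docket's exact code, which buffers onto a caller-owned pipeline instead.

# Sketch only: the lock-then-check shape of the new _schedule. redis.lock()
# is redis-py's distributed lock; it keeps a concurrent schedule/cancel of
# the same key from interleaving with the EXISTS check.
from redis.asyncio import Redis


async def schedule_once(redis: Redis, key: str, when_ts: float) -> bool:
    known_key = f"example:known-task:{key}"
    async with redis.lock(f"{known_key}:lock", timeout=10):
        if await redis.exists(known_key):
            return False  # already scheduled; retain the existing task
        async with redis.pipeline() as pipeline:
            pipeline.set(known_key, when_ts)
            pipeline.zadd("example:queue", {key: when_ts})
            await pipeline.execute()
    return True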
docket/worker.py CHANGED
@@ -495,8 +495,7 @@ class Worker:
 
         logger.debug("Deleting known task", extra=self._log_context())
         known_task_key = self.docket.known_task_key(key)
-        stream_id_key = self.docket.stream_id_key(key)
-        await redis.delete(known_task_key, stream_id_key)
+        await redis.delete(known_task_key)
 
     async def _execute(self, execution: Execution) -> None:
         log_context = {**self._log_context(), **execution.specific_labels()}

{pydocket-0.9.2.dist-info → pydocket-0.10.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.9.2
+Version: 0.10.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues

{pydocket-0.9.2.dist-info → pydocket-0.10.0.dist-info}/RECORD RENAMED
@@ -3,14 +3,14 @@ docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
 docket/annotations.py,sha256=wttix9UOeMFMAWXAIJUfUw5GjESJZsACb4YXJCozP7Q,2348
 docket/cli.py,sha256=rTfri2--u4Q5PlXyh7Ub_F5uh3-TtZOWLUp9WY_TvAE,25750
 docket/dependencies.py,sha256=BC0bnt10cr9_S1p5JAP_bnC9RwZkTr9ulPBrxC7eZnA,20247
-docket/docket.py,sha256=jP5uI02in5chQvovRsnPaMhgLff3uiK42A-l3eBh2sE,31241
+docket/docket.py,sha256=Cw7QB1d0eDwSgwn0Rj26WjFsXSe7MJtfsUBBHGalL7A,26262
 docket/execution.py,sha256=r_2RGC1qhtAcBUg7E6wewLEgftrf3hIxNbH0HnYPbek,14961
 docket/instrumentation.py,sha256=ogvzrfKbWsdPGfdg4hByH3_r5d3b5AwwQkSrmXw0hRg,5492
 docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
-docket/worker.py,sha256=S5HG87vHa_r1JKApHpEtNkVdUhkdi802zUw3h_zIHt0,34998
-pydocket-0.9.2.dist-info/METADATA,sha256=gWX-2gIAQ5pn3fQ1XwFweQYnbXwsgIpGVahW7KJJNtU,5418
-pydocket-0.9.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.9.2.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.9.2.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.9.2.dist-info/RECORD,,
+docket/worker.py,sha256=pOBRoEbakUwAGVKAuCNPSMyHRBSalUxtMc93QZewX7M,34928
+pydocket-0.10.0.dist-info/METADATA,sha256=Tsm_S5NTj5yOPmt-q4KAKjdEDjH6ZRzz_ITVapnFk64,5419
+pydocket-0.10.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.10.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.10.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.10.0.dist-info/RECORD,,