pydocket 0.6.4__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

docket/annotations.py CHANGED
@@ -2,6 +2,8 @@ import abc
 import inspect
 from typing import Any, Iterable, Mapping, Self
 
+from .instrumentation import CACHE_SIZE
+
 
 class Annotation(abc.ABC):
     _cache: dict[tuple[type[Self], inspect.Signature], Mapping[str, Self]] = {}
@@ -10,6 +12,7 @@ class Annotation(abc.ABC):
     def annotated_parameters(cls, signature: inspect.Signature) -> Mapping[str, Self]:
         key = (cls, signature)
         if key in cls._cache:
+            CACHE_SIZE.set(len(cls._cache), {"cache": "annotation"})
             return cls._cache[key]
 
         annotated: dict[str, Self] = {}
@@ -30,6 +33,7 @@ class Annotation(abc.ABC):
             annotated[param_name] = arg_type()
 
         cls._cache[key] = annotated
+        CACHE_SIZE.set(len(cls._cache), {"cache": "annotation"})
         return annotated
 
 
docket/cli.py CHANGED
@@ -358,6 +358,32 @@ def strike(
     asyncio.run(run())
 
 
+@app.command(help="Clear all pending and scheduled tasks from the docket")
+def clear(
+    docket_: Annotated[
+        str,
+        typer.Option(
+            "--docket",
+            help="The name of the docket",
+            envvar="DOCKET_NAME",
+        ),
+    ] = "docket",
+    url: Annotated[
+        str,
+        typer.Option(
+            help="The URL of the Redis server",
+            envvar="DOCKET_URL",
+        ),
+    ] = "redis://localhost:6379/0",
+) -> None:
+    async def run() -> None:
+        async with Docket(name=docket_, url=url) as docket:
+            cleared_count = await docket.clear()
+            print(f"Cleared {cleared_count} tasks from docket '{docket_}'")
+
+    asyncio.run(run())
+
+
 @app.command(help="Restores a task or parameters to the Docket")
 def restore(
     function: Annotated[
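The new `clear` subcommand simply wires `Docket.clear()` into the existing Typer app. A minimal sketch of exercising it in-process with Typer's test runner, assuming the Typer application in `docket.cli` is importable as `app` (as the `@app.command` decorators above suggest) and that a Redis server is reachable at the given URL:

```python
# Hypothetical smoke test for the new `clear` subcommand; the docket name and
# Redis URL are placeholders, and importing `app` from docket.cli is an assumption.
from typer.testing import CliRunner

from docket.cli import app

runner = CliRunner()
result = runner.invoke(
    app,
    ["clear", "--docket", "docket", "--url", "redis://localhost:6379/0"],
)
print(result.output)  # e.g. "Cleared 3 tasks from docket 'docket'"
```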
docket/dependencies.py CHANGED
@@ -3,7 +3,7 @@ import logging
 import time
 from contextlib import AsyncExitStack, asynccontextmanager
 from contextvars import ContextVar
-from datetime import timedelta
+from datetime import datetime, timedelta, timezone
 from types import TracebackType
 from typing import (
     TYPE_CHECKING,
@@ -14,12 +14,14 @@ from typing import (
     Callable,
     Counter,
     Generic,
+    NoReturn,
     TypeVar,
     cast,
 )
 
 from .docket import Docket
 from .execution import Execution, TaskFunction, get_signature
+from .instrumentation import CACHE_SIZE
 
 if TYPE_CHECKING:  # pragma: no cover
     from .worker import Worker
@@ -188,6 +190,10 @@ def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
     return cast(logging.LoggerAdapter[logging.Logger], _TaskLogger())
 
 
+class ForcedRetry(Exception):
+    """Raised when a task requests a retry via `in_` or `at`"""
+
+
 class Retry(Dependency):
     """Configures linear retries for a task. You can specify the total number of
     attempts (or `None` to retry indefinitely), and the delay between attempts.
@@ -222,6 +228,17 @@ class Retry(Dependency):
         retry.attempt = execution.attempt
         return retry
 
+    def at(self, when: datetime) -> NoReturn:
+        now = datetime.now(timezone.utc)
+        diff = when - now
+        diff = diff if diff.total_seconds() >= 0 else timedelta(0)
+
+        self.in_(diff)
+
+    def in_(self, when: timedelta) -> NoReturn:
+        self.delay: timedelta = when
+        raise ForcedRetry()
+
 
 
 class ExponentialRetry(Retry):
@@ -251,7 +268,6 @@ class ExponentialRetry(Retry):
             maximum_delay: The maximum delay between attempts.
         """
         super().__init__(attempts=attempts, delay=minimum_delay)
-        self.minimum_delay = minimum_delay
         self.maximum_delay = maximum_delay
 
     async def __aenter__(self) -> "ExponentialRetry":
@@ -259,14 +275,14 @@ class ExponentialRetry(Retry):
 
         retry = ExponentialRetry(
             attempts=self.attempts,
-            minimum_delay=self.minimum_delay,
+            minimum_delay=self.delay,
             maximum_delay=self.maximum_delay,
         )
         retry.attempt = execution.attempt
 
         if execution.attempt > 1:
             backoff_factor = 2 ** (execution.attempt - 1)
-            calculated_delay = self.minimum_delay * backoff_factor
+            calculated_delay = self.delay * backoff_factor
 
             if calculated_delay > self.maximum_delay:
                 retry.delay = self.maximum_delay
@@ -400,6 +416,7 @@ def get_dependency_parameters(
     function: TaskFunction | DependencyFunction[Any],
 ) -> dict[str, Dependency]:
     if function in _parameter_cache:
+        CACHE_SIZE.set(len(_parameter_cache), {"cache": "parameter"})
         return _parameter_cache[function]
 
     dependencies: dict[str, Dependency] = {}
@@ -413,6 +430,7 @@ def get_dependency_parameters(
             dependencies[parameter] = param.default
 
     _parameter_cache[function] = dependencies
+    CACHE_SIZE.set(len(_parameter_cache), {"cache": "parameter"})
     return dependencies
 
 
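Taken together, the additions above let a task force its own retry: `Retry.in_()` records a new delay and raises `ForcedRetry`, and `Retry.at()` converts an absolute time into that delay (clamped at zero for past times). A hedged sketch of task code that might use this; the task, its arguments, and the readiness check are illustrative, and the `Retry(attempts=..., delay=...)` default-parameter style follows docket's dependency pattern:

```python
from datetime import datetime, timedelta, timezone

from docket.dependencies import Retry


async def poll_report(
    report_id: int,
    retry: Retry = Retry(attempts=10, delay=timedelta(seconds=5)),
) -> None:
    # Hypothetical readiness check; real code would ask some backend.
    report_ready = False
    if not report_ready:
        # Ask docket to redeliver this task in 30 seconds
        # (sets retry.delay and raises ForcedRetry):
        retry.in_(timedelta(seconds=30))
        # Or pin the next attempt to an absolute moment:
        # retry.at(datetime.now(timezone.utc) + timedelta(minutes=5))
```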
docket/docket.py CHANGED
@@ -743,3 +743,46 @@ class Docket:
             workers.append(WorkerInfo(worker_name, last_seen, task_names))
 
         return workers
+
+    async def clear(self) -> int:
+        """Clear all pending and scheduled tasks from the docket.
+
+        This removes all tasks from the stream (immediate tasks) and queue
+        (scheduled tasks), along with their associated parked data. Running
+        tasks are not affected.
+
+        Returns:
+            The total number of tasks that were cleared.
+        """
+        with tracer.start_as_current_span(
+            "docket.clear",
+            attributes=self.labels(),
+        ):
+            async with self.redis() as redis:
+                async with redis.pipeline() as pipeline:
+                    # Get counts before clearing
+                    pipeline.xlen(self.stream_key)
+                    pipeline.zcard(self.queue_key)
+                    pipeline.zrange(self.queue_key, 0, -1)
+
+                    stream_count: int
+                    queue_count: int
+                    scheduled_keys: list[bytes]
+                    stream_count, queue_count, scheduled_keys = await pipeline.execute()
+
+                    # Clear all data
+                    # Trim stream to 0 messages instead of deleting it to preserve consumer group
+                    if stream_count > 0:
+                        pipeline.xtrim(self.stream_key, maxlen=0, approximate=False)
+                    pipeline.delete(self.queue_key)
+
+                    # Clear parked task data and known task keys
+                    for key_bytes in scheduled_keys:
+                        key = key_bytes.decode()
+                        pipeline.delete(self.parked_task_key(key))
+                        pipeline.delete(self.known_task_key(key))
+
+                    await pipeline.execute()
+
+                    total_cleared = stream_count + queue_count
+                    return total_cleared
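For completeness, the programmatic counterpart to the new CLI command: a short sketch of calling `Docket.clear()` directly (the docket name and Redis URL are placeholders):

```python
# Sketch of using the new Docket.clear() API; the name and URL are placeholders.
import asyncio

from docket import Docket


async def main() -> None:
    async with Docket(name="docket", url="redis://localhost:6379/0") as docket:
        cleared = await docket.clear()
        print(f"Cleared {cleared} pending and scheduled tasks")


if __name__ == "__main__":
    asyncio.run(main())
```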
docket/execution.py CHANGED
@@ -19,7 +19,7 @@ import opentelemetry.context
 from opentelemetry import propagate, trace
 
 from .annotations import Logged
-from .instrumentation import message_getter
+from .instrumentation import CACHE_SIZE, message_getter
 
 logger: logging.Logger = logging.getLogger(__name__)
 
@@ -32,10 +32,12 @@ _signature_cache: dict[Callable[..., Any], inspect.Signature] = {}
 
 def get_signature(function: Callable[..., Any]) -> inspect.Signature:
     if function in _signature_cache:
+        CACHE_SIZE.set(len(_signature_cache), {"cache": "signature"})
         return _signature_cache[function]
 
     signature = inspect.signature(function)
     _signature_cache[function] = signature
+    CACHE_SIZE.set(len(_signature_cache), {"cache": "signature"})
     return signature
 
 
docket/instrumentation.py CHANGED
@@ -117,6 +117,12 @@ SCHEDULE_DEPTH = meter.create_gauge(
     unit="1",
 )
 
+CACHE_SIZE = meter.create_gauge(
+    "docket_cache_size",
+    description="Size of internal docket caches",
+    unit="1",
+)
+
 Message = dict[bytes, bytes]
 
 
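The new `docket_cache_size` gauge is set with a `{"cache": ...}` attribute at every cache hit and insert, so a single instrument reports the annotation, parameter, and signature caches separately. A standalone sketch of that OpenTelemetry pattern; the meter name and the sizes recorded here are illustrative:

```python
# Illustration of the gauge-with-attributes pattern used above; the meter name
# and the recorded sizes are made up.
from opentelemetry import metrics

meter = metrics.get_meter("docket-example")
cache_size = meter.create_gauge(
    "docket_cache_size",
    description="Size of internal docket caches",
    unit="1",
)

# One labeled measurement per cache.
cache_size.set(12, {"cache": "annotation"})
cache_size.set(3, {"cache": "parameter"})
cache_size.set(7, {"cache": "signature"})
```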
docket/worker.py CHANGED
@@ -15,7 +15,7 @@ from typing import (
 )
 
 from opentelemetry import trace
-from opentelemetry.trace import Tracer
+from opentelemetry.trace import Status, StatusCode, Tracer
 from redis.asyncio import Redis
 from redis.exceptions import ConnectionError, LockError
 
@@ -531,7 +531,7 @@ class Worker:
                 "code.function.name": execution.function.__name__,
             },
             links=execution.incoming_span_links(),
-        ):
+        ) as span:
             try:
                 async with resolved_dependencies(self, execution) as dependencies:
                     # Preemptively reschedule the perpetual task for the future, or clear
@@ -576,6 +576,8 @@ class Worker:
                     duration = log_context["duration"] = time.time() - start
                     TASKS_SUCCEEDED.add(1, counter_labels)
 
+                    span.set_status(Status(StatusCode.OK))
+
                     rescheduled = await self._perpetuate_if_requested(
                         execution, dependencies, timedelta(seconds=duration)
                     )
@@ -584,10 +586,13 @@ class Worker:
                     logger.info(
                         "%s [%s] %s", arrow, ms(duration), call, extra=log_context
                     )
-            except Exception:
+            except Exception as e:
                 duration = log_context["duration"] = time.time() - start
                 TASKS_FAILED.add(1, counter_labels)
 
+                span.record_exception(e)
+                span.set_status(Status(StatusCode.ERROR, str(e)))
+
                 retried = await self._retry_if_requested(execution, dependencies)
                 if not retried:
                     retried = await self._perpetuate_if_requested(
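The worker now marks each task span with an explicit outcome: OK on success, and the recorded exception plus an ERROR status on failure. A minimal, self-contained sketch of that OpenTelemetry pattern; the tracer name, span name, and failing work are illustrative:

```python
# Illustration of the span-status pattern added above; the tracer/span names
# and the deliberately failing "task" are made up.
from opentelemetry import trace
from opentelemetry.trace import Status, StatusCode

tracer = trace.get_tracer("docket-example")

with tracer.start_as_current_span("docket.task example_task") as span:
    try:
        raise RuntimeError("boom")  # stand-in for a failing task body
    except Exception as e:
        span.record_exception(e)
        span.set_status(Status(StatusCode.ERROR, str(e)))
    else:
        span.set_status(Status(StatusCode.OK))
```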
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.6.4
+Version: 0.7.1
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
@@ -0,0 +1,16 @@
+docket/__init__.py,sha256=sY1T_NVsXQNOmOhOnfYmZ95dcE_52Ov6DSIVIMZp-1w,869
+docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
+docket/annotations.py,sha256=wttix9UOeMFMAWXAIJUfUw5GjESJZsACb4YXJCozP7Q,2348
+docket/cli.py,sha256=XG_mbjcqNRO0F0hh6l3AwH9bIZv9xJofZaeaAj9nChc,21608
+docket/dependencies.py,sha256=GBwyEY198JFrfm7z5GkLbd84hv7sJktKBMJXv4veWig,17007
+docket/docket.py,sha256=Cw7QB1d0eDwSgwn0Rj26WjFsXSe7MJtfsUBBHGalL7A,26262
+docket/execution.py,sha256=r_2RGC1qhtAcBUg7E6wewLEgftrf3hIxNbH0HnYPbek,14961
+docket/instrumentation.py,sha256=ogvzrfKbWsdPGfdg4hByH3_r5d3b5AwwQkSrmXw0hRg,5492
+docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
+docket/worker.py,sha256=CY5Z9p8FZw-6WUwp7Ws4A0V7IFTmonSnBmYP-Cp8Fdw,28079
+pydocket-0.7.1.dist-info/METADATA,sha256=00KHm5Er2R6dmjHLTYBUF13kKAeCRPHmDTdAcv5oRcQ,5335
+pydocket-0.7.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.7.1.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.7.1.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.7.1.dist-info/RECORD,,
@@ -1,16 +0,0 @@
-docket/__init__.py,sha256=sY1T_NVsXQNOmOhOnfYmZ95dcE_52Ov6DSIVIMZp-1w,869
-docket/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
-docket/annotations.py,sha256=SFBrOMbpAh7P67u8fRTH-u3MVvJQxe0qYi92WAShAsw,2173
-docket/cli.py,sha256=WPm_URZ54h8gHjrsHKP8SXpRzdeepmyH_FhQHai-Qus,20899
-docket/dependencies.py,sha256=_31Fgn6A_4aWn5TJpXdbsPtimBVIPabNJkw49RRLJTc,16441
-docket/docket.py,sha256=5e101CGLZ2tWNcADo4cdewapmXab47ieMCeQr6d92YQ,24478
-docket/execution.py,sha256=6KozjnS96byvyCMTQ2-IkcIrPsqaPIVu2HZU0U4Be9E,14813
-docket/instrumentation.py,sha256=f-GG5VS6EdS2It30qxjVpzWUBOZQcTnat-3KzPwwDgQ,5367
-docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
-docket/worker.py,sha256=tJfk2rlHODzHaWBzpBXT8h-Lo7RDQ6gb6HU8b3T9gFA,27878
-pydocket-0.6.4.dist-info/METADATA,sha256=R3ODtTRkrNkplBvC5-8pVsRjSLSfYKYHKXqZCT9Qr-w,5335
-pydocket-0.6.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.6.4.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.6.4.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.6.4.dist-info/RECORD,,