pydocket 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


docket/__init__.py CHANGED
@@ -14,6 +14,7 @@ from .dependencies import (
     CurrentExecution,
     CurrentWorker,
     ExponentialRetry,
+    Perpetual,
     Retry,
     TaskKey,
     TaskLogger,
@@ -34,5 +35,6 @@ __all__ = [
     "Retry",
     "ExponentialRetry",
     "Logged",
+    "Perpetual",
     "__version__",
 ]
docket/dependencies.py CHANGED
@@ -126,6 +126,34 @@ class ExponentialRetry(Retry):
         return retry
 
 
+class Perpetual(Dependency):
+    single = True
+
+    every: timedelta
+    args: tuple[Any, ...]
+    kwargs: dict[str, Any]
+    cancelled: bool
+
+    def __init__(self, every: timedelta = timedelta(0)) -> None:
+        self.every = every
+        self.cancelled = False
+
+    def __call__(
+        self, docket: Docket, worker: Worker, execution: Execution
+    ) -> "Perpetual":
+        perpetual = Perpetual(every=self.every)
+        perpetual.args = execution.args
+        perpetual.kwargs = execution.kwargs
+        return perpetual
+
+    def cancel(self) -> None:
+        self.cancelled = True
+
+    def perpetuate(self, *args: Any, **kwargs: Any) -> None:
+        self.args = args
+        self.kwargs = kwargs
+
+
 def get_dependency_parameters(
     function: Callable[..., Awaitable[Any]],
 ) -> dict[str, Dependency]:
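The Perpetual dependency added above lets a task ask the worker to schedule it again after each run. Below is a minimal sketch of how a task might declare it, following docket's pattern of dependencies as parameter defaults; the task function, interval, and polling logic are illustrative and not part of this release:

import random
from datetime import timedelta

from docket import Perpetual


async def poll_inbox(
    customer_id: int,
    perpetual: Perpetual = Perpetual(every=timedelta(seconds=30)),
) -> None:
    # The worker reschedules this task roughly every 30 seconds (measured from
    # the start of the previous run), reusing the same arguments, until
    # cancel() is called.
    inbox_empty = random.random() < 0.5  # stand-in for real polling work
    if inbox_empty:
        perpetual.cancel()  # ask the worker not to schedule another run
    else:
        perpetual.perpetuate(customer_id)  # optionally swap in new arguments

By default the resolved Perpetual copies the current execution's arguments, so the next run repeats them unless perpetuate() supplies new ones; cancel() suppresses the reschedule entirely.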
docket/instrumentation.py CHANGED
@@ -70,6 +70,12 @@ TASKS_RETRIED = meter.create_counter(
     unit="1",
 )
 
+TASKS_PERPETUATED = meter.create_counter(
+    "docket_tasks_perpetuated",
+    description="How many tasks that have been self-perpetuated",
+    unit="1",
+)
+
 TASK_DURATION = meter.create_histogram(
     "docket_task_duration",
     description="How long tasks take to complete",
docket/worker.py CHANGED
@@ -7,6 +7,7 @@ from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
+    Callable,
     Mapping,
     Protocol,
     Self,
@@ -35,6 +36,7 @@ from .instrumentation import (
     TASK_PUNCTUALITY,
     TASKS_COMPLETED,
     TASKS_FAILED,
+    TASKS_PERPETUATED,
     TASKS_RETRIED,
     TASKS_RUNNING,
     TASKS_STARTED,
@@ -63,6 +65,11 @@ class _stream_due_tasks(Protocol):
 class Worker:
     docket: Docket
     name: str
+    concurrency: int
+    redelivery_timeout: timedelta
+    reconnection_delay: timedelta
+    minimum_check_interval: timedelta
+    _strike_conditions: list[Callable[[Execution], bool]] = []
 
     def __init__(
         self,
@@ -80,6 +87,10 @@ class Worker:
         self.reconnection_delay = reconnection_delay
         self.minimum_check_interval = minimum_check_interval
 
+        self._strike_conditions = [
+            docket.strike_list.is_stricken,
+        ]
+
     async def __aenter__(self) -> Self:
         self._heartbeat_task = asyncio.create_task(self._heartbeat())
 
@@ -151,6 +162,35 @@ class Worker:
         """Run the worker indefinitely."""
         return await self._run(forever=True)  # pragma: no cover
 
+    async def run_at_most(self, iterations_by_key: Mapping[str, int]) -> None:
+        """
+        Run the worker until there are no more tasks to process, but limit specified
+        task keys to a maximum number of iterations.
+
+        This is particularly useful for testing self-perpetuating tasks that would
+        otherwise run indefinitely.
+
+        Args:
+            iterations_by_key: Maps task keys to their maximum allowed executions
+        """
+        execution_counts: dict[str, int] = {key: 0 for key in iterations_by_key}
+
+        def has_reached_max_iterations(execution: Execution) -> bool:
+            if execution.key not in iterations_by_key:
+                return False
+
+            if execution_counts[execution.key] >= iterations_by_key[execution.key]:
+                return True
+
+            execution_counts[execution.key] += 1
+            return False
+
+        self._strike_conditions.insert(0, has_reached_max_iterations)
+        try:
+            await self.run_until_finished()
+        finally:
+            self._strike_conditions.remove(has_reached_max_iterations)
+
     async def _run(self, forever: bool = False) -> None:
         logger.info("Starting worker %r with the following tasks:", self.name)
         for task_name, task in self.docket.tasks.items():
@@ -322,7 +362,7 @@ class Worker:
         await process_completed_tasks()
 
     async def _execute(self, message: RedisMessage) -> None:
-        log_context: dict[str, str | float] = self._log_context()
+        log_context: Mapping[str, str | float] = self._log_context()
 
         function_name = message[b"function"].decode()
         function = self.docket.tasks.get(function_name)
@@ -334,13 +374,13 @@ class Worker:
 
         execution = Execution.from_message(function, message)
 
-        log_context |= execution.specific_labels()
+        log_context = {**log_context, **execution.specific_labels()}
         counter_labels = {**self.labels(), **execution.general_labels()}
 
         arrow = "↬" if execution.attempt > 1 else "↪"
         call = execution.call_repr()
 
-        if self.docket.strike_list.is_stricken(execution):
+        if any(condition(execution) for condition in self._strike_conditions):
             arrow = "🗙"
             logger.warning("%s %s", arrow, call, extra=log_context)
             TASKS_STRICKEN.add(1, counter_labels | {"docket.where": "worker"})
@@ -354,7 +394,7 @@ class Worker:
 
         start = datetime.now(timezone.utc)
         punctuality = start - execution.when
-        log_context["punctuality"] = punctuality.total_seconds()
+        log_context = {**log_context, "punctuality": punctuality.total_seconds()}
         duration = timedelta(0)
 
         TASKS_STARTED.add(1, counter_labels)
@@ -385,12 +425,20 @@ class Worker:
             TASKS_SUCCEEDED.add(1, counter_labels)
             duration = datetime.now(timezone.utc) - start
             log_context["duration"] = duration.total_seconds()
-            logger.info("%s [%s] %s", "↩", duration, call, extra=log_context)
+            rescheduled = await self._perpetuate_if_requested(
+                execution, dependencies, duration
+            )
+            arrow = "↫" if rescheduled else "↩"
+            logger.info("%s [%s] %s", arrow, duration, call, extra=log_context)
         except Exception:
             TASKS_FAILED.add(1, counter_labels)
             duration = datetime.now(timezone.utc) - start
             log_context["duration"] = duration.total_seconds()
             retried = await self._retry_if_requested(execution, dependencies)
+            if not retried:
+                retried = await self._perpetuate_if_requested(
+                    execution, dependencies, duration
+                )
             arrow = "↫" if retried else "↩"
             logger.exception("%s [%s] %s", arrow, duration, call, extra=log_context)
         finally:
@@ -442,6 +490,34 @@ class Worker:
 
         return False
 
+    async def _perpetuate_if_requested(
+        self, execution: Execution, dependencies: dict[str, Any], duration: timedelta
+    ) -> bool:
+        from .dependencies import Perpetual
+
+        perpetuals = [
+            perpetual
+            for perpetual in dependencies.values()
+            if isinstance(perpetual, Perpetual)
+        ]
+        if not perpetuals:
+            return False
+
+        perpetual = perpetuals[0]
+
+        if perpetual.cancelled:
+            return False
+
+        now = datetime.now(timezone.utc)
+        execution.when = max(now, now + perpetual.every - duration)
+        execution.args = perpetual.args
+        execution.kwargs = perpetual.kwargs
+
+        await self.docket.schedule(execution)
+
+        TASKS_PERPETUATED.add(1, {**self.labels(), **execution.specific_labels()})
+        return True
+
     @property
     def workers_set(self) -> str:
         return self.docket.workers_set
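Worker.run_at_most() inserts an extra strike condition ahead of the docket's strike list so tests can cap how many times a given task key executes. Below is a sketch of bounding a self-perpetuating task; the Docket construction, register(), and add(..., key=...) calls are assumptions about docket's public API (not shown in this diff), and the Redis URL and key names are placeholders:

import asyncio
from datetime import timedelta

from docket import Perpetual
from docket.docket import Docket
from docket.worker import Worker


async def heartbeat(
    perpetual: Perpetual = Perpetual(every=timedelta(milliseconds=10)),
) -> None:
    print("beat")


async def main() -> None:
    # Assumed constructor arguments; adjust to the actual Docket signature.
    async with Docket(name="test-docket", url="redis://localhost:6379/0") as docket:
        docket.register(heartbeat)  # assumed registration API
        await docket.add(heartbeat, key="heartbeat")()  # assumed scheduling API

        async with Worker(docket) as worker:
            # Treat "heartbeat" as stricken after three executions so the
            # otherwise self-perpetuating task lets the worker drain and return.
            await worker.run_at_most({"heartbeat": 3})


if __name__ == "__main__":
    asyncio.run(main())

Internally, run_at_most() counts executions per key, strikes a key once its limit is reached, and removes the temporary condition again when run_until_finished() returns.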
pydocket-0.3.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.2.0
+Version: 0.3.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
pydocket-0.3.0.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+docket/__init__.py,sha256=7oruGALDoU6W_ntF-mMxxv3FFtO970DVzj3lUgoVIiM,775
+docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
+docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
+docket/cli.py,sha256=EseF0Sj7IEgd9QDC-FSbHSffvF7DNsrmDGYGgZBdJc8,19413
+docket/dependencies.py,sha256=S3KqXxEF0Q2t_jO3R-kI5IIA3M-tqybtiSod2xnRO4o,4991
+docket/docket.py,sha256=zva6ofTm7i5hRwAaAnNtlgIqoMPaNLqCTs2PXGka_8s,19723
+docket/execution.py,sha256=ShP8MoLmxEslk2pAuhKi6KEEKbHdneyQukR9oQwXdjQ,11732
+docket/instrumentation.py,sha256=bZlGA02JoJcY0J1WGm5_qXDfY0AXKr0ZLAYu67wkeKY,4611
+docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
+docket/worker.py,sha256=A0jfi6f2QZ2OA5z9rVvEcstSiIAQPrxaJmKkFBHH48g,21752
+pydocket-0.3.0.dist-info/METADATA,sha256=O6NoNE03rUVEMokkKArLaH6_sXhnrx-kWTnVBN8h5Ak,13092
+pydocket-0.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.3.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.3.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.3.0.dist-info/RECORD,,
pydocket-0.2.0.dist-info/RECORD REMOVED
@@ -1,16 +0,0 @@
-docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
-docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
-docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
-docket/cli.py,sha256=EseF0Sj7IEgd9QDC-FSbHSffvF7DNsrmDGYGgZBdJc8,19413
-docket/dependencies.py,sha256=gIDwcBUhrLk7xGh0ZxdqpsnSeX-hZzGMNvUrVFfqbJI,4281
-docket/docket.py,sha256=zva6ofTm7i5hRwAaAnNtlgIqoMPaNLqCTs2PXGka_8s,19723
-docket/execution.py,sha256=ShP8MoLmxEslk2pAuhKi6KEEKbHdneyQukR9oQwXdjQ,11732
-docket/instrumentation.py,sha256=SUVhVFf8AX2HAfmi0HPTT_QvQezlGPJEKs_1YAmrCbA,4454
-docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
-docket/worker.py,sha256=UZIPfAsIhsBsr2tBCgGGkLKU1mJs_nnP8-Retwl3218,19104
-pydocket-0.2.0.dist-info/METADATA,sha256=X8Yqvi_cqCqYaGu6ZGr4dMvxqcvy6otYvt-J2jwCHOs,13092
-pydocket-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.2.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.2.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.2.0.dist-info/RECORD,,