pydocket 0.6.4.tar.gz → 0.7.0.tar.gz (sdist diff)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pydocket has been flagged as potentially problematic; see the registry's advisory page for details.

Files changed (61)
  1. {pydocket-0.6.4 → pydocket-0.7.0}/PKG-INFO +1 -1
  2. {pydocket-0.6.4 → pydocket-0.7.0}/chaos/driver.py +24 -8
  3. {pydocket-0.6.4 → pydocket-0.7.0}/chaos/tasks.py +8 -1
  4. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/dependencies.py +19 -4
  5. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_dependencies.py +123 -1
  6. {pydocket-0.6.4 → pydocket-0.7.0}/.cursor/rules/general.mdc +0 -0
  7. {pydocket-0.6.4 → pydocket-0.7.0}/.cursor/rules/python-style.mdc +0 -0
  8. {pydocket-0.6.4 → pydocket-0.7.0}/.github/codecov.yml +0 -0
  9. {pydocket-0.6.4 → pydocket-0.7.0}/.github/workflows/chaos.yml +0 -0
  10. {pydocket-0.6.4 → pydocket-0.7.0}/.github/workflows/ci.yml +0 -0
  11. {pydocket-0.6.4 → pydocket-0.7.0}/.github/workflows/docs.yml +0 -0
  12. {pydocket-0.6.4 → pydocket-0.7.0}/.github/workflows/publish.yml +0 -0
  13. {pydocket-0.6.4 → pydocket-0.7.0}/.gitignore +0 -0
  14. {pydocket-0.6.4 → pydocket-0.7.0}/.pre-commit-config.yaml +0 -0
  15. {pydocket-0.6.4 → pydocket-0.7.0}/LICENSE +0 -0
  16. {pydocket-0.6.4 → pydocket-0.7.0}/README.md +0 -0
  17. {pydocket-0.6.4 → pydocket-0.7.0}/chaos/README.md +0 -0
  18. {pydocket-0.6.4 → pydocket-0.7.0}/chaos/__init__.py +0 -0
  19. {pydocket-0.6.4 → pydocket-0.7.0}/chaos/producer.py +0 -0
  20. {pydocket-0.6.4 → pydocket-0.7.0}/chaos/run +0 -0
  21. {pydocket-0.6.4 → pydocket-0.7.0}/docs/api-reference.md +0 -0
  22. {pydocket-0.6.4 → pydocket-0.7.0}/docs/getting-started.md +0 -0
  23. {pydocket-0.6.4 → pydocket-0.7.0}/docs/index.md +0 -0
  24. {pydocket-0.6.4 → pydocket-0.7.0}/examples/__init__.py +0 -0
  25. {pydocket-0.6.4 → pydocket-0.7.0}/examples/common.py +0 -0
  26. {pydocket-0.6.4 → pydocket-0.7.0}/examples/find_and_flood.py +0 -0
  27. {pydocket-0.6.4 → pydocket-0.7.0}/examples/self_perpetuating.py +0 -0
  28. {pydocket-0.6.4 → pydocket-0.7.0}/mkdocs.yml +0 -0
  29. {pydocket-0.6.4 → pydocket-0.7.0}/pyproject.toml +0 -0
  30. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/__init__.py +0 -0
  31. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/__main__.py +0 -0
  32. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/annotations.py +0 -0
  33. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/cli.py +0 -0
  34. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/docket.py +0 -0
  35. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/execution.py +0 -0
  36. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/instrumentation.py +0 -0
  37. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/py.typed +0 -0
  38. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/tasks.py +0 -0
  39. {pydocket-0.6.4 → pydocket-0.7.0}/src/docket/worker.py +0 -0
  40. {pydocket-0.6.4 → pydocket-0.7.0}/telemetry/.gitignore +0 -0
  41. {pydocket-0.6.4 → pydocket-0.7.0}/telemetry/start +0 -0
  42. {pydocket-0.6.4 → pydocket-0.7.0}/telemetry/stop +0 -0
  43. {pydocket-0.6.4 → pydocket-0.7.0}/tests/__init__.py +0 -0
  44. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/__init__.py +0 -0
  45. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/conftest.py +0 -0
  46. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_module.py +0 -0
  47. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_parsing.py +0 -0
  48. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_snapshot.py +0 -0
  49. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_striking.py +0 -0
  50. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_tasks.py +0 -0
  51. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_version.py +0 -0
  52. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_worker.py +0 -0
  53. {pydocket-0.6.4 → pydocket-0.7.0}/tests/cli/test_workers.py +0 -0
  54. {pydocket-0.6.4 → pydocket-0.7.0}/tests/conftest.py +0 -0
  55. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_docket.py +0 -0
  56. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_execution.py +0 -0
  57. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_fundamentals.py +0 -0
  58. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_instrumentation.py +0 -0
  59. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_striking.py +0 -0
  60. {pydocket-0.6.4 → pydocket-0.7.0}/tests/test_worker.py +0 -0
  61. {pydocket-0.6.4 → pydocket-0.7.0}/uv.lock +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pydocket
3
- Version: 0.6.4
3
+ Version: 0.7.0
4
4
  Summary: A distributed background task system for Python functions
5
5
  Project-URL: Homepage, https://github.com/chrisguidry/docket
6
6
  Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
@@ -153,13 +153,20 @@ async def main(
153
153
  sent_tasks = await r.zcard("hello:sent")
154
154
  received_tasks = await r.zcard("hello:received")
155
155
 
156
+ stream_length = await r.xlen(docket.stream_key)
157
+ pending = await r.xpending(
158
+ docket.stream_key, docket.worker_group_name
159
+ )
160
+
156
161
  logger.info(
157
- "sent: %d, received: %d, clients: %d",
162
+ "sent: %d, received: %d, stream: %d, pending: %d, clients: %d",
158
163
  sent_tasks,
159
164
  received_tasks,
165
+ stream_length,
166
+ pending["pending"],
160
167
  connected_clients,
161
168
  )
162
- if sent_tasks >= tasks:
169
+ if sent_tasks >= tasks and received_tasks >= sent_tasks:
163
170
  break
164
171
  except redis.exceptions.ConnectionError as e:
165
172
  logger.error(
@@ -177,16 +184,13 @@ async def main(
177
184
 
178
185
  elif chaos_chance < 0.10:
179
186
  worker_index = random.randrange(len(worker_processes))
180
- worker_to_kill = worker_processes.pop(worker_index)
187
+ worker_to_kill = worker_processes[worker_index]
181
188
 
182
189
  logger.warning("CHAOS: Killing worker %d...", worker_index)
183
190
  try:
184
- worker_to_kill.terminate()
191
+ worker_to_kill.kill()
185
192
  except ProcessLookupError:
186
193
  logger.warning(" What is dead may never die!")
187
-
188
- logger.warning("CHAOS: Replacing worker %d...", worker_index)
189
- worker_processes.append(await spawn_worker())
190
194
  elif chaos_chance < 0.15:
191
195
  logger.warning("CHAOS: Queuing a toxic task...")
192
196
  try:
@@ -194,6 +198,17 @@ async def main(
194
198
  except redis.exceptions.ConnectionError:
195
199
  pass
196
200
 
201
+ # Check if any worker processes have died and replace them
202
+ for i in range(len(worker_processes)):
203
+ process = worker_processes[i]
204
+ if process.returncode is not None:
205
+ logger.warning(
206
+ "Worker %d has died with code %d, replacing it...",
207
+ i,
208
+ process.returncode,
209
+ )
210
+ worker_processes[i] = await spawn_worker()
211
+
197
212
  await asyncio.sleep(0.25)
198
213
 
199
214
  async with docket.redis() as r:
@@ -225,5 +240,6 @@ async def main(
225
240
 
226
241
  if __name__ == "__main__":
227
242
  mode = sys.argv[1] if len(sys.argv) > 1 else "chaos"
243
+ tasks = int(sys.argv[2]) if len(sys.argv) > 2 else 20000
228
244
  assert mode in ("performance", "chaos")
229
- asyncio.run(main(mode=mode))
245
+ asyncio.run(main(mode=mode, tasks=tasks))
@@ -1,4 +1,6 @@
1
+ import asyncio
1
2
  import logging
3
+ import random
2
4
  import sys
3
5
  import time
4
6
 
@@ -29,7 +31,12 @@ async def hello(
29
31
 
30
32
 
31
33
  async def toxic():
32
- sys.exit(42)
34
+ if random.random() < 0.25:
35
+ sys.exit(42)
36
+ elif random.random() < 0.5:
37
+ raise Exception("Boom")
38
+ else:
39
+ await asyncio.sleep(random.uniform(0.01, 0.05))
33
40
 
34
41
 
35
42
  chaos_tasks = [hello, toxic]
@@ -3,7 +3,7 @@ import logging
3
3
  import time
4
4
  from contextlib import AsyncExitStack, asynccontextmanager
5
5
  from contextvars import ContextVar
6
- from datetime import timedelta
6
+ from datetime import datetime, timedelta, timezone
7
7
  from types import TracebackType
8
8
  from typing import (
9
9
  TYPE_CHECKING,
@@ -14,6 +14,7 @@ from typing import (
14
14
  Callable,
15
15
  Counter,
16
16
  Generic,
17
+ NoReturn,
17
18
  TypeVar,
18
19
  cast,
19
20
  )
@@ -188,6 +189,10 @@ def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
188
189
  return cast(logging.LoggerAdapter[logging.Logger], _TaskLogger())
189
190
 
190
191
 
192
+ class ForcedRetry(Exception):
193
+ """Raised when a task requests a retry via `in_` or `at`"""
194
+
195
+
191
196
  class Retry(Dependency):
192
197
  """Configures linear retries for a task. You can specify the total number of
193
198
  attempts (or `None` to retry indefinitely), and the delay between attempts.
@@ -222,6 +227,17 @@ class Retry(Dependency):
222
227
  retry.attempt = execution.attempt
223
228
  return retry
224
229
 
230
+ def at(self, when: datetime) -> NoReturn:
231
+ now = datetime.now(timezone.utc)
232
+ diff = when - now
233
+ diff = diff if diff.total_seconds() >= 0 else timedelta(0)
234
+
235
+ self.in_(diff)
236
+
237
+ def in_(self, when: timedelta) -> NoReturn:
238
+ self.delay: timedelta = when
239
+ raise ForcedRetry()
240
+
225
241
 
226
242
  class ExponentialRetry(Retry):
227
243
  """Configures exponential retries for a task. You can specify the total number
@@ -251,7 +267,6 @@ class ExponentialRetry(Retry):
251
267
  maximum_delay: The maximum delay between attempts.
252
268
  """
253
269
  super().__init__(attempts=attempts, delay=minimum_delay)
254
- self.minimum_delay = minimum_delay
255
270
  self.maximum_delay = maximum_delay
256
271
 
257
272
  async def __aenter__(self) -> "ExponentialRetry":
@@ -259,14 +274,14 @@ class ExponentialRetry(Retry):
259
274
 
260
275
  retry = ExponentialRetry(
261
276
  attempts=self.attempts,
262
- minimum_delay=self.minimum_delay,
277
+ minimum_delay=self.delay,
263
278
  maximum_delay=self.maximum_delay,
264
279
  )
265
280
  retry.attempt = execution.attempt
266
281
 
267
282
  if execution.attempt > 1:
268
283
  backoff_factor = 2 ** (execution.attempt - 1)
269
- calculated_delay = self.minimum_delay * backoff_factor
284
+ calculated_delay = self.delay * backoff_factor
270
285
 
271
286
  if calculated_delay > self.maximum_delay:
272
287
  retry.delay = self.maximum_delay
@@ -1,9 +1,10 @@
1
1
  import logging
2
+ from datetime import datetime, timedelta, timezone
2
3
 
3
4
  import pytest
4
5
 
5
6
  from docket import CurrentDocket, CurrentWorker, Docket, Worker
6
- from docket.dependencies import Depends, Retry, TaskArgument
7
+ from docket.dependencies import Depends, ExponentialRetry, Retry, TaskArgument
7
8
 
8
9
 
9
10
  async def test_dependencies_may_be_duplicated(docket: Docket, worker: Worker):
@@ -95,6 +96,127 @@ async def test_user_provide_retries_are_used(docket: Docket, worker: Worker):
95
96
  assert calls == 2
96
97
 
97
98
 
99
+ @pytest.mark.parametrize("retry_cls", [Retry, ExponentialRetry])
100
+ async def test_user_can_request_a_retry_in_timedelta_time(
101
+ retry_cls: Retry, docket: Docket, worker: Worker
102
+ ):
103
+ calls = 0
104
+ first_call_time = None
105
+ second_call_time = None
106
+
107
+ async def the_task(
108
+ a: str,
109
+ b: str,
110
+ retry: Retry = retry_cls(attempts=2), # type: ignore[reportCallIssue]
111
+ ):
112
+ assert a == "a"
113
+ assert b == "b"
114
+
115
+ nonlocal calls
116
+ calls += 1
117
+
118
+ nonlocal first_call_time
119
+ if not first_call_time:
120
+ first_call_time = datetime.now(timezone.utc)
121
+ retry.in_(timedelta(seconds=0.5))
122
+ else:
123
+ nonlocal second_call_time
124
+ second_call_time = datetime.now(timezone.utc)
125
+
126
+ await docket.add(the_task)("a", "b")
127
+
128
+ await worker.run_until_finished()
129
+
130
+ assert calls == 2
131
+
132
+ assert isinstance(first_call_time, datetime)
133
+ assert isinstance(second_call_time, datetime)
134
+
135
+ delay = second_call_time - first_call_time
136
+ assert delay.total_seconds() > 0 < 1
137
+
138
+
139
+ @pytest.mark.parametrize("retry_cls", [Retry, ExponentialRetry])
140
+ async def test_user_can_request_a_retry_at_a_specific_time(
141
+ retry_cls: Retry, docket: Docket, worker: Worker
142
+ ):
143
+ calls = 0
144
+ first_call_time = None
145
+ second_call_time = None
146
+
147
+ async def the_task(
148
+ a: str,
149
+ b: str,
150
+ retry: Retry = retry_cls(attempts=2), # type: ignore[reportCallIssue]
151
+ ):
152
+ assert a == "a"
153
+ assert b == "b"
154
+
155
+ nonlocal calls
156
+ calls += 1
157
+
158
+ nonlocal first_call_time
159
+ if not first_call_time:
160
+ when = datetime.now(timezone.utc) + timedelta(seconds=0.5)
161
+ first_call_time = datetime.now(timezone.utc)
162
+ retry.at(when)
163
+ else:
164
+ nonlocal second_call_time
165
+ second_call_time = datetime.now(timezone.utc)
166
+
167
+ await docket.add(the_task)("a", "b")
168
+
169
+ await worker.run_until_finished()
170
+
171
+ assert calls == 2
172
+
173
+ assert isinstance(first_call_time, datetime)
174
+ assert isinstance(second_call_time, datetime)
175
+
176
+ delay = second_call_time - first_call_time
177
+ assert delay.total_seconds() > 0 < 1
178
+
179
+
180
+ async def test_user_can_request_a_retry_at_a_specific_time_in_the_past(
181
+ docket: Docket, worker: Worker
182
+ ):
183
+ calls = 0
184
+ first_call_time = None
185
+ second_call_time = None
186
+
187
+ async def the_task(
188
+ a: str,
189
+ b: str,
190
+ retry: Retry = Retry(attempts=2),
191
+ ):
192
+ assert a == "a"
193
+ assert b == "b"
194
+
195
+ nonlocal calls
196
+ calls += 1
197
+
198
+ nonlocal first_call_time
199
+ if not first_call_time:
200
+ when = datetime.now(timezone.utc) - timedelta(days=1)
201
+ first_call_time = datetime.now(timezone.utc)
202
+ retry.at(when)
203
+ else:
204
+ nonlocal second_call_time
205
+ second_call_time = datetime.now(timezone.utc)
206
+
207
+ await docket.add(the_task)("a", "b")
208
+
209
+ await worker.run_until_finished()
210
+
211
+ assert calls == 2
212
+
213
+ assert isinstance(first_call_time, datetime)
214
+ assert isinstance(second_call_time, datetime)
215
+
216
+ delay = second_call_time - first_call_time
217
+ assert delay.total_seconds() > 0 < 1
218
+
219
+
98
220
  async def test_dependencies_error_for_missing_task_argument(
99
221
  docket: Docket, worker: Worker, caplog: pytest.LogCaptureFixture
100
222
  ):
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes