pydocket 0.6.1__tar.gz → 0.6.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pydocket might be problematic; consult the package registry's advisory page for more details.

Files changed (54)
  1. {pydocket-0.6.1 → pydocket-0.6.2}/PKG-INFO +1 -1
  2. {pydocket-0.6.1 → pydocket-0.6.2}/pyproject.toml +3 -0
  3. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/__init__.py +11 -9
  4. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/cli.py +8 -0
  5. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/dependencies.py +41 -1
  6. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/execution.py +5 -0
  7. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/worker.py +25 -1
  8. {pydocket-0.6.1 → pydocket-0.6.2}/tests/test_dependencies.py +48 -1
  9. {pydocket-0.6.1 → pydocket-0.6.2}/tests/test_fundamentals.py +64 -0
  10. {pydocket-0.6.1 → pydocket-0.6.2}/tests/test_worker.py +22 -0
  11. {pydocket-0.6.1 → pydocket-0.6.2}/.cursor/rules/general.mdc +0 -0
  12. {pydocket-0.6.1 → pydocket-0.6.2}/.cursor/rules/python-style.mdc +0 -0
  13. {pydocket-0.6.1 → pydocket-0.6.2}/.github/codecov.yml +0 -0
  14. {pydocket-0.6.1 → pydocket-0.6.2}/.github/workflows/chaos.yml +0 -0
  15. {pydocket-0.6.1 → pydocket-0.6.2}/.github/workflows/ci.yml +0 -0
  16. {pydocket-0.6.1 → pydocket-0.6.2}/.github/workflows/publish.yml +0 -0
  17. {pydocket-0.6.1 → pydocket-0.6.2}/.gitignore +0 -0
  18. {pydocket-0.6.1 → pydocket-0.6.2}/.pre-commit-config.yaml +0 -0
  19. {pydocket-0.6.1 → pydocket-0.6.2}/LICENSE +0 -0
  20. {pydocket-0.6.1 → pydocket-0.6.2}/README.md +0 -0
  21. {pydocket-0.6.1 → pydocket-0.6.2}/chaos/README.md +0 -0
  22. {pydocket-0.6.1 → pydocket-0.6.2}/chaos/__init__.py +0 -0
  23. {pydocket-0.6.1 → pydocket-0.6.2}/chaos/driver.py +0 -0
  24. {pydocket-0.6.1 → pydocket-0.6.2}/chaos/producer.py +0 -0
  25. {pydocket-0.6.1 → pydocket-0.6.2}/chaos/run +0 -0
  26. {pydocket-0.6.1 → pydocket-0.6.2}/chaos/tasks.py +0 -0
  27. {pydocket-0.6.1 → pydocket-0.6.2}/examples/__init__.py +0 -0
  28. {pydocket-0.6.1 → pydocket-0.6.2}/examples/common.py +0 -0
  29. {pydocket-0.6.1 → pydocket-0.6.2}/examples/find_and_flood.py +0 -0
  30. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/__main__.py +0 -0
  31. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/annotations.py +0 -0
  32. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/docket.py +0 -0
  33. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/instrumentation.py +0 -0
  34. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/py.typed +0 -0
  35. {pydocket-0.6.1 → pydocket-0.6.2}/src/docket/tasks.py +0 -0
  36. {pydocket-0.6.1 → pydocket-0.6.2}/telemetry/.gitignore +0 -0
  37. {pydocket-0.6.1 → pydocket-0.6.2}/telemetry/start +0 -0
  38. {pydocket-0.6.1 → pydocket-0.6.2}/telemetry/stop +0 -0
  39. {pydocket-0.6.1 → pydocket-0.6.2}/tests/__init__.py +0 -0
  40. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/__init__.py +0 -0
  41. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/conftest.py +0 -0
  42. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_module.py +0 -0
  43. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_parsing.py +0 -0
  44. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_snapshot.py +0 -0
  45. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_striking.py +0 -0
  46. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_tasks.py +0 -0
  47. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_version.py +0 -0
  48. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_worker.py +0 -0
  49. {pydocket-0.6.1 → pydocket-0.6.2}/tests/cli/test_workers.py +0 -0
  50. {pydocket-0.6.1 → pydocket-0.6.2}/tests/conftest.py +0 -0
  51. {pydocket-0.6.1 → pydocket-0.6.2}/tests/test_docket.py +0 -0
  52. {pydocket-0.6.1 → pydocket-0.6.2}/tests/test_instrumentation.py +0 -0
  53. {pydocket-0.6.1 → pydocket-0.6.2}/tests/test_striking.py +0 -0
  54. {pydocket-0.6.1 → pydocket-0.6.2}/uv.lock +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pydocket
3
- Version: 0.6.1
3
+ Version: 0.6.2
4
4
  Summary: A distributed background task system for Python functions
5
5
  Project-URL: Homepage, https://github.com/chrisguidry/docket
6
6
  Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
@@ -64,6 +64,9 @@ source = "vcs"
64
64
  [tool.hatch.build.targets.wheel]
65
65
  packages = ["src/docket"]
66
66
 
67
+ [tool.ruff]
68
+ target-version = "py312"
69
+
67
70
  [tool.pytest.ini_options]
68
71
  addopts = [
69
72
  "--numprocesses=logical",
@@ -17,6 +17,7 @@ from .dependencies import (
17
17
  ExponentialRetry,
18
18
  Perpetual,
19
19
  Retry,
20
+ TaskArgument,
20
21
  TaskKey,
21
22
  TaskLogger,
22
23
  Timeout,
@@ -26,19 +27,20 @@ from .execution import Execution
26
27
  from .worker import Worker
27
28
 
28
29
  __all__ = [
29
- "Docket",
30
- "Worker",
31
- "Execution",
30
+ "__version__",
32
31
  "CurrentDocket",
33
- "CurrentWorker",
34
32
  "CurrentExecution",
35
- "TaskKey",
36
- "TaskLogger",
37
- "Retry",
33
+ "CurrentWorker",
34
+ "Depends",
35
+ "Docket",
36
+ "Execution",
38
37
  "ExponentialRetry",
39
38
  "Logged",
40
39
  "Perpetual",
40
+ "Retry",
41
+ "TaskArgument",
42
+ "TaskKey",
43
+ "TaskLogger",
41
44
  "Timeout",
42
- "Depends",
43
- "__version__",
45
+ "Worker",
44
46
  ]
@@ -245,6 +245,13 @@ def worker(
245
245
  envvar="DOCKET_WORKER_SCHEDULING_RESOLUTION",
246
246
  ),
247
247
  ] = timedelta(milliseconds=250),
248
+ schedule_automatic_tasks: Annotated[
249
+ bool,
250
+ typer.Option(
251
+ "--schedule-automatic-tasks",
252
+ help="Schedule automatic tasks",
253
+ ),
254
+ ] = True,
248
255
  until_finished: Annotated[
249
256
  bool,
250
257
  typer.Option(
@@ -270,6 +277,7 @@ def worker(
270
277
  reconnection_delay=reconnection_delay,
271
278
  minimum_check_interval=minimum_check_interval,
272
279
  scheduling_resolution=scheduling_resolution,
280
+ schedule_automatic_tasks=schedule_automatic_tasks,
273
281
  until_finished=until_finished,
274
282
  metrics_port=metrics_port,
275
283
  tasks=tasks,
@@ -79,6 +79,22 @@ def TaskKey() -> str:
79
79
  return cast(str, _TaskKey())
80
80
 
81
81
 
82
+ class _TaskArgument(Dependency):
83
+ parameter: str | None
84
+
85
+ def __init__(self, parameter: str | None = None) -> None:
86
+ self.parameter = parameter
87
+
88
+ async def __aenter__(self) -> Any:
89
+ assert self.parameter is not None
90
+ execution = self.execution.get()
91
+ return execution.get_argument(self.parameter)
92
+
93
+
94
+ def TaskArgument(parameter: str | None = None) -> Any:
95
+ return cast(Any, _TaskArgument(parameter))
96
+
97
+
82
98
  class _TaskLogger(Dependency):
83
99
  async def __aenter__(self) -> logging.LoggerAdapter[logging.Logger]:
84
100
  execution = self.execution.get()
@@ -275,6 +291,11 @@ class _Depends(Dependency, Generic[R]):
275
291
  parameters = get_dependency_parameters(function)
276
292
 
277
293
  for parameter, dependency in parameters.items():
294
+ # Special case for TaskArguments, they are "magical" and infer the parameter
295
+ # they refer to from the parameter name (unless otherwise specified)
296
+ if isinstance(dependency, _TaskArgument) and not dependency.parameter:
297
+ dependency.parameter = parameter
298
+
278
299
  arguments[parameter] = await stack.enter_async_context(dependency)
279
300
 
280
301
  return arguments
@@ -338,6 +359,12 @@ def validate_dependencies(function: TaskFunction) -> None:
338
359
  )
339
360
 
340
361
 
362
+ class FailedDependency:
363
+ def __init__(self, parameter: str, error: Exception) -> None:
364
+ self.parameter = parameter
365
+ self.error = error
366
+
367
+
341
368
  @asynccontextmanager
342
369
  async def resolved_dependencies(
343
370
  worker: "Worker", execution: Execution
@@ -361,6 +388,19 @@ async def resolved_dependencies(
361
388
  arguments[parameter] = kwargs[parameter]
362
389
  continue
363
390
 
364
- arguments[parameter] = await stack.enter_async_context(dependency)
391
+ # Special case for TaskArguments, they are "magical" and infer the parameter
392
+ # they refer to from the parameter name (unless otherwise specified). At
393
+ # the top-level task function call, it doesn't make sense to specify one
394
+ # _without_ a parameter name, so we'll call that a failed dependency.
395
+ if isinstance(dependency, _TaskArgument) and not dependency.parameter:
396
+ arguments[parameter] = FailedDependency(
397
+ parameter, ValueError("No parameter name specified")
398
+ )
399
+ continue
400
+
401
+ try:
402
+ arguments[parameter] = await stack.enter_async_context(dependency)
403
+ except Exception as error:
404
+ arguments[parameter] = FailedDependency(parameter, error)
365
405
 
366
406
  yield arguments
@@ -83,6 +83,11 @@ class Execution:
83
83
  "docket.attempt": self.attempt,
84
84
  }
85
85
 
86
+ def get_argument(self, parameter: str) -> Any:
87
+ signature = get_signature(self.function)
88
+ bound_args = signature.bind(*self.args, **self.kwargs)
89
+ return bound_args.arguments[parameter]
90
+
86
91
  def call_repr(self) -> str:
87
92
  arguments: list[str] = []
88
93
  function_name = self.function.__name__
@@ -22,6 +22,7 @@ from docket.execution import get_signature
22
22
 
23
23
  from .dependencies import (
24
24
  Dependency,
25
+ FailedDependency,
25
26
  Perpetual,
26
27
  Retry,
27
28
  Timeout,
@@ -71,6 +72,7 @@ class Worker:
71
72
  reconnection_delay: timedelta
72
73
  minimum_check_interval: timedelta
73
74
  scheduling_resolution: timedelta
75
+ schedule_automatic_tasks: bool
74
76
 
75
77
  def __init__(
76
78
  self,
@@ -81,6 +83,7 @@ class Worker:
81
83
  reconnection_delay: timedelta = timedelta(seconds=5),
82
84
  minimum_check_interval: timedelta = timedelta(milliseconds=250),
83
85
  scheduling_resolution: timedelta = timedelta(milliseconds=250),
86
+ schedule_automatic_tasks: bool = True,
84
87
  ) -> None:
85
88
  self.docket = docket
86
89
  self.name = name or f"worker:{uuid4()}"
@@ -89,6 +92,7 @@ class Worker:
89
92
  self.reconnection_delay = reconnection_delay
90
93
  self.minimum_check_interval = minimum_check_interval
91
94
  self.scheduling_resolution = scheduling_resolution
95
+ self.schedule_automatic_tasks = schedule_automatic_tasks
92
96
 
93
97
  async def __aenter__(self) -> Self:
94
98
  self._heartbeat_task = asyncio.create_task(self._heartbeat())
@@ -134,6 +138,7 @@ class Worker:
134
138
  reconnection_delay: timedelta = timedelta(seconds=5),
135
139
  minimum_check_interval: timedelta = timedelta(milliseconds=100),
136
140
  scheduling_resolution: timedelta = timedelta(milliseconds=250),
141
+ schedule_automatic_tasks: bool = True,
137
142
  until_finished: bool = False,
138
143
  metrics_port: int | None = None,
139
144
  tasks: list[str] = ["docket.tasks:standard_tasks"],
@@ -151,6 +156,7 @@ class Worker:
151
156
  reconnection_delay=reconnection_delay,
152
157
  minimum_check_interval=minimum_check_interval,
153
158
  scheduling_resolution=scheduling_resolution,
159
+ schedule_automatic_tasks=schedule_automatic_tasks,
154
160
  ) as worker:
155
161
  if until_finished:
156
162
  await worker.run_until_finished()
@@ -220,7 +226,8 @@ class Worker:
220
226
  async def _worker_loop(self, redis: Redis, forever: bool = False):
221
227
  worker_stopping = asyncio.Event()
222
228
 
223
- await self._schedule_all_automatic_perpetual_tasks()
229
+ if self.schedule_automatic_tasks:
230
+ await self._schedule_all_automatic_perpetual_tasks()
224
231
 
225
232
  scheduler_task = asyncio.create_task(
226
233
  self._scheduler_loop(redis, worker_stopping)
@@ -520,6 +527,23 @@ class Worker:
520
527
  await self._delete_known_task(redis, execution)
521
528
 
522
529
  try:
530
+ dependency_failures = {
531
+ k: v
532
+ for k, v in dependencies.items()
533
+ if isinstance(v, FailedDependency)
534
+ }
535
+ if dependency_failures:
536
+ raise ExceptionGroup(
537
+ (
538
+ "Failed to resolve dependencies for parameter(s): "
539
+ + ", ".join(dependency_failures.keys())
540
+ ),
541
+ [
542
+ dependency.error
543
+ for dependency in dependency_failures.values()
544
+ ],
545
+ )
546
+
523
547
  if timeout := get_single_dependency_of_type(dependencies, Timeout):
524
548
  await self._run_function_with_timeout(
525
549
  execution, dependencies, timeout
@@ -1,7 +1,9 @@
1
+ import logging
2
+
1
3
  import pytest
2
4
 
3
5
  from docket import CurrentDocket, CurrentWorker, Docket, Worker
4
- from docket.dependencies import Retry
6
+ from docket.dependencies import Depends, Retry, TaskArgument
5
7
 
6
8
 
7
9
  async def test_dependencies_may_be_duplicated(docket: Docket, worker: Worker):
@@ -91,3 +93,48 @@ async def test_user_provide_retries_are_used(docket: Docket, worker: Worker):
91
93
  await worker.run_until_finished()
92
94
 
93
95
  assert calls == 2
96
+
97
+
98
+ async def test_dependencies_error_for_missing_task_argument(
99
+ docket: Docket, worker: Worker, caplog: pytest.LogCaptureFixture
100
+ ):
101
+ """A task will fail when asking for a missing task argument"""
102
+
103
+ async def dependency_one(nope: list[str] = TaskArgument()) -> list[str]:
104
+ raise NotImplementedError("This should not be called") # pragma: no cover
105
+
106
+ async def dependent_task(
107
+ a: list[str],
108
+ b: list[str] = TaskArgument("a"),
109
+ c: list[str] = Depends(dependency_one),
110
+ ) -> None:
111
+ raise NotImplementedError("This should not be called") # pragma: no cover
112
+
113
+ await docket.add(dependent_task)(a=["hello", "world"])
114
+
115
+ await worker.run_until_finished()
116
+
117
+ with caplog.at_level(logging.ERROR):
118
+ await worker.run_until_finished()
119
+
120
+ assert "Failed to resolve dependencies for parameter(s): c" in caplog.text
121
+ assert "ExceptionGroup" in caplog.text
122
+ assert "KeyError: 'nope'" in caplog.text
123
+
124
+
125
+ async def test_a_task_argument_cannot_ask_for_itself(
126
+ docket: Docket, worker: Worker, caplog: pytest.LogCaptureFixture
127
+ ):
128
+ """A task argument cannot ask for itself"""
129
+
130
+ # This task would be nonsense, because it's asking for itself.
131
+ async def dependent_task(a: list[str] = TaskArgument()) -> None:
132
+ raise NotImplementedError("This should not be called") # pragma: no cover
133
+
134
+ await docket.add(dependent_task)()
135
+
136
+ with caplog.at_level(logging.ERROR):
137
+ await worker.run_until_finished()
138
+
139
+ assert "Failed to resolve dependencies for parameter(s): a" in caplog.text
140
+ assert "ValueError: No parameter name specified" in caplog.text
@@ -27,6 +27,7 @@ from docket import (
27
27
  Logged,
28
28
  Perpetual,
29
29
  Retry,
30
+ TaskArgument,
30
31
  TaskKey,
31
32
  TaskLogger,
32
33
  Timeout,
@@ -1383,3 +1384,66 @@ async def test_dependencies_can_ask_for_docket_dependencies(
1383
1384
  await docket.add(dependent_task)()
1384
1385
 
1385
1386
  await worker.run_until_finished()
1387
+
1388
+
1389
+ async def test_dependency_failures_are_task_failures(
1390
+ docket: Docket, worker: Worker, caplog: pytest.LogCaptureFixture
1391
+ ):
1392
+ """A task dependency failure will cause the task to fail"""
1393
+
1394
+ called: bool = False
1395
+
1396
+ async def dependency_one() -> str:
1397
+ raise ValueError("this one is bad")
1398
+
1399
+ async def dependency_two() -> str:
1400
+ raise ValueError("and so is this one")
1401
+
1402
+ async def dependent_task(
1403
+ a: str = Depends(dependency_one),
1404
+ b: str = Depends(dependency_two),
1405
+ ) -> None:
1406
+ nonlocal called
1407
+ called = True # pragma: no cover
1408
+
1409
+ await docket.add(dependent_task)()
1410
+
1411
+ with caplog.at_level(logging.ERROR):
1412
+ await worker.run_until_finished()
1413
+
1414
+ assert not called
1415
+
1416
+ assert "Failed to resolve dependencies for parameter(s): a, b" in caplog.text
1417
+ assert "ValueError: this one is bad" in caplog.text
1418
+ assert "ValueError: and so is this one" in caplog.text
1419
+
1420
+
1421
+ async def test_dependencies_can_ask_for_task_arguments(docket: Docket, worker: Worker):
1422
+ """A task dependency can ask for a task argument"""
1423
+
1424
+ called = 0
1425
+
1426
+ async def dependency_one(a: list[str] = TaskArgument()) -> list[str]:
1427
+ return a
1428
+
1429
+ async def dependency_two(another_name: list[str] = TaskArgument("a")) -> list[str]:
1430
+ return another_name
1431
+
1432
+ async def dependent_task(
1433
+ a: list[str],
1434
+ b: list[str] = TaskArgument("a"),
1435
+ c: list[str] = Depends(dependency_one),
1436
+ d: list[str] = Depends(dependency_two),
1437
+ ) -> None:
1438
+ assert a is b
1439
+ assert a is c
1440
+ assert a is d
1441
+
1442
+ nonlocal called
1443
+ called += 1
1444
+
1445
+ await docket.add(dependent_task)(a=["hello", "world"])
1446
+
1447
+ await worker.run_until_finished()
1448
+
1449
+ assert called == 1
@@ -491,3 +491,25 @@ def test_formatting_durations():
491
491
  assert ms(1000.000) == " 1000s "
492
492
  assert ms(10000.00) == " 10000s "
493
493
  assert ms(100000.0) == "100000s "
494
+
495
+
496
+ async def test_worker_can_be_told_to_skip_automatic_tasks(docket: Docket):
497
+ """A worker can be told to skip automatic tasks"""
498
+
499
+ called = False
500
+
501
+ async def perpetual_task(
502
+ perpetual: Perpetual = Perpetual(
503
+ every=timedelta(milliseconds=50), automatic=True
504
+ ),
505
+ ):
506
+ nonlocal called
507
+ called = True # pragma: no cover
508
+
509
+ docket.register(perpetual_task)
510
+
511
+ # Without the flag, this would hang because the task would always be scheduled
512
+ async with Worker(docket, schedule_automatic_tasks=False) as worker:
513
+ await worker.run_until_finished()
514
+
515
+ assert not called
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes