pydocket 0.7.0__tar.gz → 0.7.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pydocket might be problematic. Click here for more details.
- {pydocket-0.7.0 → pydocket-0.7.1}/.github/workflows/ci.yml +2 -2
- {pydocket-0.7.0 → pydocket-0.7.1}/.github/workflows/publish.yml +9 -4
- {pydocket-0.7.0 → pydocket-0.7.1}/.gitignore +7 -9
- pydocket-0.7.1/CLAUDE.md +127 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/PKG-INFO +1 -1
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/annotations.py +4 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/cli.py +26 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/dependencies.py +3 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/docket.py +43 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/execution.py +3 -1
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/instrumentation.py +6 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/worker.py +8 -3
- pydocket-0.7.1/tests/cli/test_clear.py +253 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/conftest.py +5 -1
- pydocket-0.7.1/tests/test_docket.py +168 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/test_instrumentation.py +92 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/uv.lock +417 -416
- pydocket-0.7.0/tests/test_docket.py +0 -14
- {pydocket-0.7.0 → pydocket-0.7.1}/.cursor/rules/general.mdc +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/.cursor/rules/python-style.mdc +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/.github/codecov.yml +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/.github/workflows/chaos.yml +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/.github/workflows/docs.yml +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/.pre-commit-config.yaml +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/LICENSE +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/README.md +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/chaos/README.md +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/chaos/__init__.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/chaos/driver.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/chaos/producer.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/chaos/run +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/chaos/tasks.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/docs/api-reference.md +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/docs/getting-started.md +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/docs/index.md +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/examples/__init__.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/examples/common.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/examples/find_and_flood.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/examples/self_perpetuating.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/mkdocs.yml +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/pyproject.toml +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/__init__.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/__main__.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/py.typed +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/src/docket/tasks.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/telemetry/.gitignore +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/telemetry/start +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/telemetry/stop +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/__init__.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/__init__.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/conftest.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_module.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_parsing.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_snapshot.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_striking.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_tasks.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_version.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_worker.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/cli/test_workers.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/test_dependencies.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/test_execution.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/test_fundamentals.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/test_striking.py +0 -0
- {pydocket-0.7.0 → pydocket-0.7.1}/tests/test_worker.py +0 -0
|
@@ -15,7 +15,7 @@ jobs:
|
|
|
15
15
|
fail-fast: false
|
|
16
16
|
matrix:
|
|
17
17
|
python-version: ["3.12", "3.13"]
|
|
18
|
-
redis-version: ["6.2", "7.4"]
|
|
18
|
+
redis-version: ["6.2", "7.4", "valkey-8.0"]
|
|
19
19
|
redis-py-version: [">=4.6,<5", ">=5"]
|
|
20
20
|
|
|
21
21
|
steps:
|
|
@@ -34,7 +34,7 @@ jobs:
|
|
|
34
34
|
- name: Run tests
|
|
35
35
|
env:
|
|
36
36
|
REDIS_VERSION: ${{ matrix.redis-version }}
|
|
37
|
-
run: uv run pytest --cov-branch --cov-report=xml --cov-report=term-missing:skip-covered
|
|
37
|
+
run: uv run pytest --cov-branch --cov-fail-under=100 --cov-report=xml --cov-report=term-missing:skip-covered
|
|
38
38
|
|
|
39
39
|
- name: Upload coverage reports to Codecov
|
|
40
40
|
uses: codecov/codecov-action@v5
|
|
@@ -13,8 +13,11 @@ jobs:
|
|
|
13
13
|
name: Build and publish to PyPI
|
|
14
14
|
runs-on: ubuntu-latest
|
|
15
15
|
needs: ci
|
|
16
|
+
environment:
|
|
17
|
+
name: pypi
|
|
18
|
+
url: https://pypi.org/p/pydocket
|
|
16
19
|
permissions:
|
|
17
|
-
id-token: write
|
|
20
|
+
id-token: write # Required for trusted publishing and PEP 740 attestations
|
|
18
21
|
contents: read
|
|
19
22
|
|
|
20
23
|
steps:
|
|
@@ -31,10 +34,12 @@ jobs:
|
|
|
31
34
|
cache-dependency-glob: "pyproject.toml"
|
|
32
35
|
|
|
33
36
|
- name: Install build dependencies
|
|
34
|
-
run: uv pip install
|
|
37
|
+
run: uv pip install hatchling hatch-vcs
|
|
35
38
|
|
|
36
39
|
- name: Build package
|
|
37
40
|
run: uv build
|
|
38
41
|
|
|
39
|
-
- name: Publish to PyPI
|
|
40
|
-
|
|
42
|
+
- name: Publish to PyPI with PEP 740 attestations
|
|
43
|
+
uses: pypa/gh-action-pypi-publish@release/v1
|
|
44
|
+
with:
|
|
45
|
+
packages-dir: dist/
|
|
@@ -1,13 +1,11 @@
|
|
|
1
|
-
# Python-generated files
|
|
2
|
-
__pycache__/
|
|
3
|
-
*.py[oc]
|
|
4
|
-
build/
|
|
5
|
-
dist/
|
|
6
|
-
wheels/
|
|
7
1
|
*.egg-info
|
|
8
|
-
|
|
9
|
-
# Virtual environments
|
|
10
|
-
.venv
|
|
2
|
+
*.py[oc]
|
|
11
3
|
.coverage
|
|
12
4
|
.envrc
|
|
13
5
|
.python-version
|
|
6
|
+
.venv
|
|
7
|
+
.worktrees/
|
|
8
|
+
__pycache__/
|
|
9
|
+
build/
|
|
10
|
+
dist/
|
|
11
|
+
wheels/
|
pydocket-0.7.1/CLAUDE.md
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
# CLAUDE.md
|
|
2
|
+
|
|
3
|
+
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
|
4
|
+
|
|
5
|
+
## Project Overview
|
|
6
|
+
|
|
7
|
+
**Docket** (`pydocket` on PyPI) is a distributed background task system for Python functions with Redis-backed persistence. It enables scheduling both immediate and future work with comprehensive dependency injection, retry mechanisms, and fault tolerance.
|
|
8
|
+
|
|
9
|
+
**Key Requirements**: Python 3.12+, Redis 6.2+ or Valkey 8.0+
|
|
10
|
+
|
|
11
|
+
## Development Commands
|
|
12
|
+
|
|
13
|
+
### Testing
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
# Run full test suite with coverage and parallel execution
|
|
17
|
+
pytest
|
|
18
|
+
|
|
19
|
+
# Run specific test
|
|
20
|
+
pytest tests/test_docket.py::test_specific_function
|
|
21
|
+
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
The project REQUIRES 100% test coverage
|
|
25
|
+
|
|
26
|
+
### Code Quality
|
|
27
|
+
|
|
28
|
+
```bash
|
|
29
|
+
# Lint and format code
|
|
30
|
+
ruff check
|
|
31
|
+
ruff format
|
|
32
|
+
|
|
33
|
+
# Type checking
|
|
34
|
+
pyright
|
|
35
|
+
pyright tests
|
|
36
|
+
|
|
37
|
+
# Run all pre-commit hooks
|
|
38
|
+
pre-commit run --all-files
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
### Development Setup
|
|
42
|
+
|
|
43
|
+
```bash
|
|
44
|
+
# Install development dependencies
|
|
45
|
+
uv sync --group dev
|
|
46
|
+
|
|
47
|
+
# Install pre-commit hooks
|
|
48
|
+
pre-commit install
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
### Git Workflow
|
|
52
|
+
|
|
53
|
+
- This project uses Github for issue tracking
|
|
54
|
+
- This project can use git worktrees under .worktrees/
|
|
55
|
+
|
|
56
|
+
## Core Architecture
|
|
57
|
+
|
|
58
|
+
### Key Classes
|
|
59
|
+
|
|
60
|
+
- **`Docket`** (`src/docket/docket.py`): Central task registry and scheduler
|
|
61
|
+
|
|
62
|
+
- `add()`: Schedule tasks for execution
|
|
63
|
+
- `replace()`: Replace existing scheduled tasks
|
|
64
|
+
- `cancel()`: Cancel pending tasks
|
|
65
|
+
- `strike()`/`restore()`: Conditionally block/unblock tasks
|
|
66
|
+
- `snapshot()`: Get current state for observability
|
|
67
|
+
|
|
68
|
+
- **`Worker`** (`src/docket/worker.py`): Task execution engine
|
|
69
|
+
|
|
70
|
+
- `run_forever()`/`run_until_finished()`: Main execution loops
|
|
71
|
+
- Handles concurrency, retries, and dependency injection
|
|
72
|
+
- Maintains heartbeat for liveness tracking
|
|
73
|
+
|
|
74
|
+
- **`Execution`** (`src/docket/execution.py`): Task execution context with metadata
|
|
75
|
+
|
|
76
|
+
### Dependencies System (`src/docket/dependencies.py`)
|
|
77
|
+
|
|
78
|
+
Rich dependency injection supporting:
|
|
79
|
+
|
|
80
|
+
- Context access: `CurrentDocket`, `CurrentWorker`, `CurrentExecution`
|
|
81
|
+
- Retry strategies: `Retry`, `ExponentialRetry`
|
|
82
|
+
- Special behaviors: `Perpetual` (self-rescheduling), `Timeout`
|
|
83
|
+
- Custom injection: `Depends()`
|
|
84
|
+
- Contextual logging: `TaskLogger`
|
|
85
|
+
|
|
86
|
+
### Redis Data Model
|
|
87
|
+
|
|
88
|
+
- **Streams**: `{docket}:stream` (ready tasks), `{docket}:strikes` (commands)
|
|
89
|
+
- **Sorted Sets**: `{docket}:queue` (scheduled tasks), `{docket}:workers` (heartbeats)
|
|
90
|
+
- **Hashes**: `{docket}:{key}` (parked task data)
|
|
91
|
+
- **Sets**: `{docket}:worker-tasks:{worker}` (worker capabilities)
|
|
92
|
+
|
|
93
|
+
### Task Lifecycle
|
|
94
|
+
|
|
95
|
+
1. Registration with `Docket.register()` or `@docket.task`
|
|
96
|
+
2. Scheduling: immediate → Redis stream, future → Redis sorted set
|
|
97
|
+
3. Worker processing: scheduler moves due tasks, workers consume via consumer groups
|
|
98
|
+
4. Execution: dependency injection, retry logic, acknowledgment
|
|
99
|
+
|
|
100
|
+
## Project Structure
|
|
101
|
+
|
|
102
|
+
### Source Code
|
|
103
|
+
|
|
104
|
+
- `src/docket/` - Main package
|
|
105
|
+
- `__init__.py` - Public API exports
|
|
106
|
+
- `docket.py` - Core Docket class
|
|
107
|
+
- `worker.py` - Worker implementation
|
|
108
|
+
- `execution.py` - Task execution context
|
|
109
|
+
- `dependencies.py` - Dependency injection system
|
|
110
|
+
- `tasks.py` - Built-in utility tasks
|
|
111
|
+
- `cli.py` - Command-line interface
|
|
112
|
+
|
|
113
|
+
### Testing and Examples
|
|
114
|
+
|
|
115
|
+
- `tests/` - Comprehensive test suite
|
|
116
|
+
- `examples/` - Usage examples
|
|
117
|
+
- `chaos/` - Chaos testing framework
|
|
118
|
+
|
|
119
|
+
## CLI Usage
|
|
120
|
+
|
|
121
|
+
```bash
|
|
122
|
+
# Run a worker
|
|
123
|
+
docket worker --url redis://localhost:6379/0 --tasks your.module --concurrency 4
|
|
124
|
+
|
|
125
|
+
# See all commands
|
|
126
|
+
docket --help
|
|
127
|
+
```
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: pydocket
|
|
3
|
-
Version: 0.7.0
|
|
3
|
+
Version: 0.7.1
|
|
4
4
|
Summary: A distributed background task system for Python functions
|
|
5
5
|
Project-URL: Homepage, https://github.com/chrisguidry/docket
|
|
6
6
|
Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
|
|
@@ -2,6 +2,8 @@ import abc
|
|
|
2
2
|
import inspect
|
|
3
3
|
from typing import Any, Iterable, Mapping, Self
|
|
4
4
|
|
|
5
|
+
from .instrumentation import CACHE_SIZE
|
|
6
|
+
|
|
5
7
|
|
|
6
8
|
class Annotation(abc.ABC):
|
|
7
9
|
_cache: dict[tuple[type[Self], inspect.Signature], Mapping[str, Self]] = {}
|
|
@@ -10,6 +12,7 @@ class Annotation(abc.ABC):
|
|
|
10
12
|
def annotated_parameters(cls, signature: inspect.Signature) -> Mapping[str, Self]:
|
|
11
13
|
key = (cls, signature)
|
|
12
14
|
if key in cls._cache:
|
|
15
|
+
CACHE_SIZE.set(len(cls._cache), {"cache": "annotation"})
|
|
13
16
|
return cls._cache[key]
|
|
14
17
|
|
|
15
18
|
annotated: dict[str, Self] = {}
|
|
@@ -30,6 +33,7 @@ class Annotation(abc.ABC):
|
|
|
30
33
|
annotated[param_name] = arg_type()
|
|
31
34
|
|
|
32
35
|
cls._cache[key] = annotated
|
|
36
|
+
CACHE_SIZE.set(len(cls._cache), {"cache": "annotation"})
|
|
33
37
|
return annotated
|
|
34
38
|
|
|
35
39
|
|
|
@@ -358,6 +358,32 @@ def strike(
|
|
|
358
358
|
asyncio.run(run())
|
|
359
359
|
|
|
360
360
|
|
|
361
|
+
@app.command(help="Clear all pending and scheduled tasks from the docket")
|
|
362
|
+
def clear(
|
|
363
|
+
docket_: Annotated[
|
|
364
|
+
str,
|
|
365
|
+
typer.Option(
|
|
366
|
+
"--docket",
|
|
367
|
+
help="The name of the docket",
|
|
368
|
+
envvar="DOCKET_NAME",
|
|
369
|
+
),
|
|
370
|
+
] = "docket",
|
|
371
|
+
url: Annotated[
|
|
372
|
+
str,
|
|
373
|
+
typer.Option(
|
|
374
|
+
help="The URL of the Redis server",
|
|
375
|
+
envvar="DOCKET_URL",
|
|
376
|
+
),
|
|
377
|
+
] = "redis://localhost:6379/0",
|
|
378
|
+
) -> None:
|
|
379
|
+
async def run() -> None:
|
|
380
|
+
async with Docket(name=docket_, url=url) as docket:
|
|
381
|
+
cleared_count = await docket.clear()
|
|
382
|
+
print(f"Cleared {cleared_count} tasks from docket '{docket_}'")
|
|
383
|
+
|
|
384
|
+
asyncio.run(run())
|
|
385
|
+
|
|
386
|
+
|
|
361
387
|
@app.command(help="Restores a task or parameters to the Docket")
|
|
362
388
|
def restore(
|
|
363
389
|
function: Annotated[
|
|
@@ -21,6 +21,7 @@ from typing import (
|
|
|
21
21
|
|
|
22
22
|
from .docket import Docket
|
|
23
23
|
from .execution import Execution, TaskFunction, get_signature
|
|
24
|
+
from .instrumentation import CACHE_SIZE
|
|
24
25
|
|
|
25
26
|
if TYPE_CHECKING: # pragma: no cover
|
|
26
27
|
from .worker import Worker
|
|
@@ -415,6 +416,7 @@ def get_dependency_parameters(
|
|
|
415
416
|
function: TaskFunction | DependencyFunction[Any],
|
|
416
417
|
) -> dict[str, Dependency]:
|
|
417
418
|
if function in _parameter_cache:
|
|
419
|
+
CACHE_SIZE.set(len(_parameter_cache), {"cache": "parameter"})
|
|
418
420
|
return _parameter_cache[function]
|
|
419
421
|
|
|
420
422
|
dependencies: dict[str, Dependency] = {}
|
|
@@ -428,6 +430,7 @@ def get_dependency_parameters(
|
|
|
428
430
|
dependencies[parameter] = param.default
|
|
429
431
|
|
|
430
432
|
_parameter_cache[function] = dependencies
|
|
433
|
+
CACHE_SIZE.set(len(_parameter_cache), {"cache": "parameter"})
|
|
431
434
|
return dependencies
|
|
432
435
|
|
|
433
436
|
|
|
@@ -743,3 +743,46 @@ class Docket:
|
|
|
743
743
|
workers.append(WorkerInfo(worker_name, last_seen, task_names))
|
|
744
744
|
|
|
745
745
|
return workers
|
|
746
|
+
|
|
747
|
+
async def clear(self) -> int:
|
|
748
|
+
"""Clear all pending and scheduled tasks from the docket.
|
|
749
|
+
|
|
750
|
+
This removes all tasks from the stream (immediate tasks) and queue
|
|
751
|
+
(scheduled tasks), along with their associated parked data. Running
|
|
752
|
+
tasks are not affected.
|
|
753
|
+
|
|
754
|
+
Returns:
|
|
755
|
+
The total number of tasks that were cleared.
|
|
756
|
+
"""
|
|
757
|
+
with tracer.start_as_current_span(
|
|
758
|
+
"docket.clear",
|
|
759
|
+
attributes=self.labels(),
|
|
760
|
+
):
|
|
761
|
+
async with self.redis() as redis:
|
|
762
|
+
async with redis.pipeline() as pipeline:
|
|
763
|
+
# Get counts before clearing
|
|
764
|
+
pipeline.xlen(self.stream_key)
|
|
765
|
+
pipeline.zcard(self.queue_key)
|
|
766
|
+
pipeline.zrange(self.queue_key, 0, -1)
|
|
767
|
+
|
|
768
|
+
stream_count: int
|
|
769
|
+
queue_count: int
|
|
770
|
+
scheduled_keys: list[bytes]
|
|
771
|
+
stream_count, queue_count, scheduled_keys = await pipeline.execute()
|
|
772
|
+
|
|
773
|
+
# Clear all data
|
|
774
|
+
# Trim stream to 0 messages instead of deleting it to preserve consumer group
|
|
775
|
+
if stream_count > 0:
|
|
776
|
+
pipeline.xtrim(self.stream_key, maxlen=0, approximate=False)
|
|
777
|
+
pipeline.delete(self.queue_key)
|
|
778
|
+
|
|
779
|
+
# Clear parked task data and known task keys
|
|
780
|
+
for key_bytes in scheduled_keys:
|
|
781
|
+
key = key_bytes.decode()
|
|
782
|
+
pipeline.delete(self.parked_task_key(key))
|
|
783
|
+
pipeline.delete(self.known_task_key(key))
|
|
784
|
+
|
|
785
|
+
await pipeline.execute()
|
|
786
|
+
|
|
787
|
+
total_cleared = stream_count + queue_count
|
|
788
|
+
return total_cleared
|
|
@@ -19,7 +19,7 @@ import opentelemetry.context
|
|
|
19
19
|
from opentelemetry import propagate, trace
|
|
20
20
|
|
|
21
21
|
from .annotations import Logged
|
|
22
|
-
from .instrumentation import message_getter
|
|
22
|
+
from .instrumentation import CACHE_SIZE, message_getter
|
|
23
23
|
|
|
24
24
|
logger: logging.Logger = logging.getLogger(__name__)
|
|
25
25
|
|
|
@@ -32,10 +32,12 @@ _signature_cache: dict[Callable[..., Any], inspect.Signature] = {}
|
|
|
32
32
|
|
|
33
33
|
def get_signature(function: Callable[..., Any]) -> inspect.Signature:
|
|
34
34
|
if function in _signature_cache:
|
|
35
|
+
CACHE_SIZE.set(len(_signature_cache), {"cache": "signature"})
|
|
35
36
|
return _signature_cache[function]
|
|
36
37
|
|
|
37
38
|
signature = inspect.signature(function)
|
|
38
39
|
_signature_cache[function] = signature
|
|
40
|
+
CACHE_SIZE.set(len(_signature_cache), {"cache": "signature"})
|
|
39
41
|
return signature
|
|
40
42
|
|
|
41
43
|
|
|
@@ -15,7 +15,7 @@ from typing import (
|
|
|
15
15
|
)
|
|
16
16
|
|
|
17
17
|
from opentelemetry import trace
|
|
18
|
-
from opentelemetry.trace import Tracer
|
|
18
|
+
from opentelemetry.trace import Status, StatusCode, Tracer
|
|
19
19
|
from redis.asyncio import Redis
|
|
20
20
|
from redis.exceptions import ConnectionError, LockError
|
|
21
21
|
|
|
@@ -531,7 +531,7 @@ class Worker:
|
|
|
531
531
|
"code.function.name": execution.function.__name__,
|
|
532
532
|
},
|
|
533
533
|
links=execution.incoming_span_links(),
|
|
534
|
-
):
|
|
534
|
+
) as span:
|
|
535
535
|
try:
|
|
536
536
|
async with resolved_dependencies(self, execution) as dependencies:
|
|
537
537
|
# Preemptively reschedule the perpetual task for the future, or clear
|
|
@@ -576,6 +576,8 @@ class Worker:
|
|
|
576
576
|
duration = log_context["duration"] = time.time() - start
|
|
577
577
|
TASKS_SUCCEEDED.add(1, counter_labels)
|
|
578
578
|
|
|
579
|
+
span.set_status(Status(StatusCode.OK))
|
|
580
|
+
|
|
579
581
|
rescheduled = await self._perpetuate_if_requested(
|
|
580
582
|
execution, dependencies, timedelta(seconds=duration)
|
|
581
583
|
)
|
|
@@ -584,10 +586,13 @@ class Worker:
|
|
|
584
586
|
logger.info(
|
|
585
587
|
"%s [%s] %s", arrow, ms(duration), call, extra=log_context
|
|
586
588
|
)
|
|
587
|
-
except Exception:
|
|
589
|
+
except Exception as e:
|
|
588
590
|
duration = log_context["duration"] = time.time() - start
|
|
589
591
|
TASKS_FAILED.add(1, counter_labels)
|
|
590
592
|
|
|
593
|
+
span.record_exception(e)
|
|
594
|
+
span.set_status(Status(StatusCode.ERROR, str(e)))
|
|
595
|
+
|
|
591
596
|
retried = await self._retry_if_requested(execution, dependencies)
|
|
592
597
|
if not retried:
|
|
593
598
|
retried = await self._perpetuate_if_requested(
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
from datetime import datetime, timedelta, timezone
|
|
3
|
+
from unittest.mock import AsyncMock
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
from typer.testing import CliRunner
|
|
7
|
+
|
|
8
|
+
from docket.cli import app
|
|
9
|
+
from docket.docket import Docket
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@pytest.fixture(autouse=True)
|
|
13
|
+
async def empty_docket(docket: Docket):
|
|
14
|
+
"""Ensure that the docket starts empty"""
|
|
15
|
+
await docket.clear()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def test_clear_command_empty_docket(docket: Docket, runner: CliRunner):
|
|
19
|
+
"""Should clear empty docket and report 0 tasks cleared"""
|
|
20
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
21
|
+
None,
|
|
22
|
+
runner.invoke,
|
|
23
|
+
app,
|
|
24
|
+
[
|
|
25
|
+
"clear",
|
|
26
|
+
"--url",
|
|
27
|
+
docket.url,
|
|
28
|
+
"--docket",
|
|
29
|
+
docket.name,
|
|
30
|
+
],
|
|
31
|
+
)
|
|
32
|
+
assert result.exit_code == 0, result.output
|
|
33
|
+
assert "Cleared 0 tasks" in result.output
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
async def test_clear_command_with_immediate_tasks(
|
|
37
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock
|
|
38
|
+
):
|
|
39
|
+
"""Should clear immediate tasks and report count"""
|
|
40
|
+
docket.register(the_task)
|
|
41
|
+
|
|
42
|
+
await docket.add(the_task)("arg1")
|
|
43
|
+
await docket.add(the_task)("arg2")
|
|
44
|
+
await docket.add(the_task)("arg3")
|
|
45
|
+
|
|
46
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
47
|
+
None,
|
|
48
|
+
runner.invoke,
|
|
49
|
+
app,
|
|
50
|
+
[
|
|
51
|
+
"clear",
|
|
52
|
+
"--url",
|
|
53
|
+
docket.url,
|
|
54
|
+
"--docket",
|
|
55
|
+
docket.name,
|
|
56
|
+
],
|
|
57
|
+
)
|
|
58
|
+
assert result.exit_code == 0, result.output
|
|
59
|
+
assert "Cleared 3 tasks" in result.output
|
|
60
|
+
|
|
61
|
+
snapshot = await docket.snapshot()
|
|
62
|
+
assert len(snapshot.future) == 0
|
|
63
|
+
assert len(snapshot.running) == 0
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
async def test_clear_command_with_scheduled_tasks(
|
|
67
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock
|
|
68
|
+
):
|
|
69
|
+
"""Should clear scheduled tasks and report count"""
|
|
70
|
+
docket.register(the_task)
|
|
71
|
+
|
|
72
|
+
future = datetime.now(timezone.utc) + timedelta(seconds=60)
|
|
73
|
+
await docket.add(the_task, when=future)("scheduled1")
|
|
74
|
+
await docket.add(the_task, when=future + timedelta(seconds=1))("scheduled2")
|
|
75
|
+
|
|
76
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
77
|
+
None,
|
|
78
|
+
runner.invoke,
|
|
79
|
+
app,
|
|
80
|
+
[
|
|
81
|
+
"clear",
|
|
82
|
+
"--url",
|
|
83
|
+
docket.url,
|
|
84
|
+
"--docket",
|
|
85
|
+
docket.name,
|
|
86
|
+
],
|
|
87
|
+
)
|
|
88
|
+
assert result.exit_code == 0, result.output
|
|
89
|
+
assert "Cleared 2 tasks" in result.output
|
|
90
|
+
|
|
91
|
+
snapshot = await docket.snapshot()
|
|
92
|
+
assert len(snapshot.future) == 0
|
|
93
|
+
assert len(snapshot.running) == 0
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
async def test_clear_command_with_mixed_tasks(
|
|
97
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock, another_task: AsyncMock
|
|
98
|
+
):
|
|
99
|
+
"""Should clear both immediate and scheduled tasks"""
|
|
100
|
+
docket.register(the_task)
|
|
101
|
+
docket.register(another_task)
|
|
102
|
+
|
|
103
|
+
future = datetime.now(timezone.utc) + timedelta(seconds=60)
|
|
104
|
+
|
|
105
|
+
await docket.add(the_task)("immediate1")
|
|
106
|
+
await docket.add(another_task)("immediate2")
|
|
107
|
+
await docket.add(the_task, when=future)("scheduled1")
|
|
108
|
+
await docket.add(another_task, when=future + timedelta(seconds=1))("scheduled2")
|
|
109
|
+
|
|
110
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
111
|
+
None,
|
|
112
|
+
runner.invoke,
|
|
113
|
+
app,
|
|
114
|
+
[
|
|
115
|
+
"clear",
|
|
116
|
+
"--url",
|
|
117
|
+
docket.url,
|
|
118
|
+
"--docket",
|
|
119
|
+
docket.name,
|
|
120
|
+
],
|
|
121
|
+
)
|
|
122
|
+
assert result.exit_code == 0, result.output
|
|
123
|
+
assert "Cleared 4 tasks" in result.output
|
|
124
|
+
|
|
125
|
+
snapshot = await docket.snapshot()
|
|
126
|
+
assert len(snapshot.future) == 0
|
|
127
|
+
assert len(snapshot.running) == 0
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
async def test_clear_command_with_keyed_tasks(
|
|
131
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock
|
|
132
|
+
):
|
|
133
|
+
"""Should clear tasks with keys"""
|
|
134
|
+
docket.register(the_task)
|
|
135
|
+
|
|
136
|
+
await docket.add(the_task, key="task1")("arg1")
|
|
137
|
+
await docket.add(the_task, key="task2")("arg2")
|
|
138
|
+
|
|
139
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
140
|
+
None,
|
|
141
|
+
runner.invoke,
|
|
142
|
+
app,
|
|
143
|
+
[
|
|
144
|
+
"clear",
|
|
145
|
+
"--url",
|
|
146
|
+
docket.url,
|
|
147
|
+
"--docket",
|
|
148
|
+
docket.name,
|
|
149
|
+
],
|
|
150
|
+
)
|
|
151
|
+
assert result.exit_code == 0, result.output
|
|
152
|
+
assert "Cleared 2 tasks" in result.output
|
|
153
|
+
|
|
154
|
+
snapshot = await docket.snapshot()
|
|
155
|
+
assert len(snapshot.future) == 0
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
async def test_clear_command_basic_functionality(
|
|
159
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock
|
|
160
|
+
):
|
|
161
|
+
"""Should clear tasks via CLI command"""
|
|
162
|
+
docket.register(the_task)
|
|
163
|
+
|
|
164
|
+
# Add some tasks to clear
|
|
165
|
+
await docket.add(the_task)("task1")
|
|
166
|
+
future = datetime.now(timezone.utc) + timedelta(seconds=60)
|
|
167
|
+
await docket.add(the_task, when=future)("scheduled_task")
|
|
168
|
+
|
|
169
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
170
|
+
None,
|
|
171
|
+
runner.invoke,
|
|
172
|
+
app,
|
|
173
|
+
[
|
|
174
|
+
"clear",
|
|
175
|
+
"--url",
|
|
176
|
+
docket.url,
|
|
177
|
+
"--docket",
|
|
178
|
+
docket.name,
|
|
179
|
+
],
|
|
180
|
+
)
|
|
181
|
+
assert result.exit_code == 0, result.output
|
|
182
|
+
assert "Cleared" in result.output
|
|
183
|
+
|
|
184
|
+
snapshot_after_clear = await docket.snapshot()
|
|
185
|
+
assert len(snapshot_after_clear.future) == 0
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
async def test_clear_command_preserves_strikes(
|
|
189
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock
|
|
190
|
+
):
|
|
191
|
+
"""Should not affect strikes when clearing"""
|
|
192
|
+
docket.register(the_task)
|
|
193
|
+
|
|
194
|
+
await docket.strike("the_task")
|
|
195
|
+
await docket.add(the_task)("arg1")
|
|
196
|
+
|
|
197
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
198
|
+
None,
|
|
199
|
+
runner.invoke,
|
|
200
|
+
app,
|
|
201
|
+
[
|
|
202
|
+
"clear",
|
|
203
|
+
"--url",
|
|
204
|
+
docket.url,
|
|
205
|
+
"--docket",
|
|
206
|
+
docket.name,
|
|
207
|
+
],
|
|
208
|
+
)
|
|
209
|
+
assert result.exit_code == 0, result.output
|
|
210
|
+
assert "Cleared" in result.output
|
|
211
|
+
|
|
212
|
+
# Strikes should still be in effect - clear doesn't affect strikes
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
async def test_clear_command_with_custom_url(runner: CliRunner):
|
|
216
|
+
"""Should handle custom Redis URL"""
|
|
217
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
218
|
+
None,
|
|
219
|
+
runner.invoke,
|
|
220
|
+
app,
|
|
221
|
+
[
|
|
222
|
+
"clear",
|
|
223
|
+
"--url",
|
|
224
|
+
"redis://nonexistent:12345/0",
|
|
225
|
+
"--docket",
|
|
226
|
+
"test-docket",
|
|
227
|
+
],
|
|
228
|
+
)
|
|
229
|
+
assert result.exit_code != 0
|
|
230
|
+
assert result.exit_code != 0
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
async def test_clear_command_with_custom_docket_name(
|
|
234
|
+
docket: Docket, runner: CliRunner, the_task: AsyncMock
|
|
235
|
+
):
|
|
236
|
+
"""Should handle custom docket name"""
|
|
237
|
+
docket.register(the_task)
|
|
238
|
+
await docket.add(the_task)("test")
|
|
239
|
+
|
|
240
|
+
result = await asyncio.get_running_loop().run_in_executor(
|
|
241
|
+
None,
|
|
242
|
+
runner.invoke,
|
|
243
|
+
app,
|
|
244
|
+
[
|
|
245
|
+
"clear",
|
|
246
|
+
"--url",
|
|
247
|
+
docket.url,
|
|
248
|
+
"--docket",
|
|
249
|
+
docket.name,
|
|
250
|
+
],
|
|
251
|
+
)
|
|
252
|
+
assert result.exit_code == 0, result.output
|
|
253
|
+
assert "Cleared 1 tasks" in result.output
|
|
@@ -90,8 +90,12 @@ def redis_server(testrun_uid: str, worker_id: str) -> Generator[Container, None,
|
|
|
90
90
|
s.bind(("", 0))
|
|
91
91
|
redis_port = s.getsockname()[1]
|
|
92
92
|
|
|
93
|
+
image = f"redis:{REDIS_VERSION}"
|
|
94
|
+
if REDIS_VERSION.startswith("valkey-"): # pragma: no branch
|
|
95
|
+
image = f"valkey/valkey:{REDIS_VERSION.replace('valkey-', '')}" # pragma: no cover
|
|
96
|
+
|
|
93
97
|
container = client.containers.run(
|
|
94
|
-
|
|
98
|
+
image,
|
|
95
99
|
detach=True,
|
|
96
100
|
ports={"6379/tcp": redis_port},
|
|
97
101
|
labels={
|