pydocket 0.5.1__tar.gz → 0.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pydocket-0.5.1 → pydocket-0.6.0}/PKG-INFO +1 -1
- {pydocket-0.5.1 → pydocket-0.6.0}/chaos/driver.py +0 -6
- {pydocket-0.5.1 → pydocket-0.6.0}/chaos/tasks.py +11 -1
- pydocket-0.6.0/examples/common.py +69 -0
- pydocket-0.6.0/examples/find_and_flood.py +39 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/pyproject.toml +1 -1
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/__init__.py +4 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/annotations.py +16 -0
- pydocket-0.6.0/src/docket/dependencies.py +366 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/docket.py +73 -45
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/execution.py +30 -11
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/worker.py +264 -243
- pydocket-0.6.0/tests/cli/__init__.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/conftest.py +2 -2
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/test_fundamentals.py +320 -2
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/test_worker.py +56 -17
- {pydocket-0.5.1 → pydocket-0.6.0}/uv.lock +4 -4
- pydocket-0.5.1/src/docket/dependencies.py +0 -222
- {pydocket-0.5.1 → pydocket-0.6.0}/.cursor/rules/general.mdc +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.cursor/rules/python-style.mdc +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.github/codecov.yml +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.github/workflows/chaos.yml +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.github/workflows/ci.yml +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.github/workflows/publish.yml +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.gitignore +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/.pre-commit-config.yaml +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/LICENSE +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/README.md +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/chaos/README.md +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/chaos/__init__.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/chaos/producer.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/chaos/run +0 -0
- {pydocket-0.5.1/tests → pydocket-0.6.0/examples}/__init__.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/__main__.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/cli.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/instrumentation.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/py.typed +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/src/docket/tasks.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/telemetry/.gitignore +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/telemetry/start +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/telemetry/stop +0 -0
- {pydocket-0.5.1/tests/cli → pydocket-0.6.0/tests}/__init__.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/conftest.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_module.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_parsing.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_snapshot.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_striking.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_tasks.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_version.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_worker.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/cli/test_workers.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/test_dependencies.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/test_docket.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/test_instrumentation.py +0 -0
- {pydocket-0.5.1 → pydocket-0.6.0}/tests/test_striking.py +0 -0
--- pydocket-0.5.1/PKG-INFO
+++ pydocket-0.6.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.5.1
+Version: 0.6.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
--- pydocket-0.5.1/chaos/driver.py
+++ pydocket-0.6.0/chaos/driver.py
@@ -23,12 +23,6 @@ from .tasks import toxic
 
 logging.getLogger().setLevel(logging.INFO)
 
-# Quiets down the testcontainers logger
-testcontainers_logger = logging.getLogger("testcontainers.core.container")
-testcontainers_logger.setLevel(logging.ERROR)
-testcontainers_logger = logging.getLogger("testcontainers.core.waiting_utils")
-testcontainers_logger.setLevel(logging.ERROR)
-
 console = logging.StreamHandler(stream=sys.stdout)
 console.setFormatter(
     logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
--- pydocket-0.5.1/chaos/tasks.py
+++ pydocket-0.6.0/chaos/tasks.py
@@ -2,17 +2,27 @@ import logging
 import sys
 import time
 
-from docket import CurrentDocket, Docket, Retry, TaskKey
+from docket import CurrentDocket, Depends, Docket, Retry, TaskKey
 
 logger = logging.getLogger(__name__)
 
 
+async def greeting() -> str:
+    return "Hello, world"
+
+
+async def emphatic_greeting(greeting: str = Depends(greeting)) -> str:
+    return greeting + "!"
+
+
 async def hello(
+    greeting: str = Depends(emphatic_greeting),
     key: str = TaskKey(),
     docket: Docket = CurrentDocket(),
     retry: Retry = Retry(attempts=sys.maxsize),
 ):
     logger.info("Starting task %s", key)
+    logger.info("Greeting: %s", greeting)
     async with docket.redis() as redis:
         await redis.zadd("hello:received", {key: time.time()})
     logger.info("Finished task %s", key)
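Note: the chaos/tasks.py change above exercises the new Depends() dependency chain: greeting feeds emphatic_greeting, which feeds hello, and the worker resolves the whole chain at execution time. As a rough illustrative sketch (not part of the diff; it assumes an open Docket instance and uses the docket.add pattern shown in examples/find_and_flood.py below), the caller schedules hello without supplying a greeting at all:

    # The caller passes nothing; at execution time the worker resolves
    # Depends(emphatic_greeting) -> Depends(greeting) -> "Hello, world!"
    await docket.add(hello, key="hello-1")()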
--- /dev/null
+++ pydocket-0.6.0/examples/common.py
@@ -0,0 +1,69 @@
+import asyncio
+import os
+import socket
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator
+
+from docker import DockerClient
+
+
+@asynccontextmanager
+async def run_redis(version: str) -> AsyncGenerator[str, None]:
+    def get_free_port() -> int:
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+            s.bind(("", 0))
+            return s.getsockname()[1]
+
+    port = get_free_port()
+
+    client = DockerClient.from_env()
+    container = client.containers.run(
+        f"redis:{version}",
+        detach=True,
+        ports={"6379/tcp": port},
+        auto_remove=True,
+    )
+
+    # Wait for Redis to be ready
+    for line in container.logs(stream=True):
+        if b"Ready to accept connections" in line:
+            break
+
+    url = f"redis://localhost:{port}/0"
+    print("***** Redis is running on %s *****", url)
+    try:
+        yield url
+    finally:
+        container.stop()
+
+
+async def run_example_workers(workers: int, concurrency: int, tasks: str):
+    async with run_redis("7.4.2") as redis_url:
+        processes = [
+            await asyncio.create_subprocess_exec(
+                "docket",
+                "worker",
+                "--name",
+                f"worker-{i}",
+                "--url",
+                redis_url,
+                "--tasks",
+                tasks,
+                "--concurrency",
+                str(concurrency),
+                env={
+                    **os.environ,
+                    "PYTHONPATH": os.path.abspath(
+                        os.path.join(os.path.dirname(__file__), "..")
+                    ),
+                },
+            )
+            for i in range(workers)
+        ]
+        try:
+            await asyncio.gather(*[p.wait() for p in processes])
+        except asyncio.CancelledError:
+            for p in processes:
+                p.kill()
+        finally:
+            await asyncio.gather(*[p.wait() for p in processes])
--- /dev/null
+++ pydocket-0.6.0/examples/find_and_flood.py
@@ -0,0 +1,39 @@
+import asyncio
+from datetime import timedelta
+from logging import Logger, LoggerAdapter
+from typing import Annotated
+
+from docket import Docket
+from docket.annotations import Logged
+from docket.dependencies import CurrentDocket, Perpetual, TaskLogger
+
+from .common import run_example_workers
+
+
+async def find(
+    docket: Docket = CurrentDocket(),
+    logger: LoggerAdapter[Logger] = TaskLogger(),
+    perpetual: Perpetual = Perpetual(every=timedelta(seconds=3), automatic=True),
+) -> None:
+    for i in range(1, 10 + 1):
+        await docket.add(flood, key=str(i))(i)
+
+
+async def flood(
+    item: Annotated[int, Logged],
+    logger: LoggerAdapter[Logger] = TaskLogger(),
+) -> None:
+    logger.info("Working on %s", item)
+
+
+tasks = [find, flood]
+
+
+if __name__ == "__main__":
+    asyncio.run(
+        run_example_workers(
+            workers=3,
+            concurrency=8,
+            tasks="examples.find_and_flood:tasks",
+        )
+    )
--- pydocket-0.5.1/src/docket/__init__.py
+++ pydocket-0.6.0/src/docket/__init__.py
@@ -13,11 +13,13 @@ from .dependencies import (
     CurrentDocket,
     CurrentExecution,
     CurrentWorker,
+    Depends,
     ExponentialRetry,
     Perpetual,
     Retry,
     TaskKey,
     TaskLogger,
+    Timeout,
 )
 from .docket import Docket
 from .execution import Execution
@@ -36,5 +38,7 @@ __all__ = [
     "ExponentialRetry",
     "Logged",
     "Perpetual",
+    "Timeout",
+    "Depends",
     "__version__",
 ]
--- pydocket-0.5.1/src/docket/annotations.py
+++ pydocket-0.6.0/src/docket/annotations.py
@@ -28,3 +28,19 @@ class Annotation(abc.ABC):
 
 class Logged(Annotation):
     """Instructs docket to include arguments to this parameter in the log."""
+
+    length_only: bool = False
+
+    def __init__(self, length_only: bool = False) -> None:
+        self.length_only = length_only
+
+    def format(self, argument: Any) -> str:
+        if self.length_only:
+            if isinstance(argument, (dict, set)):
+                return f"{{len {len(argument)}}}"
+            elif isinstance(argument, tuple):
+                return f"(len {len(argument)})"
+            elif hasattr(argument, "__len__"):
+                return f"[len {len(argument)}]"
+
+        return repr(argument)
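Note: per the format() rules above, Logged(length_only=True) redacts argument values but keeps a size hint whose brackets mirror the argument's type, and anything without __len__ falls back to repr(). A quick illustrative sketch of the outputs (not part of the diff):

    logged = Logged(length_only=True)
    logged.format({"a": 1, "b": 2})  # "{len 2}"  (dict or set)
    logged.format((1, 2, 3))         # "(len 3)"  (tuple)
    logged.format([1, 2, 3, 4])      # "[len 4]"  (anything else with __len__)
    logged.format(42)                # "42"       (no __len__, falls back to repr)
    Logged().format("hi")            # "'hi'"     (length_only=False always uses repr)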
--- /dev/null
+++ pydocket-0.6.0/src/docket/dependencies.py
@@ -0,0 +1,366 @@
+import abc
+import logging
+import time
+from contextlib import AsyncExitStack, asynccontextmanager
+from contextvars import ContextVar
+from datetime import timedelta
+from types import TracebackType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncContextManager,
+    AsyncGenerator,
+    Awaitable,
+    Callable,
+    Counter,
+    Generic,
+    TypeVar,
+    cast,
+)
+
+from .docket import Docket
+from .execution import Execution, TaskFunction, get_signature
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .worker import Worker
+
+
+class Dependency(abc.ABC):
+    single: bool = False
+
+    docket: ContextVar[Docket] = ContextVar("docket")
+    worker: ContextVar["Worker"] = ContextVar("worker")
+    execution: ContextVar[Execution] = ContextVar("execution")
+
+    @abc.abstractmethod
+    async def __aenter__(self) -> Any: ...  # pragma: no cover
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> bool: ...  # pragma: no cover
+
+
+class _CurrentWorker(Dependency):
+    async def __aenter__(self) -> "Worker":
+        return self.worker.get()
+
+
+def CurrentWorker() -> "Worker":
+    return cast("Worker", _CurrentWorker())
+
+
+class _CurrentDocket(Dependency):
+    async def __aenter__(self) -> Docket:
+        return self.docket.get()
+
+
+def CurrentDocket() -> Docket:
+    return cast(Docket, _CurrentDocket())
+
+
+class _CurrentExecution(Dependency):
+    async def __aenter__(self) -> Execution:
+        return self.execution.get()
+
+
+def CurrentExecution() -> Execution:
+    return cast(Execution, _CurrentExecution())
+
+
+class _TaskKey(Dependency):
+    async def __aenter__(self) -> str:
+        return self.execution.get().key
+
+
+def TaskKey() -> str:
+    return cast(str, _TaskKey())
+
+
+class _TaskLogger(Dependency):
+    async def __aenter__(self) -> logging.LoggerAdapter[logging.Logger]:
+        execution = self.execution.get()
+        logger = logging.getLogger(f"docket.task.{execution.function.__name__}")
+        return logging.LoggerAdapter(
+            logger,
+            {
+                **self.docket.get().labels(),
+                **self.worker.get().labels(),
+                **execution.specific_labels(),
+            },
+        )
+
+
+def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
+    return cast(logging.LoggerAdapter[logging.Logger], _TaskLogger())
+
+
+class Retry(Dependency):
+    single: bool = True
+
+    def __init__(
+        self, attempts: int | None = 1, delay: timedelta = timedelta(0)
+    ) -> None:
+        self.attempts = attempts
+        self.delay = delay
+        self.attempt = 1
+
+    async def __aenter__(self) -> "Retry":
+        execution = self.execution.get()
+        retry = Retry(attempts=self.attempts, delay=self.delay)
+        retry.attempt = execution.attempt
+        return retry
+
+
+class ExponentialRetry(Retry):
+    attempts: int
+
+    def __init__(
+        self,
+        attempts: int = 1,
+        minimum_delay: timedelta = timedelta(seconds=1),
+        maximum_delay: timedelta = timedelta(seconds=64),
+    ) -> None:
+        super().__init__(attempts=attempts, delay=minimum_delay)
+        self.minimum_delay = minimum_delay
+        self.maximum_delay = maximum_delay
+
+    async def __aenter__(self) -> "ExponentialRetry":
+        execution = self.execution.get()
+
+        retry = ExponentialRetry(
+            attempts=self.attempts,
+            minimum_delay=self.minimum_delay,
+            maximum_delay=self.maximum_delay,
+        )
+        retry.attempt = execution.attempt
+
+        if execution.attempt > 1:
+            backoff_factor = 2 ** (execution.attempt - 1)
+            calculated_delay = self.minimum_delay * backoff_factor
+
+            if calculated_delay > self.maximum_delay:
+                retry.delay = self.maximum_delay
+            else:
+                retry.delay = calculated_delay
+
+        return retry
+
+
+class Perpetual(Dependency):
+    single = True
+
+    every: timedelta
+    automatic: bool
+
+    args: tuple[Any, ...]
+    kwargs: dict[str, Any]
+
+    cancelled: bool
+
+    def __init__(
+        self,
+        every: timedelta = timedelta(0),
+        automatic: bool = False,
+    ) -> None:
+        """Declare a task that should be run perpetually.
+
+        Args:
+            every: The target interval between task executions.
+            automatic: If set, this task will be automatically scheduled during worker
+                startup and continually through the worker's lifespan. This ensures
+                that the task will always be scheduled despite crashes and other
+                adverse conditions. Automatic tasks must not require any arguments.
+        """
+        self.every = every
+        self.automatic = automatic
+        self.cancelled = False
+
+    async def __aenter__(self) -> "Perpetual":
+        execution = self.execution.get()
+        perpetual = Perpetual(every=self.every)
+        perpetual.args = execution.args
+        perpetual.kwargs = execution.kwargs
+        return perpetual
+
+    def cancel(self) -> None:
+        self.cancelled = True
+
+    def perpetuate(self, *args: Any, **kwargs: Any) -> None:
+        self.args = args
+        self.kwargs = kwargs
+
+
+class Timeout(Dependency):
+    single = True
+
+    base: timedelta
+
+    _deadline: float
+
+    def __init__(self, base: timedelta) -> None:
+        self.base = base
+
+    async def __aenter__(self) -> "Timeout":
+        timeout = Timeout(base=self.base)
+        timeout.start()
+        return timeout
+
+    def start(self) -> None:
+        self._deadline = time.monotonic() + self.base.total_seconds()
+
+    def expired(self) -> bool:
+        return time.monotonic() >= self._deadline
+
+    def remaining(self) -> timedelta:
+        return timedelta(seconds=self._deadline - time.monotonic())
+
+    def extend(self, by: timedelta | None = None) -> None:
+        if by is None:
+            by = self.base
+        self._deadline += by.total_seconds()
+
+
+R = TypeVar("R")
+
+DependencyFunction = Callable[..., Awaitable[R] | AsyncContextManager[R]]
+
+
+_parameter_cache: dict[
+    TaskFunction | DependencyFunction[Any],
+    dict[str, Dependency],
+] = {}
+
+
+def get_dependency_parameters(
+    function: TaskFunction | DependencyFunction[Any],
+) -> dict[str, Dependency]:
+    if function in _parameter_cache:
+        return _parameter_cache[function]
+
+    dependencies: dict[str, Dependency] = {}
+
+    signature = get_signature(function)
+
+    for parameter, param in signature.parameters.items():
+        if not isinstance(param.default, Dependency):
+            continue
+
+        dependencies[parameter] = param.default
+
+    _parameter_cache[function] = dependencies
+    return dependencies
+
+
+class _Depends(Dependency, Generic[R]):
+    dependency: DependencyFunction[R]
+
+    cache: ContextVar[dict[DependencyFunction[Any], Any]] = ContextVar("cache")
+    stack: ContextVar[AsyncExitStack] = ContextVar("stack")
+
+    def __init__(
+        self, dependency: Callable[[], Awaitable[R] | AsyncContextManager[R]]
+    ) -> None:
+        self.dependency = dependency
+
+    async def _resolve_parameters(
+        self,
+        function: TaskFunction | DependencyFunction[Any],
+    ) -> dict[str, Any]:
+        stack = self.stack.get()
+
+        arguments: dict[str, Any] = {}
+        parameters = get_dependency_parameters(function)
+
+        for parameter, dependency in parameters.items():
+            arguments[parameter] = await stack.enter_async_context(dependency)
+
+        return arguments
+
+    async def __aenter__(self) -> R:
+        cache = self.cache.get()
+
+        if self.dependency in cache:
+            return cache[self.dependency]
+
+        stack = self.stack.get()
+        arguments = await self._resolve_parameters(self.dependency)
+
+        value = self.dependency(**arguments)
+
+        if isinstance(value, AsyncContextManager):
+            value = await stack.enter_async_context(value)
+        else:
+            value = await value
+
+        cache[self.dependency] = value
+        return value
+
+
+def Depends(dependency: DependencyFunction[R]) -> R:
+    return cast(R, _Depends(dependency))
+
+
+D = TypeVar("D", bound=Dependency)
+
+
+def get_single_dependency_parameter_of_type(
+    function: TaskFunction, dependency_type: type[D]
+) -> D | None:
+    assert dependency_type.single, "Dependency must be single"
+    for _, dependency in get_dependency_parameters(function).items():
+        if isinstance(dependency, dependency_type):
+            return dependency
+    return None
+
+
+def get_single_dependency_of_type(
+    dependencies: dict[str, Dependency], dependency_type: type[D]
+) -> D | None:
+    assert dependency_type.single, "Dependency must be single"
+    for _, dependency in dependencies.items():
+        if isinstance(dependency, dependency_type):
+            return dependency
+    return None
+
+
+def validate_dependencies(function: TaskFunction) -> None:
+    parameters = get_dependency_parameters(function)
+
+    counts = Counter(type(dependency) for dependency in parameters.values())
+
+    for dependency_type, count in counts.items():
+        if dependency_type.single and count > 1:
+            raise ValueError(
+                f"Only one {dependency_type.__name__} dependency is allowed per task"
+            )
+
+
+@asynccontextmanager
+async def resolved_dependencies(
+    worker: "Worker", execution: Execution
+) -> AsyncGenerator[dict[str, Any], None]:
+    # Set context variables once at the beginning
+    Dependency.docket.set(worker.docket)
+    Dependency.worker.set(worker)
+    Dependency.execution.set(execution)
+
+    _Depends.cache.set({})
+
+    async with AsyncExitStack() as stack:
+        _Depends.stack.set(stack)
+
+        arguments: dict[str, Any] = {}
+
+        parameters = get_dependency_parameters(execution.function)
+        for parameter, dependency in parameters.items():
+            kwargs = execution.kwargs
+            if parameter in kwargs:
+                arguments[parameter] = kwargs[parameter]
+                continue
+
+            arguments[parameter] = await stack.enter_async_context(dependency)
+
+        yield arguments
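Note: tying the new module together, _Depends caches each dependency function per execution (a dependency shared by several parameters is evaluated once) and enters async context managers on the execution's shared AsyncExitStack, so their teardown runs after the task finishes. An illustrative sketch of those semantics (not part of the diff; get_connection is a hypothetical user-defined dependency):

    from contextlib import asynccontextmanager
    from typing import AsyncGenerator

    from docket import Depends

    @asynccontextmanager
    async def get_connection() -> AsyncGenerator[str, None]:
        # setup runs before the task body; cleanup runs after it returns
        yield "connection"

    async def my_task(
        a: str = Depends(get_connection),
        b: str = Depends(get_connection),
    ) -> None:
        assert a is b  # cached per execution: entered once, shared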