python-durable 0.1.0__tar.gz → 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {python_durable-0.1.0 → python_durable-0.1.1}/PKG-INFO +5 -1
- python_durable-0.1.1/examples/approval.py +95 -0
- python_durable-0.1.1/examples/redis_example.py +74 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/pyproject.toml +6 -1
- {python_durable-0.1.0 → python_durable-0.1.1}/src/durable/__init__.py +2 -0
- python_durable-0.1.1/src/durable/redis_store.py +116 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/src/durable/store.py +49 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/src/durable/workflow.py +48 -0
- python_durable-0.1.1/tests/test_redis_store.py +171 -0
- python_durable-0.1.1/tests/test_signals.py +243 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/.github/workflows/publish.yml +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/.gitignore +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/LICENSE +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/README.md +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/examples/examples.py +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/examples/in_memory_example.py +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/examples/pydantic_ai_example.py +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/src/durable/backoff.py +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/src/durable/context.py +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/tests/__init__.py +0 -0
- {python_durable-0.1.0 → python_durable-0.1.1}/tests/test_durable.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: python-durable
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.1
|
|
4
4
|
Summary: Lightweight workflow durability for Python — make any async workflow resumable after crashes with just a decorator.
|
|
5
5
|
Project-URL: Repository, https://github.com/WillemDeGroef/python-durable
|
|
6
6
|
Author: Willem
|
|
@@ -17,13 +17,17 @@ Classifier: Typing :: Typed
|
|
|
17
17
|
Requires-Python: >=3.12
|
|
18
18
|
Requires-Dist: aiosqlite>=0.20
|
|
19
19
|
Provides-Extra: dev
|
|
20
|
+
Requires-Dist: fakeredis>=2.26; extra == 'dev'
|
|
20
21
|
Requires-Dist: pytest-asyncio>=0.24; extra == 'dev'
|
|
21
22
|
Requires-Dist: pytest>=8.0; extra == 'dev'
|
|
23
|
+
Requires-Dist: redis>=5.0; extra == 'dev'
|
|
22
24
|
Requires-Dist: ruff>=0.9; extra == 'dev'
|
|
23
25
|
Requires-Dist: ty>=0.0.1a7; extra == 'dev'
|
|
24
26
|
Provides-Extra: examples
|
|
25
27
|
Requires-Dist: pydantic-ai>=0.1; extra == 'examples'
|
|
26
28
|
Requires-Dist: pydantic>=2.0; extra == 'examples'
|
|
29
|
+
Provides-Extra: redis
|
|
30
|
+
Requires-Dist: redis>=5.0; extra == 'redis'
|
|
27
31
|
Description-Content-Type: text/markdown
|
|
28
32
|
|
|
29
33
|
# durable
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"""
|
|
2
|
+
approval.py — Human-in-the-loop approval workflow using durable signals.
|
|
3
|
+
|
|
4
|
+
Demonstrates how a workflow can durably wait for external input (e.g. a
|
|
5
|
+
human approval via a web endpoint), survive crashes, and resume cleanly.
|
|
6
|
+
|
|
7
|
+
Run with:
|
|
8
|
+
uvicorn examples.approval:app --reload
|
|
9
|
+
|
|
10
|
+
Then:
|
|
11
|
+
1. POST /orders/ord-100 → starts the workflow, blocks at approval
|
|
12
|
+
2. POST /approve/process-order-ord-100/manager-approval
|
|
13
|
+
body: {"approved": true, "approver": "alice@example.com"}
|
|
14
|
+
→ delivers the signal, workflow continues
|
|
15
|
+
3. POST /orders/ord-100 → re-run: everything replays from cache
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
import asyncio
|
|
19
|
+
import logging
|
|
20
|
+
|
|
21
|
+
from fastapi import FastAPI
|
|
22
|
+
|
|
23
|
+
from durable import Workflow
|
|
24
|
+
|
|
25
|
+
logging.basicConfig(level=logging.INFO)
|
|
26
|
+
|
|
27
|
+
app = FastAPI(title="Durable Approval Example")
|
|
28
|
+
wf = Workflow("approvals", db="sqlite:///approval.db")
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# ---------------------------------------------------------------------------
|
|
32
|
+
# Tasks
|
|
33
|
+
# ---------------------------------------------------------------------------
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@wf.task
|
|
37
|
+
async def validate_order(order_id: str) -> dict:
|
|
38
|
+
print(f" [validate_order] validating {order_id}...")
|
|
39
|
+
await asyncio.sleep(0.1)
|
|
40
|
+
return {"order_id": order_id, "total": 12_500.00, "items": 3}
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@wf.task
|
|
44
|
+
async def fulfill_order(order: dict, approval: dict) -> dict:
|
|
45
|
+
print(
|
|
46
|
+
f" [fulfill_order] fulfilling {order['order_id']}, approved by {approval.get('approver')}"
|
|
47
|
+
)
|
|
48
|
+
await asyncio.sleep(0.1)
|
|
49
|
+
return {"status": "fulfilled", "order_id": order["order_id"]}
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
# ---------------------------------------------------------------------------
|
|
53
|
+
# Workflow — blocks at wf.signal() until a human approves
|
|
54
|
+
# ---------------------------------------------------------------------------
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@wf.workflow(id="process-order-{order_id}")
|
|
58
|
+
async def process_order(order_id: str) -> dict:
|
|
59
|
+
order = await validate_order(order_id)
|
|
60
|
+
print(" [process_order] order validated, waiting for manager approval...")
|
|
61
|
+
|
|
62
|
+
# This durably blocks until someone calls wf.complete()
|
|
63
|
+
approval = await wf.signal("manager-approval")
|
|
64
|
+
|
|
65
|
+
if not approval.get("approved"):
|
|
66
|
+
return {"status": "rejected", "order_id": order_id}
|
|
67
|
+
|
|
68
|
+
return await fulfill_order(order, approval)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
# ---------------------------------------------------------------------------
|
|
72
|
+
# HTTP endpoints
|
|
73
|
+
# ---------------------------------------------------------------------------
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
@app.post("/orders/{order_id}")
|
|
77
|
+
async def start_order(order_id: str):
|
|
78
|
+
"""Start (or re-run) the order workflow. Blocks until approval signal arrives."""
|
|
79
|
+
result = await process_order(order_id=order_id)
|
|
80
|
+
return result
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@app.post("/approve/{run_id}/{signal_name}")
|
|
84
|
+
async def approve(run_id: str, signal_name: str, payload: dict):
|
|
85
|
+
"""
|
|
86
|
+
Deliver a signal to a waiting workflow.
|
|
87
|
+
|
|
88
|
+
Example:
|
|
89
|
+
POST /approve/process-order-ord-100/manager-approval
|
|
90
|
+
{"approved": true, "approver": "alice@example.com"}
|
|
91
|
+
"""
|
|
92
|
+
ok = await wf.complete(run_id, signal_name, payload)
|
|
93
|
+
if not ok:
|
|
94
|
+
return {"status": "already_completed", "run_id": run_id, "signal": signal_name}
|
|
95
|
+
return {"status": "delivered", "run_id": run_id, "signal": signal_name}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
"""
|
|
2
|
+
redis_example.py — Using RedisStore for distributed, TTL-backed workflows.
|
|
3
|
+
|
|
4
|
+
Checkpoints are stored in Redis with automatic expiration (default: 24 hours).
|
|
5
|
+
This is ideal for production deployments where multiple workers share state
|
|
6
|
+
or you want old runs to expire automatically.
|
|
7
|
+
|
|
8
|
+
Prerequisites:
|
|
9
|
+
pip install python-durable[redis]
|
|
10
|
+
# or: uv sync --all-extras
|
|
11
|
+
|
|
12
|
+
# Redis must be running locally (default: localhost:6379)
|
|
13
|
+
docker run -d --name redis -p 6379:6379 redis:latest
|
|
14
|
+
|
|
15
|
+
Run with:
|
|
16
|
+
uv run python examples/redis_example.py
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
import asyncio
|
|
20
|
+
|
|
21
|
+
from durable import RedisStore, Workflow
|
|
22
|
+
from durable.backoff import constant
|
|
23
|
+
|
|
24
|
+
store = RedisStore(
|
|
25
|
+
url="redis://localhost:6379/0",
|
|
26
|
+
ttl=3600, # checkpoints expire after 1 hour
|
|
27
|
+
prefix="example",
|
|
28
|
+
)
|
|
29
|
+
wf = Workflow("redis-demo", db=store)
|
|
30
|
+
|
|
31
|
+
call_count = 0
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@wf.task(retries=3, backoff=constant(0))
|
|
35
|
+
async def flaky_fetch(url: str) -> dict:
|
|
36
|
+
"""Fails twice, then succeeds — retries are handled automatically."""
|
|
37
|
+
global call_count
|
|
38
|
+
call_count += 1
|
|
39
|
+
if call_count < 3:
|
|
40
|
+
raise ConnectionError(f"attempt {call_count}: connection refused")
|
|
41
|
+
print(f" [flaky_fetch] succeeded on attempt {call_count}")
|
|
42
|
+
return {"url": url, "status": "ok"}
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@wf.task
|
|
46
|
+
async def transform(data: dict) -> str:
|
|
47
|
+
print(f" [transform] processing {data['url']}")
|
|
48
|
+
return f"transformed-{data['status']}"
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@wf.workflow(id="pipeline-{job_id}")
|
|
52
|
+
async def pipeline(job_id: str) -> str:
|
|
53
|
+
data = await flaky_fetch("https://api.example.com/data")
|
|
54
|
+
return await transform(data)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
async def main():
|
|
58
|
+
await store.setup()
|
|
59
|
+
try:
|
|
60
|
+
print("── First run: flaky_fetch retries, then both steps execute ──")
|
|
61
|
+
result = await pipeline(job_id="job-1")
|
|
62
|
+
print(f" result: {result}")
|
|
63
|
+
|
|
64
|
+
print("\n── Second run (same id): both steps replayed from Redis ──")
|
|
65
|
+
result = await pipeline(job_id="job-1")
|
|
66
|
+
print(f" result: {result}")
|
|
67
|
+
|
|
68
|
+
print("\n✓ Done. Checkpoints stored in Redis (TTL: 1 hour).")
|
|
69
|
+
finally:
|
|
70
|
+
await store.close()
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
if __name__ == "__main__":
|
|
74
|
+
asyncio.run(main())
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "python-durable"
|
|
3
|
-
version = "0.1.
|
|
3
|
+
version = "0.1.1"
|
|
4
4
|
description = "Lightweight workflow durability for Python — make any async workflow resumable after crashes with just a decorator."
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
license = "MIT"
|
|
@@ -24,11 +24,16 @@ dependencies = [
|
|
|
24
24
|
Repository = "https://github.com/WillemDeGroef/python-durable"
|
|
25
25
|
|
|
26
26
|
[project.optional-dependencies]
|
|
27
|
+
redis = [
|
|
28
|
+
"redis>=5.0",
|
|
29
|
+
]
|
|
27
30
|
dev = [
|
|
28
31
|
"pytest>=8.0",
|
|
29
32
|
"pytest-asyncio>=0.24",
|
|
30
33
|
"ruff>=0.9",
|
|
31
34
|
"ty>=0.0.1a7",
|
|
35
|
+
"fakeredis>=2.26",
|
|
36
|
+
"redis>=5.0",
|
|
32
37
|
]
|
|
33
38
|
examples = [
|
|
34
39
|
"pydantic>=2.0",
|
|
@@ -33,6 +33,7 @@ Quick start:
|
|
|
33
33
|
|
|
34
34
|
from .backoff import BackoffStrategy, constant, exponential, linear
|
|
35
35
|
from .context import RunContext
|
|
36
|
+
from .redis_store import RedisStore
|
|
36
37
|
from .store import InMemoryStore, SQLiteStore, Store
|
|
37
38
|
from .workflow import Workflow
|
|
38
39
|
|
|
@@ -41,6 +42,7 @@ __all__ = [
|
|
|
41
42
|
"Store",
|
|
42
43
|
"SQLiteStore",
|
|
43
44
|
"InMemoryStore",
|
|
45
|
+
"RedisStore",
|
|
44
46
|
"RunContext",
|
|
45
47
|
"BackoffStrategy",
|
|
46
48
|
"exponential",
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Redis-backed store with automatic key expiration.
|
|
3
|
+
|
|
4
|
+
Requires the ``redis`` extra: ``pip install python-durable[redis]``.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import hashlib
|
|
10
|
+
import json
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
from redis.asyncio import Redis
|
|
14
|
+
|
|
15
|
+
from .store import Store
|
|
16
|
+
|
|
17
|
+
_WRAP_KEY = "v"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _wrap(value: Any) -> str:
|
|
21
|
+
return json.dumps({_WRAP_KEY: value})
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _unwrap(raw: str | bytes) -> Any:
|
|
25
|
+
return json.loads(raw)[_WRAP_KEY]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _step_key(prefix: str, run_id: str, step_id: str) -> str:
|
|
29
|
+
tag = hashlib.sha256(f"{run_id}:{step_id}".encode()).hexdigest()[:16]
|
|
30
|
+
return f"{prefix}:step:{tag}"
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _run_key(prefix: str, run_id: str) -> str:
|
|
34
|
+
return f"{prefix}:run:{run_id}"
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _sig_key(prefix: str, run_id: str, name: str) -> str:
|
|
38
|
+
tag = hashlib.sha256(f"{run_id}:{name}".encode()).hexdigest()[:16]
|
|
39
|
+
return f"{prefix}:sig:{tag}"
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class RedisStore(Store):
|
|
43
|
+
"""Async Redis store with TTL-based auto-expiration."""
|
|
44
|
+
|
|
45
|
+
def __init__(
|
|
46
|
+
self,
|
|
47
|
+
url: str = "redis://localhost:6379/0",
|
|
48
|
+
ttl: int = 86_400,
|
|
49
|
+
prefix: str = "durable",
|
|
50
|
+
) -> None:
|
|
51
|
+
self._url = url
|
|
52
|
+
self._ttl = ttl
|
|
53
|
+
self._prefix = prefix
|
|
54
|
+
self._redis: Redis | None = None
|
|
55
|
+
|
|
56
|
+
async def setup(self) -> None:
|
|
57
|
+
if self._redis is None:
|
|
58
|
+
self._redis = Redis.from_url(self._url, decode_responses=True)
|
|
59
|
+
|
|
60
|
+
def _client(self) -> Redis:
|
|
61
|
+
if self._redis is None:
|
|
62
|
+
raise RuntimeError("RedisStore.setup() must be called before use")
|
|
63
|
+
return self._redis
|
|
64
|
+
|
|
65
|
+
async def get_step(self, run_id: str, step_id: str) -> tuple[bool, Any]:
|
|
66
|
+
raw = await self._client().get(_step_key(self._prefix, run_id, step_id))
|
|
67
|
+
if raw is None:
|
|
68
|
+
return False, None
|
|
69
|
+
return True, _unwrap(raw)
|
|
70
|
+
|
|
71
|
+
async def set_step(
|
|
72
|
+
self, run_id: str, step_id: str, result: Any, attempt: int = 1
|
|
73
|
+
) -> None:
|
|
74
|
+
payload = json.dumps({"v": result, "attempt": attempt})
|
|
75
|
+
key = _step_key(self._prefix, run_id, step_id)
|
|
76
|
+
client = self._client()
|
|
77
|
+
if self._ttl > 0:
|
|
78
|
+
await client.setex(key, self._ttl, payload)
|
|
79
|
+
else:
|
|
80
|
+
await client.set(key, payload)
|
|
81
|
+
|
|
82
|
+
async def mark_run_done(self, run_id: str) -> None:
|
|
83
|
+
key = _run_key(self._prefix, run_id)
|
|
84
|
+
client = self._client()
|
|
85
|
+
if self._ttl > 0:
|
|
86
|
+
await client.setex(key, self._ttl, "done")
|
|
87
|
+
else:
|
|
88
|
+
await client.set(key, "done")
|
|
89
|
+
|
|
90
|
+
async def mark_run_failed(self, run_id: str, error: str) -> None:
|
|
91
|
+
key = _run_key(self._prefix, run_id)
|
|
92
|
+
payload = json.dumps({"status": "failed", "error": error})
|
|
93
|
+
client = self._client()
|
|
94
|
+
if self._ttl > 0:
|
|
95
|
+
await client.setex(key, self._ttl, payload)
|
|
96
|
+
else:
|
|
97
|
+
await client.set(key, payload)
|
|
98
|
+
|
|
99
|
+
async def get_signal(self, run_id: str, name: str) -> tuple[bool, Any]:
|
|
100
|
+
raw = await self._client().get(_sig_key(self._prefix, run_id, name))
|
|
101
|
+
if raw is None:
|
|
102
|
+
return False, None
|
|
103
|
+
return True, json.loads(raw)
|
|
104
|
+
|
|
105
|
+
async def set_signal(self, run_id: str, name: str, payload: Any) -> bool:
|
|
106
|
+
key = _sig_key(self._prefix, run_id, name)
|
|
107
|
+
client = self._client()
|
|
108
|
+
created = await client.set(key, json.dumps(payload), nx=True)
|
|
109
|
+
if created and self._ttl > 0:
|
|
110
|
+
await client.expire(key, self._ttl)
|
|
111
|
+
return bool(created)
|
|
112
|
+
|
|
113
|
+
async def close(self) -> None:
|
|
114
|
+
if self._redis is not None:
|
|
115
|
+
await self._redis.aclose()
|
|
116
|
+
self._redis = None
|
|
@@ -55,6 +55,14 @@ class Store(ABC):
|
|
|
55
55
|
async def mark_run_failed(self, run_id: str, error: str) -> None:
|
|
56
56
|
"""Mark the run as failed with an error message."""
|
|
57
57
|
|
|
58
|
+
@abstractmethod
|
|
59
|
+
async def get_signal(self, run_id: str, name: str) -> tuple[bool, Any]:
|
|
60
|
+
"""Return (found, payload). found=False means the signal hasn't been delivered yet."""
|
|
61
|
+
|
|
62
|
+
@abstractmethod
|
|
63
|
+
async def set_signal(self, run_id: str, name: str, payload: Any) -> bool:
|
|
64
|
+
"""Write payload. Returns False if already set (idempotent, first-write-wins)."""
|
|
65
|
+
|
|
58
66
|
|
|
59
67
|
_SENTINEL = object()
|
|
60
68
|
|
|
@@ -70,6 +78,7 @@ class InMemoryStore(Store):
|
|
|
70
78
|
def __init__(self) -> None:
|
|
71
79
|
self._steps: dict[tuple[str, str], Any] = {}
|
|
72
80
|
self._runs: dict[str, str] = {}
|
|
81
|
+
self._signals: dict[tuple[str, str], Any] = {}
|
|
73
82
|
|
|
74
83
|
async def setup(self) -> None:
|
|
75
84
|
pass
|
|
@@ -91,6 +100,19 @@ class InMemoryStore(Store):
|
|
|
91
100
|
async def mark_run_failed(self, run_id: str, error: str) -> None:
|
|
92
101
|
self._runs[run_id] = "failed"
|
|
93
102
|
|
|
103
|
+
async def get_signal(self, run_id: str, name: str) -> tuple[bool, Any]:
|
|
104
|
+
key = (run_id, name)
|
|
105
|
+
if key in self._signals:
|
|
106
|
+
return True, self._signals[key]
|
|
107
|
+
return False, None
|
|
108
|
+
|
|
109
|
+
async def set_signal(self, run_id: str, name: str, payload: Any) -> bool:
|
|
110
|
+
key = (run_id, name)
|
|
111
|
+
if key in self._signals:
|
|
112
|
+
return False
|
|
113
|
+
self._signals[key] = payload
|
|
114
|
+
return True
|
|
115
|
+
|
|
94
116
|
|
|
95
117
|
class SQLiteStore(Store):
|
|
96
118
|
"""
|
|
@@ -126,6 +148,13 @@ class SQLiteStore(Store):
|
|
|
126
148
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
127
149
|
PRIMARY KEY (run_id, step_id)
|
|
128
150
|
);
|
|
151
|
+
|
|
152
|
+
CREATE TABLE IF NOT EXISTS signals (
|
|
153
|
+
run_id TEXT NOT NULL,
|
|
154
|
+
name TEXT NOT NULL,
|
|
155
|
+
payload TEXT NOT NULL,
|
|
156
|
+
PRIMARY KEY (run_id, name)
|
|
157
|
+
);
|
|
129
158
|
""")
|
|
130
159
|
await db.commit()
|
|
131
160
|
self._ready = True
|
|
@@ -189,6 +218,26 @@ class SQLiteStore(Store):
|
|
|
189
218
|
)
|
|
190
219
|
await db.commit()
|
|
191
220
|
|
|
221
|
+
async def get_signal(self, run_id: str, name: str) -> tuple[bool, Any]:
|
|
222
|
+
async with aiosqlite.connect(self.path) as db:
|
|
223
|
+
async with db.execute(
|
|
224
|
+
"SELECT payload FROM signals WHERE run_id = ? AND name = ?",
|
|
225
|
+
(run_id, name),
|
|
226
|
+
) as cursor:
|
|
227
|
+
row = await cursor.fetchone()
|
|
228
|
+
if row is None:
|
|
229
|
+
return False, None
|
|
230
|
+
return True, json.loads(row[0])
|
|
231
|
+
|
|
232
|
+
async def set_signal(self, run_id: str, name: str, payload: Any) -> bool:
|
|
233
|
+
async with aiosqlite.connect(self.path) as db:
|
|
234
|
+
cursor = await db.execute(
|
|
235
|
+
"INSERT OR IGNORE INTO signals (run_id, name, payload) VALUES (?, ?, ?)",
|
|
236
|
+
(run_id, name, json.dumps(payload)),
|
|
237
|
+
)
|
|
238
|
+
await db.commit()
|
|
239
|
+
return cursor.rowcount > 0
|
|
240
|
+
|
|
192
241
|
async def ensure_run(self, run_id: str, workflow_id: str) -> None:
|
|
193
242
|
async with aiosqlite.connect(self.path) as db:
|
|
194
243
|
await db.execute(
|
|
@@ -164,6 +164,8 @@ class Workflow:
|
|
|
164
164
|
self._default_backoff = default_backoff
|
|
165
165
|
self._store = self._build_store(db)
|
|
166
166
|
self._initialized = False
|
|
167
|
+
self._sig_events: dict[str, asyncio.Event] = {}
|
|
168
|
+
self._sig_data: dict[str, Any] = {}
|
|
167
169
|
|
|
168
170
|
# ------------------------------------------------------------------
|
|
169
171
|
# @wf.task — can be used bare or with arguments
|
|
@@ -320,6 +322,52 @@ class Workflow:
|
|
|
320
322
|
return decorator(fn)
|
|
321
323
|
return decorator
|
|
322
324
|
|
|
325
|
+
# ------------------------------------------------------------------
|
|
326
|
+
# Signals — durable wait for external input
|
|
327
|
+
# ------------------------------------------------------------------
|
|
328
|
+
|
|
329
|
+
@property
|
|
330
|
+
def _current_run_id(self) -> str:
|
|
331
|
+
ctx = _active_run.get()
|
|
332
|
+
if ctx is None:
|
|
333
|
+
raise RuntimeError("signal() must be called inside an active workflow")
|
|
334
|
+
return ctx.run_id
|
|
335
|
+
|
|
336
|
+
async def signal(self, name: str, *, poll: float = 2.0) -> Any:
|
|
337
|
+
"""Durably wait for an external signal inside a workflow."""
|
|
338
|
+
run_id = self._current_run_id
|
|
339
|
+
sk = f"{run_id}:{name}"
|
|
340
|
+
|
|
341
|
+
# Replay: already delivered?
|
|
342
|
+
found, payload = await self._store.get_signal(run_id, name)
|
|
343
|
+
if found:
|
|
344
|
+
return payload
|
|
345
|
+
|
|
346
|
+
# Wait: in-process event + store poll fallback
|
|
347
|
+
event = self._sig_events[sk] = asyncio.Event()
|
|
348
|
+
try:
|
|
349
|
+
while True:
|
|
350
|
+
try:
|
|
351
|
+
await asyncio.wait_for(event.wait(), timeout=poll)
|
|
352
|
+
return self._sig_data.pop(sk)
|
|
353
|
+
except asyncio.TimeoutError:
|
|
354
|
+
found, payload = await self._store.get_signal(run_id, name)
|
|
355
|
+
if found:
|
|
356
|
+
return payload
|
|
357
|
+
finally:
|
|
358
|
+
self._sig_events.pop(sk, None)
|
|
359
|
+
self._sig_data.pop(sk, None)
|
|
360
|
+
|
|
361
|
+
async def complete(self, run_id: str, name: str, payload: Any = None) -> bool:
|
|
362
|
+
"""Deliver a signal from the outside world (e.g. a web handler)."""
|
|
363
|
+
ok = await self._store.set_signal(run_id, name, payload or {})
|
|
364
|
+
if ok:
|
|
365
|
+
sk = f"{run_id}:{name}"
|
|
366
|
+
self._sig_data[sk] = payload or {}
|
|
367
|
+
if sk in self._sig_events:
|
|
368
|
+
self._sig_events[sk].set()
|
|
369
|
+
return ok
|
|
370
|
+
|
|
323
371
|
# ------------------------------------------------------------------
|
|
324
372
|
# Internal helpers
|
|
325
373
|
# ------------------------------------------------------------------
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
import json
|
|
2
|
+
|
|
3
|
+
import pytest
|
|
4
|
+
from fakeredis.aioredis import FakeRedis
|
|
5
|
+
|
|
6
|
+
from durable import RedisStore, Workflow
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _make_store(ttl: int = 86_400) -> RedisStore:
|
|
10
|
+
store = RedisStore(ttl=ttl)
|
|
11
|
+
store._redis = FakeRedis(decode_responses=True)
|
|
12
|
+
return store
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _make_wf(store: RedisStore) -> Workflow:
|
|
16
|
+
return Workflow("test-redis", db=store, default_retries=0)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def test_get_missing_step_returns_not_found():
|
|
20
|
+
store = _make_store()
|
|
21
|
+
await store.setup()
|
|
22
|
+
found, value = await store.get_step("run-1", "step-1")
|
|
23
|
+
assert found is False
|
|
24
|
+
assert value is None
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
async def test_set_and_get_step():
|
|
28
|
+
store = _make_store()
|
|
29
|
+
await store.setup()
|
|
30
|
+
|
|
31
|
+
await store.set_step("run-1", "step-1", {"count": 42})
|
|
32
|
+
found, value = await store.get_step("run-1", "step-1")
|
|
33
|
+
assert found is True
|
|
34
|
+
assert value == {"count": 42}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
async def test_step_returns_none_value():
|
|
38
|
+
store = _make_store()
|
|
39
|
+
await store.setup()
|
|
40
|
+
|
|
41
|
+
await store.set_step("run-1", "step-1", None)
|
|
42
|
+
found, value = await store.get_step("run-1", "step-1")
|
|
43
|
+
assert found is True
|
|
44
|
+
assert value is None
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
async def test_ttl_is_applied():
|
|
48
|
+
store = _make_store(ttl=60)
|
|
49
|
+
await store.setup()
|
|
50
|
+
|
|
51
|
+
await store.set_step("run-1", "step-1", "hello")
|
|
52
|
+
key = next(k for k in await store._client().keys("*") if "step" in k)
|
|
53
|
+
ttl = await store._client().ttl(key)
|
|
54
|
+
assert 0 < ttl <= 60
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
async def test_no_ttl_when_zero():
|
|
58
|
+
store = _make_store(ttl=0)
|
|
59
|
+
await store.setup()
|
|
60
|
+
|
|
61
|
+
await store.set_step("run-1", "step-1", "hello")
|
|
62
|
+
key = next(k for k in await store._client().keys("*") if "step" in k)
|
|
63
|
+
ttl = await store._client().ttl(key)
|
|
64
|
+
assert ttl == -1
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
async def test_mark_run_done():
|
|
68
|
+
store = _make_store()
|
|
69
|
+
await store.setup()
|
|
70
|
+
|
|
71
|
+
await store.mark_run_done("run-1")
|
|
72
|
+
key = next(k for k in await store._client().keys("*") if "run" in k)
|
|
73
|
+
assert await store._client().get(key) == "done"
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
async def test_mark_run_failed():
|
|
77
|
+
store = _make_store()
|
|
78
|
+
await store.setup()
|
|
79
|
+
|
|
80
|
+
await store.mark_run_failed("run-1", "kaboom")
|
|
81
|
+
key = next(k for k in await store._client().keys("*") if "run" in k)
|
|
82
|
+
raw = await store._client().get(key)
|
|
83
|
+
data = json.loads(raw)
|
|
84
|
+
assert data["status"] == "failed"
|
|
85
|
+
assert data["error"] == "kaboom"
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
async def test_overwrite_step():
|
|
89
|
+
store = _make_store()
|
|
90
|
+
await store.setup()
|
|
91
|
+
|
|
92
|
+
await store.set_step("run-1", "step-1", "first")
|
|
93
|
+
await store.set_step("run-1", "step-1", "second")
|
|
94
|
+
found, value = await store.get_step("run-1", "step-1")
|
|
95
|
+
assert found is True
|
|
96
|
+
assert value == "second"
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
async def test_close():
|
|
100
|
+
store = _make_store()
|
|
101
|
+
await store.setup()
|
|
102
|
+
await store.close()
|
|
103
|
+
assert store._redis is None
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
async def test_workflow_checkpoints_with_redis():
|
|
107
|
+
store = _make_store()
|
|
108
|
+
wf = _make_wf(store)
|
|
109
|
+
call_log: list[str] = []
|
|
110
|
+
|
|
111
|
+
@wf.task
|
|
112
|
+
async def step_a() -> str:
|
|
113
|
+
call_log.append("a")
|
|
114
|
+
return "result-a"
|
|
115
|
+
|
|
116
|
+
@wf.task
|
|
117
|
+
async def step_b(x: str) -> str:
|
|
118
|
+
call_log.append("b")
|
|
119
|
+
return f"{x}+b"
|
|
120
|
+
|
|
121
|
+
@wf.workflow(id="ckpt-test")
|
|
122
|
+
async def pipeline() -> str:
|
|
123
|
+
a = await step_a()
|
|
124
|
+
return await step_b(a)
|
|
125
|
+
|
|
126
|
+
result = await pipeline()
|
|
127
|
+
assert result == "result-a+b"
|
|
128
|
+
assert call_log == ["a", "b"]
|
|
129
|
+
|
|
130
|
+
call_log.clear()
|
|
131
|
+
result = await pipeline()
|
|
132
|
+
assert result == "result-a+b"
|
|
133
|
+
assert call_log == []
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
async def test_workflow_resumes_after_failure():
|
|
137
|
+
store = _make_store()
|
|
138
|
+
wf = _make_wf(store)
|
|
139
|
+
call_count = 0
|
|
140
|
+
|
|
141
|
+
@wf.task
|
|
142
|
+
async def good_step() -> str:
|
|
143
|
+
nonlocal call_count
|
|
144
|
+
call_count += 1
|
|
145
|
+
return "ok"
|
|
146
|
+
|
|
147
|
+
fail_once = True
|
|
148
|
+
|
|
149
|
+
@wf.task
|
|
150
|
+
async def flaky_step(x: str) -> str:
|
|
151
|
+
nonlocal fail_once, call_count
|
|
152
|
+
call_count += 1
|
|
153
|
+
if fail_once:
|
|
154
|
+
fail_once = False
|
|
155
|
+
raise RuntimeError("boom")
|
|
156
|
+
return f"{x}!"
|
|
157
|
+
|
|
158
|
+
@wf.workflow(id="resume-test")
|
|
159
|
+
async def pipeline() -> str:
|
|
160
|
+
a = await good_step()
|
|
161
|
+
return await flaky_step(a)
|
|
162
|
+
|
|
163
|
+
with pytest.raises(RuntimeError, match="boom"):
|
|
164
|
+
await pipeline()
|
|
165
|
+
|
|
166
|
+
assert call_count == 2
|
|
167
|
+
|
|
168
|
+
call_count = 0
|
|
169
|
+
result = await pipeline()
|
|
170
|
+
assert result == "ok!"
|
|
171
|
+
assert call_count == 1
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Tests for durable signals (human-in-the-loop / external input).
|
|
3
|
+
|
|
4
|
+
Covers:
|
|
5
|
+
1. Basic flow: workflow waits, external completes, workflow receives payload
|
|
6
|
+
2. Replay: re-running after delivery returns cached payload instantly
|
|
7
|
+
3. Idempotent complete: second complete() returns False
|
|
8
|
+
4. Signal before wait: complete() before workflow reaches signal()
|
|
9
|
+
5. Multiple signals: workflow waits for two independent signals
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import asyncio
|
|
13
|
+
|
|
14
|
+
from fakeredis.aioredis import FakeRedis
|
|
15
|
+
|
|
16
|
+
from durable import InMemoryStore, RedisStore, Workflow
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# ---------------------------------------------------------------------------
|
|
20
|
+
# InMemoryStore tests
|
|
21
|
+
# ---------------------------------------------------------------------------
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _mem_wf() -> Workflow:
|
|
25
|
+
return Workflow("test-sig", db=InMemoryStore(), default_retries=0)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
async def test_basic_signal_flow():
|
|
29
|
+
wf = _mem_wf()
|
|
30
|
+
received = []
|
|
31
|
+
|
|
32
|
+
@wf.workflow(id="basic-sig")
|
|
33
|
+
async def my_workflow() -> dict:
|
|
34
|
+
result = await wf.signal("approval")
|
|
35
|
+
received.append(result)
|
|
36
|
+
return result
|
|
37
|
+
|
|
38
|
+
async def deliver():
|
|
39
|
+
await asyncio.sleep(0.05)
|
|
40
|
+
await wf.complete("basic-sig", "approval", {"approved": True})
|
|
41
|
+
|
|
42
|
+
asyncio.create_task(deliver())
|
|
43
|
+
result = await my_workflow()
|
|
44
|
+
assert result == {"approved": True}
|
|
45
|
+
assert received == [{"approved": True}]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
async def test_signal_replay():
|
|
49
|
+
wf = _mem_wf()
|
|
50
|
+
call_count = 0
|
|
51
|
+
|
|
52
|
+
@wf.task
|
|
53
|
+
async def before_signal() -> str:
|
|
54
|
+
nonlocal call_count
|
|
55
|
+
call_count += 1
|
|
56
|
+
return "step-done"
|
|
57
|
+
|
|
58
|
+
@wf.workflow(id="replay-sig")
|
|
59
|
+
async def my_workflow() -> dict:
|
|
60
|
+
await before_signal()
|
|
61
|
+
return await wf.signal("approval")
|
|
62
|
+
|
|
63
|
+
# Deliver signal, run workflow
|
|
64
|
+
async def deliver():
|
|
65
|
+
await asyncio.sleep(0.05)
|
|
66
|
+
await wf.complete("replay-sig", "approval", {"ok": True})
|
|
67
|
+
|
|
68
|
+
asyncio.create_task(deliver())
|
|
69
|
+
result = await my_workflow()
|
|
70
|
+
assert result == {"ok": True}
|
|
71
|
+
assert call_count == 1
|
|
72
|
+
|
|
73
|
+
# Re-run: signal is already in the store, no waiting
|
|
74
|
+
call_count = 0
|
|
75
|
+
result = await my_workflow()
|
|
76
|
+
assert result == {"ok": True}
|
|
77
|
+
assert call_count == 0 # task replayed from checkpoint
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
async def test_idempotent_complete():
|
|
81
|
+
wf = _mem_wf()
|
|
82
|
+
|
|
83
|
+
@wf.workflow(id="idem-sig")
|
|
84
|
+
async def my_workflow() -> dict:
|
|
85
|
+
return await wf.signal("approval")
|
|
86
|
+
|
|
87
|
+
# Complete before workflow runs
|
|
88
|
+
ok1 = await wf.complete("idem-sig", "approval", {"first": True})
|
|
89
|
+
ok2 = await wf.complete("idem-sig", "approval", {"second": True})
|
|
90
|
+
assert ok1 is True
|
|
91
|
+
assert ok2 is False
|
|
92
|
+
|
|
93
|
+
# Workflow gets the first payload
|
|
94
|
+
result = await my_workflow()
|
|
95
|
+
assert result == {"first": True}
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
async def test_signal_before_wait():
|
|
99
|
+
wf = _mem_wf()
|
|
100
|
+
|
|
101
|
+
@wf.workflow(id="pre-sig")
|
|
102
|
+
async def my_workflow() -> dict:
|
|
103
|
+
return await wf.signal("approval")
|
|
104
|
+
|
|
105
|
+
# Deliver before workflow starts
|
|
106
|
+
await wf.complete("pre-sig", "approval", {"early": True})
|
|
107
|
+
|
|
108
|
+
# Workflow should pick it up immediately (no waiting)
|
|
109
|
+
result = await my_workflow()
|
|
110
|
+
assert result == {"early": True}
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
async def test_multiple_signals():
|
|
114
|
+
wf = _mem_wf()
|
|
115
|
+
|
|
116
|
+
@wf.workflow(id="multi-sig")
|
|
117
|
+
async def my_workflow() -> list:
|
|
118
|
+
a = await wf.signal("step-a")
|
|
119
|
+
b = await wf.signal("step-b")
|
|
120
|
+
return [a, b]
|
|
121
|
+
|
|
122
|
+
async def deliver():
|
|
123
|
+
await asyncio.sleep(0.05)
|
|
124
|
+
await wf.complete("multi-sig", "step-a", {"val": "A"})
|
|
125
|
+
await asyncio.sleep(0.05)
|
|
126
|
+
await wf.complete("multi-sig", "step-b", {"val": "B"})
|
|
127
|
+
|
|
128
|
+
asyncio.create_task(deliver())
|
|
129
|
+
result = await my_workflow()
|
|
130
|
+
assert result == [{"val": "A"}, {"val": "B"}]
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
# ---------------------------------------------------------------------------
|
|
134
|
+
# RedisStore tests
|
|
135
|
+
# ---------------------------------------------------------------------------
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def _redis_store(ttl: int = 86_400) -> RedisStore:
    """Build a RedisStore whose connection is swapped for an in-memory FakeRedis."""
    backend = RedisStore(ttl=ttl)
    # Inject the fake client directly so no real Redis server is needed.
    backend._redis = FakeRedis(decode_responses=True)
    return backend
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _redis_wf(store: RedisStore) -> Workflow:
    """Create a retry-free Workflow wired to the given Redis-backed store."""
    workflow = Workflow("test-redis-sig", db=store, default_retries=0)
    return workflow
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
async def test_redis_basic_signal_flow():
    """A workflow blocked on a signal resumes once complete() is called (Redis store)."""
    store = _redis_store()
    wf = _redis_wf(store)

    @wf.workflow(id="redis-basic-sig")
    async def my_workflow() -> dict:
        return await wf.signal("approval")

    async def deliver():
        await asyncio.sleep(0.05)
        await wf.complete("redis-basic-sig", "approval", {"approved": True})

    # Hold a reference to the task and await it so it cannot be
    # garbage-collected and any delivery error surfaces in the test.
    task = asyncio.create_task(deliver())
    result = await my_workflow()
    await task
    assert result == {"approved": True}
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
async def test_redis_signal_replay():
    """A consumed signal is replayed from the Redis store on re-execution."""
    store = _redis_store()
    wf = _redis_wf(store)

    @wf.workflow(id="redis-replay-sig")
    async def my_workflow() -> dict:
        return await wf.signal("approval")

    async def deliver():
        await asyncio.sleep(0.05)
        await wf.complete("redis-replay-sig", "approval", {"ok": True})

    # Hold a reference to the task and await it so it cannot be
    # garbage-collected and any delivery error surfaces in the test.
    task = asyncio.create_task(deliver())
    result = await my_workflow()
    await task
    assert result == {"ok": True}

    # Second run: no new delivery happens, so the value must be replayed
    # from the persisted store record.
    result = await my_workflow()
    assert result == {"ok": True}
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
async def test_redis_idempotent_complete():
    """Only the first complete() for a signal wins; later ones are rejected."""
    store = _redis_store()
    wf = _redis_wf(store)

    first_ok = await wf.complete("redis-idem", "approval", {"first": True})
    second_ok = await wf.complete("redis-idem", "approval", {"second": True})
    assert first_ok is True
    assert second_ok is False

    # The payload that actually persisted must be the first one delivered.
    found, payload = await store.get_signal("redis-idem", "approval")
    assert found is True
    assert payload == {"first": True}
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
async def test_redis_signal_before_wait():
    """A signal completed ahead of time is consumed immediately (Redis store)."""
    store = _redis_store()
    wf = _redis_wf(store)

    @wf.workflow(id="redis-pre-sig")
    async def my_workflow() -> dict:
        return await wf.signal("approval")

    # Deliver before the workflow starts; the wait should be a no-op.
    await wf.complete("redis-pre-sig", "approval", {"early": True})
    outcome = await my_workflow()
    assert outcome == {"early": True}
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
async def test_redis_signal_ttl():
    """Signal keys are written to Redis with the store's configured expiry."""
    store = _redis_store(ttl=120)
    wf = _redis_wf(store)

    await wf.complete("ttl-test", "approval", {"data": 1})

    # One signal key should exist, carrying a TTL bounded by the store setting.
    client = store._client()
    sig_keys = [key for key in await client.keys("*") if "sig" in key]
    assert len(sig_keys) == 1
    remaining = await client.ttl(sig_keys[0])
    assert 0 < remaining <= 120
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
async def test_redis_multiple_signals():
    """Two sequential signals are delivered and consumed in order (Redis store)."""
    store = _redis_store()
    wf = _redis_wf(store)

    @wf.workflow(id="redis-multi-sig")
    async def my_workflow() -> list:
        a = await wf.signal("step-a")
        b = await wf.signal("step-b")
        return [a, b]

    async def deliver():
        await asyncio.sleep(0.05)
        await wf.complete("redis-multi-sig", "step-a", {"val": "A"})
        await asyncio.sleep(0.05)
        await wf.complete("redis-multi-sig", "step-b", {"val": "B"})

    # Hold a reference to the task and await it so it cannot be
    # garbage-collected and any delivery error surfaces in the test.
    task = asyncio.create_task(deliver())
    result = await my_workflow()
    await task
    assert result == [{"val": "A"}, {"val": "B"}]
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|