redflow 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- redflow-0.0.1/.gitignore +44 -0
- redflow-0.0.1/PKG-INFO +308 -0
- redflow-0.0.1/README.md +277 -0
- redflow-0.0.1/pyproject.toml +60 -0
- redflow-0.0.1/src/redflow/__init__.py +120 -0
- redflow-0.0.1/src/redflow/_json.py +58 -0
- redflow-0.0.1/src/redflow/_keys.py +83 -0
- redflow-0.0.1/src/redflow/_lua_scripts.py +105 -0
- redflow-0.0.1/src/redflow/_time.py +10 -0
- redflow-0.0.1/src/redflow/client.py +1006 -0
- redflow-0.0.1/src/redflow/errors.py +139 -0
- redflow-0.0.1/src/redflow/py.typed +1 -0
- redflow-0.0.1/src/redflow/registry.py +169 -0
- redflow-0.0.1/src/redflow/testing.py +196 -0
- redflow-0.0.1/src/redflow/types.py +165 -0
- redflow-0.0.1/src/redflow/worker.py +1454 -0
- redflow-0.0.1/tests/__init__.py +1 -0
- redflow-0.0.1/tests/test_cross_compat.py +273 -0
- redflow-0.0.1/tests/test_errors.py +70 -0
- redflow-0.0.1/tests/test_inline.py +94 -0
- redflow-0.0.1/tests/test_json.py +61 -0
- redflow-0.0.1/tests/test_keys.py +95 -0
- redflow-0.0.1/tests/test_registry.py +73 -0
- redflow-0.0.1/uv.lock +503 -0
redflow-0.0.1/.gitignore
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# dependencies (bun install)
|
|
2
|
+
node_modules
|
|
3
|
+
|
|
4
|
+
# output
|
|
5
|
+
out
|
|
6
|
+
dist
|
|
7
|
+
*.tgz
|
|
8
|
+
|
|
9
|
+
# code coverage
|
|
10
|
+
coverage
|
|
11
|
+
*.lcov
|
|
12
|
+
|
|
13
|
+
# logs
|
|
14
|
+
logs
|
|
15
|
+
*.log
|
|
16
|
+
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
|
17
|
+
|
|
18
|
+
# dotenv environment variable files
|
|
19
|
+
.env
|
|
20
|
+
.env.development.local
|
|
21
|
+
.env.test.local
|
|
22
|
+
.env.production.local
|
|
23
|
+
.env.local
|
|
24
|
+
|
|
25
|
+
# caches
|
|
26
|
+
.eslintcache
|
|
27
|
+
.cache
|
|
28
|
+
*.tsbuildinfo
|
|
29
|
+
|
|
30
|
+
# python
|
|
31
|
+
__pycache__
|
|
32
|
+
*.py[cod]
|
|
33
|
+
*.egg-info
|
|
34
|
+
.venv
|
|
35
|
+
.mypy_cache
|
|
36
|
+
.pytest_cache
|
|
37
|
+
.ruff_cache
|
|
38
|
+
|
|
39
|
+
# IntelliJ based IDEs
|
|
40
|
+
.idea
|
|
41
|
+
|
|
42
|
+
# Finder (MacOS) folder config
|
|
43
|
+
.DS_Store
|
|
44
|
+
|
redflow-0.0.1/PKG-INFO
ADDED
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: redflow
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: Durable workflow engine backed by Redis — Python client
|
|
5
|
+
Author: getrelocapp
|
|
6
|
+
License-Expression: Apache-2.0
|
|
7
|
+
Keywords: background-jobs,durable,queue,redis,workflow
|
|
8
|
+
Classifier: Development Status :: 4 - Beta
|
|
9
|
+
Classifier: Framework :: AsyncIO
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: Apache Software License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
16
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
17
|
+
Classifier: Typing :: Typed
|
|
18
|
+
Requires-Python: >=3.11
|
|
19
|
+
Requires-Dist: croniter>=2.0.0
|
|
20
|
+
Requires-Dist: redis>=5.0.0
|
|
21
|
+
Provides-Extra: dev
|
|
22
|
+
Requires-Dist: mypy>=1.10; extra == 'dev'
|
|
23
|
+
Requires-Dist: pytest-asyncio>=0.24; extra == 'dev'
|
|
24
|
+
Requires-Dist: pytest>=8.0; extra == 'dev'
|
|
25
|
+
Requires-Dist: ruff>=0.4; extra == 'dev'
|
|
26
|
+
Provides-Extra: fast
|
|
27
|
+
Requires-Dist: redis[hiredis]>=5.0.0; extra == 'fast'
|
|
28
|
+
Provides-Extra: pydantic
|
|
29
|
+
Requires-Dist: pydantic>=2.0.0; extra == 'pydantic'
|
|
30
|
+
Description-Content-Type: text/markdown
|
|
31
|
+
|
|
32
|
+
# redflow
|
|
33
|
+
|
|
34
|
+
Durable workflow engine backed by Redis.
|
|
35
|
+
|
|
36
|
+
## Install
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
pip install redflow
|
|
40
|
+
# or with hiredis for better performance:
|
|
41
|
+
pip install redflow[fast]
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Define a workflow
|
|
45
|
+
|
|
46
|
+
```python
|
|
47
|
+
from redflow import define_workflow, WorkflowHandlerContext
|
|
48
|
+
|
|
49
|
+
async def handler(ctx: WorkflowHandlerContext) -> dict:
|
|
50
|
+
user = await ctx.step.run("fetch-user", fetch_user, ctx.input["user_id"])
|
|
51
|
+
await ctx.step.run("send-email", send_welcome, user["email"])
|
|
52
|
+
return {"sent": True}
|
|
53
|
+
|
|
54
|
+
send_welcome_email = define_workflow("send-welcome-email", handler=handler)
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
Or with the decorator:
|
|
58
|
+
|
|
59
|
+
```python
|
|
60
|
+
from redflow import workflow, WorkflowHandlerContext
|
|
61
|
+
|
|
62
|
+
@workflow("send-welcome-email")
|
|
63
|
+
async def send_welcome_email(ctx: WorkflowHandlerContext) -> dict:
|
|
64
|
+
user = await ctx.step.run("fetch-user", fetch_user, ctx.input["user_id"])
|
|
65
|
+
await ctx.step.run("send-email", send_welcome, user["email"])
|
|
66
|
+
return {"sent": True}
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
Handler context gives you:
|
|
70
|
+
|
|
71
|
+
- `input` — workflow input data
|
|
72
|
+
- `run` — run metadata (`id`, `workflow`, `queue`, `attempt`, `max_attempts`)
|
|
73
|
+
- `signal` — `asyncio.Event`, set when cancellation is requested
|
|
74
|
+
- `step` — durable step API
|
|
75
|
+
|
|
76
|
+
## Step API (inside workflow handlers)
|
|
77
|
+
|
|
78
|
+
### `step.run`
|
|
79
|
+
|
|
80
|
+
Durable, cached units of work. On crash recovery, completed steps return
|
|
81
|
+
their cached result instead of re-executing.
|
|
82
|
+
|
|
83
|
+
```python
|
|
84
|
+
payment = await ctx.step.run("capture-payment", capture_payment)
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
With timeout:
|
|
88
|
+
|
|
89
|
+
```python
|
|
90
|
+
payment = await ctx.step.run("capture-payment", capture_payment, timeout_ms=4000)
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
### `step.run_workflow`
|
|
94
|
+
|
|
95
|
+
Trigger a child workflow and wait for its result.
|
|
96
|
+
|
|
97
|
+
```python
|
|
98
|
+
receipt = await ctx.step.run_workflow(
|
|
99
|
+
"send-receipt",
|
|
100
|
+
"receipt-workflow",
|
|
101
|
+
{"order_id": order_id, "email": email},
|
|
102
|
+
timeout_ms=20_000,
|
|
103
|
+
idempotency_key=f"receipt:{order_id}",
|
|
104
|
+
)
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### `step.emit_workflow`
|
|
108
|
+
|
|
109
|
+
Trigger a child workflow without waiting — returns the child run ID.
|
|
110
|
+
|
|
111
|
+
```python
|
|
112
|
+
child_run_id = await ctx.step.emit_workflow(
|
|
113
|
+
"emit-analytics",
|
|
114
|
+
"analytics-workflow",
|
|
115
|
+
{"order_id": order_id},
|
|
116
|
+
idempotency_key=f"analytics:{order_id}",
|
|
117
|
+
)
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
## Run workflows
|
|
121
|
+
|
|
122
|
+
```python
|
|
123
|
+
from redflow import create_client
|
|
124
|
+
|
|
125
|
+
client = create_client(url="redis://localhost:6379")
|
|
126
|
+
|
|
127
|
+
handle = await client.emit_workflow(
|
|
128
|
+
"send-welcome-email",
|
|
129
|
+
{"user_id": "user_123"},
|
|
130
|
+
idempotency_key="welcome:user_123",
|
|
131
|
+
)
|
|
132
|
+
|
|
133
|
+
output = await handle.result(timeout_ms=15_000)
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
Delayed run:
|
|
137
|
+
|
|
138
|
+
```python
|
|
139
|
+
from datetime import datetime, timedelta
|
|
140
|
+
|
|
141
|
+
handle = await client.emit_workflow(
|
|
142
|
+
"send-welcome-email",
|
|
143
|
+
{"user_id": "user_789"},
|
|
144
|
+
run_at=datetime.now() + timedelta(minutes=1),
|
|
145
|
+
idempotency_key="welcome:user_789:delayed",
|
|
146
|
+
)
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
## Start a worker
|
|
150
|
+
|
|
151
|
+
```python
|
|
152
|
+
import asyncio
|
|
153
|
+
from redflow import start_worker, StartWorkerOptions
|
|
154
|
+
|
|
155
|
+
# import your workflow modules so they register
|
|
156
|
+
import workflows
|
|
157
|
+
|
|
158
|
+
async def main():
|
|
159
|
+
worker = await start_worker(StartWorkerOptions(
|
|
160
|
+
app="billing-worker",
|
|
161
|
+
url="redis://localhost:6379",
|
|
162
|
+
concurrency=4,
|
|
163
|
+
))
|
|
164
|
+
|
|
165
|
+
# graceful shutdown
|
|
166
|
+
try:
|
|
167
|
+
await asyncio.Event().wait()
|
|
168
|
+
finally:
|
|
169
|
+
await worker.stop()
|
|
170
|
+
|
|
171
|
+
asyncio.run(main())
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
Explicit queues and runtime tuning:
|
|
175
|
+
|
|
176
|
+
```python
|
|
177
|
+
worker = await start_worker(StartWorkerOptions(
|
|
178
|
+
app="billing-worker",
|
|
179
|
+
url="redis://localhost:6379",
|
|
180
|
+
queues=["critical", "io", "analytics"],
|
|
181
|
+
concurrency=8,
|
|
182
|
+
lease_ms=5000,
|
|
183
|
+
blmove_timeout_sec=1,
|
|
184
|
+
reaper_interval_ms=500,
|
|
185
|
+
))
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
## Workflow options
|
|
189
|
+
|
|
190
|
+
### max_concurrency
|
|
191
|
+
|
|
192
|
+
Limits concurrent running runs per workflow. Default is `1`.
|
|
193
|
+
|
|
194
|
+
```python
|
|
195
|
+
define_workflow(
|
|
196
|
+
"heavy-sync",
|
|
197
|
+
handler=handler,
|
|
198
|
+
queue="ops",
|
|
199
|
+
max_concurrency=1,
|
|
200
|
+
)
|
|
201
|
+
```
|
|
202
|
+
|
|
203
|
+
### Cron
|
|
204
|
+
|
|
205
|
+
```python
|
|
206
|
+
from redflow import CronTrigger
|
|
207
|
+
|
|
208
|
+
define_workflow(
|
|
209
|
+
"digest-cron",
|
|
210
|
+
handler=handler,
|
|
211
|
+
queue="ops",
|
|
212
|
+
cron=[
|
|
213
|
+
CronTrigger(id="digest-hourly", expression="0 * * * *"),
|
|
214
|
+
CronTrigger(expression="*/5 * * * *", timezone="UTC", input={"source": "cron"}),
|
|
215
|
+
],
|
|
216
|
+
)
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
Cron respects `max_concurrency`: if the limit is reached, that tick is skipped.
|
|
220
|
+
|
|
221
|
+
### on_failure
|
|
222
|
+
|
|
223
|
+
```python
|
|
224
|
+
from redflow import NonRetriableError, OnFailureContext
|
|
225
|
+
|
|
226
|
+
async def on_fail(ctx: OnFailureContext) -> None:
|
|
227
|
+
print(f"workflow failed: {ctx.run.id} {ctx.run.workflow} {ctx.error}")
|
|
228
|
+
|
|
229
|
+
define_workflow(
|
|
230
|
+
"invoice-sync",
|
|
231
|
+
handler=handler,
|
|
232
|
+
queue="billing",
|
|
233
|
+
max_attempts=4,
|
|
234
|
+
on_failure=on_fail,
|
|
235
|
+
)
|
|
236
|
+
```
|
|
237
|
+
|
|
238
|
+
## Client API
|
|
239
|
+
|
|
240
|
+
### Inspect and control runs
|
|
241
|
+
|
|
242
|
+
```python
|
|
243
|
+
run = await client.get_run("run_123")
|
|
244
|
+
steps = await client.get_run_steps("run_123")
|
|
245
|
+
|
|
246
|
+
recent = await client.list_runs(ListRunsParams(limit=50))
|
|
247
|
+
failed = await client.list_runs(ListRunsParams(
|
|
248
|
+
workflow="checkout",
|
|
249
|
+
status="failed",
|
|
250
|
+
limit=20,
|
|
251
|
+
))
|
|
252
|
+
|
|
253
|
+
workflows = await client.list_workflows()
|
|
254
|
+
meta = await client.get_workflow_meta("checkout")
|
|
255
|
+
stats = await client.get_stats()
|
|
256
|
+
|
|
257
|
+
canceled = await client.cancel_run("run_123", reason="requested by user")
|
|
258
|
+
```
|
|
259
|
+
|
|
260
|
+
### RunHandle
|
|
261
|
+
|
|
262
|
+
```python
|
|
263
|
+
handle = await client.emit_workflow("checkout", {"order_id": "ord_3"})
|
|
264
|
+
|
|
265
|
+
state = await handle.get_state()
|
|
266
|
+
print(state["status"])
|
|
267
|
+
|
|
268
|
+
output = await handle.result(timeout_ms=30_000)
|
|
269
|
+
```
|
|
270
|
+
|
|
271
|
+
## Testing
|
|
272
|
+
|
|
273
|
+
`run_inline` executes a workflow handler in-process without Redis — useful
|
|
274
|
+
for unit tests.
|
|
275
|
+
|
|
276
|
+
```python
|
|
277
|
+
from redflow import run_inline
|
|
278
|
+
|
|
279
|
+
result = await run_inline(my_workflow_def, input={"user_id": "test"})
|
|
280
|
+
assert result.succeeded
|
|
281
|
+
assert result.output == {"sent": True}
|
|
282
|
+
```
|
|
283
|
+
|
|
284
|
+
Override external steps:
|
|
285
|
+
|
|
286
|
+
```python
|
|
287
|
+
result = await run_inline(
|
|
288
|
+
my_workflow_def,
|
|
289
|
+
input={"user_id": "test"},
|
|
290
|
+
step_overrides={"fetch-user": {"email": "mock@test.com"}},
|
|
291
|
+
)
|
|
292
|
+
```
|
|
293
|
+
|
|
294
|
+
## Errors
|
|
295
|
+
|
|
296
|
+
```python
|
|
297
|
+
from redflow import (
|
|
298
|
+
RedflowError, # base class
|
|
299
|
+
CanceledError, # run was canceled
|
|
300
|
+
TimeoutError, # operation timed out
|
|
301
|
+
NonRetriableError, # permanent failure, no retries
|
|
302
|
+
InputValidationError, # input schema mismatch
|
|
303
|
+
UnknownWorkflowError, # workflow not registered
|
|
304
|
+
)
|
|
305
|
+
```
|
|
306
|
+
|
|
307
|
+
Raise `NonRetriableError` from a handler to fail the run immediately
|
|
308
|
+
without exhausting retry attempts.
|
redflow-0.0.1/README.md
ADDED
|
@@ -0,0 +1,277 @@
|
|
|
1
|
+
# redflow
|
|
2
|
+
|
|
3
|
+
Durable workflow engine backed by Redis.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install redflow
|
|
9
|
+
# or with hiredis for better performance:
|
|
10
|
+
pip install redflow[fast]
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Define a workflow
|
|
14
|
+
|
|
15
|
+
```python
|
|
16
|
+
from redflow import define_workflow, WorkflowHandlerContext
|
|
17
|
+
|
|
18
|
+
async def handler(ctx: WorkflowHandlerContext) -> dict:
|
|
19
|
+
user = await ctx.step.run("fetch-user", fetch_user, ctx.input["user_id"])
|
|
20
|
+
await ctx.step.run("send-email", send_welcome, user["email"])
|
|
21
|
+
return {"sent": True}
|
|
22
|
+
|
|
23
|
+
send_welcome_email = define_workflow("send-welcome-email", handler=handler)
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
Or with the decorator:
|
|
27
|
+
|
|
28
|
+
```python
|
|
29
|
+
from redflow import workflow, WorkflowHandlerContext
|
|
30
|
+
|
|
31
|
+
@workflow("send-welcome-email")
|
|
32
|
+
async def send_welcome_email(ctx: WorkflowHandlerContext) -> dict:
|
|
33
|
+
user = await ctx.step.run("fetch-user", fetch_user, ctx.input["user_id"])
|
|
34
|
+
await ctx.step.run("send-email", send_welcome, user["email"])
|
|
35
|
+
return {"sent": True}
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Handler context gives you:
|
|
39
|
+
|
|
40
|
+
- `input` — workflow input data
|
|
41
|
+
- `run` — run metadata (`id`, `workflow`, `queue`, `attempt`, `max_attempts`)
|
|
42
|
+
- `signal` — `asyncio.Event`, set when cancellation is requested
|
|
43
|
+
- `step` — durable step API
|
|
44
|
+
|
|
45
|
+
## Step API (inside workflow handlers)
|
|
46
|
+
|
|
47
|
+
### `step.run`
|
|
48
|
+
|
|
49
|
+
Durable, cached units of work. On crash recovery, completed steps return
|
|
50
|
+
their cached result instead of re-executing.
|
|
51
|
+
|
|
52
|
+
```python
|
|
53
|
+
payment = await ctx.step.run("capture-payment", capture_payment)
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
With timeout:
|
|
57
|
+
|
|
58
|
+
```python
|
|
59
|
+
payment = await ctx.step.run("capture-payment", capture_payment, timeout_ms=4000)
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
### `step.run_workflow`
|
|
63
|
+
|
|
64
|
+
Trigger a child workflow and wait for its result.
|
|
65
|
+
|
|
66
|
+
```python
|
|
67
|
+
receipt = await ctx.step.run_workflow(
|
|
68
|
+
"send-receipt",
|
|
69
|
+
"receipt-workflow",
|
|
70
|
+
{"order_id": order_id, "email": email},
|
|
71
|
+
timeout_ms=20_000,
|
|
72
|
+
idempotency_key=f"receipt:{order_id}",
|
|
73
|
+
)
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### `step.emit_workflow`
|
|
77
|
+
|
|
78
|
+
Trigger a child workflow without waiting — returns the child run ID.
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
child_run_id = await ctx.step.emit_workflow(
|
|
82
|
+
"emit-analytics",
|
|
83
|
+
"analytics-workflow",
|
|
84
|
+
{"order_id": order_id},
|
|
85
|
+
idempotency_key=f"analytics:{order_id}",
|
|
86
|
+
)
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
## Run workflows
|
|
90
|
+
|
|
91
|
+
```python
|
|
92
|
+
from redflow import create_client
|
|
93
|
+
|
|
94
|
+
client = create_client(url="redis://localhost:6379")
|
|
95
|
+
|
|
96
|
+
handle = await client.emit_workflow(
|
|
97
|
+
"send-welcome-email",
|
|
98
|
+
{"user_id": "user_123"},
|
|
99
|
+
idempotency_key="welcome:user_123",
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
output = await handle.result(timeout_ms=15_000)
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
Delayed run:
|
|
106
|
+
|
|
107
|
+
```python
|
|
108
|
+
from datetime import datetime, timedelta
|
|
109
|
+
|
|
110
|
+
handle = await client.emit_workflow(
|
|
111
|
+
"send-welcome-email",
|
|
112
|
+
{"user_id": "user_789"},
|
|
113
|
+
run_at=datetime.now() + timedelta(minutes=1),
|
|
114
|
+
idempotency_key="welcome:user_789:delayed",
|
|
115
|
+
)
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## Start a worker
|
|
119
|
+
|
|
120
|
+
```python
|
|
121
|
+
import asyncio
|
|
122
|
+
from redflow import start_worker, StartWorkerOptions
|
|
123
|
+
|
|
124
|
+
# import your workflow modules so they register
|
|
125
|
+
import workflows
|
|
126
|
+
|
|
127
|
+
async def main():
|
|
128
|
+
worker = await start_worker(StartWorkerOptions(
|
|
129
|
+
app="billing-worker",
|
|
130
|
+
url="redis://localhost:6379",
|
|
131
|
+
concurrency=4,
|
|
132
|
+
))
|
|
133
|
+
|
|
134
|
+
# graceful shutdown
|
|
135
|
+
try:
|
|
136
|
+
await asyncio.Event().wait()
|
|
137
|
+
finally:
|
|
138
|
+
await worker.stop()
|
|
139
|
+
|
|
140
|
+
asyncio.run(main())
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
Explicit queues and runtime tuning:
|
|
144
|
+
|
|
145
|
+
```python
|
|
146
|
+
worker = await start_worker(StartWorkerOptions(
|
|
147
|
+
app="billing-worker",
|
|
148
|
+
url="redis://localhost:6379",
|
|
149
|
+
queues=["critical", "io", "analytics"],
|
|
150
|
+
concurrency=8,
|
|
151
|
+
lease_ms=5000,
|
|
152
|
+
blmove_timeout_sec=1,
|
|
153
|
+
reaper_interval_ms=500,
|
|
154
|
+
))
|
|
155
|
+
```
|
|
156
|
+
|
|
157
|
+
## Workflow options
|
|
158
|
+
|
|
159
|
+
### max_concurrency
|
|
160
|
+
|
|
161
|
+
Limits concurrent running runs per workflow. Default is `1`.
|
|
162
|
+
|
|
163
|
+
```python
|
|
164
|
+
define_workflow(
|
|
165
|
+
"heavy-sync",
|
|
166
|
+
handler=handler,
|
|
167
|
+
queue="ops",
|
|
168
|
+
max_concurrency=1,
|
|
169
|
+
)
|
|
170
|
+
```
|
|
171
|
+
|
|
172
|
+
### Cron
|
|
173
|
+
|
|
174
|
+
```python
|
|
175
|
+
from redflow import CronTrigger
|
|
176
|
+
|
|
177
|
+
define_workflow(
|
|
178
|
+
"digest-cron",
|
|
179
|
+
handler=handler,
|
|
180
|
+
queue="ops",
|
|
181
|
+
cron=[
|
|
182
|
+
CronTrigger(id="digest-hourly", expression="0 * * * *"),
|
|
183
|
+
CronTrigger(expression="*/5 * * * *", timezone="UTC", input={"source": "cron"}),
|
|
184
|
+
],
|
|
185
|
+
)
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
Cron respects `max_concurrency`: if the limit is reached, that tick is skipped.
|
|
189
|
+
|
|
190
|
+
### on_failure
|
|
191
|
+
|
|
192
|
+
```python
|
|
193
|
+
from redflow import NonRetriableError, OnFailureContext
|
|
194
|
+
|
|
195
|
+
async def on_fail(ctx: OnFailureContext) -> None:
|
|
196
|
+
print(f"workflow failed: {ctx.run.id} {ctx.run.workflow} {ctx.error}")
|
|
197
|
+
|
|
198
|
+
define_workflow(
|
|
199
|
+
"invoice-sync",
|
|
200
|
+
handler=handler,
|
|
201
|
+
queue="billing",
|
|
202
|
+
max_attempts=4,
|
|
203
|
+
on_failure=on_fail,
|
|
204
|
+
)
|
|
205
|
+
```
|
|
206
|
+
|
|
207
|
+
## Client API
|
|
208
|
+
|
|
209
|
+
### Inspect and control runs
|
|
210
|
+
|
|
211
|
+
```python
|
|
212
|
+
run = await client.get_run("run_123")
|
|
213
|
+
steps = await client.get_run_steps("run_123")
|
|
214
|
+
|
|
215
|
+
recent = await client.list_runs(ListRunsParams(limit=50))
|
|
216
|
+
failed = await client.list_runs(ListRunsParams(
|
|
217
|
+
workflow="checkout",
|
|
218
|
+
status="failed",
|
|
219
|
+
limit=20,
|
|
220
|
+
))
|
|
221
|
+
|
|
222
|
+
workflows = await client.list_workflows()
|
|
223
|
+
meta = await client.get_workflow_meta("checkout")
|
|
224
|
+
stats = await client.get_stats()
|
|
225
|
+
|
|
226
|
+
canceled = await client.cancel_run("run_123", reason="requested by user")
|
|
227
|
+
```
|
|
228
|
+
|
|
229
|
+
### RunHandle
|
|
230
|
+
|
|
231
|
+
```python
|
|
232
|
+
handle = await client.emit_workflow("checkout", {"order_id": "ord_3"})
|
|
233
|
+
|
|
234
|
+
state = await handle.get_state()
|
|
235
|
+
print(state["status"])
|
|
236
|
+
|
|
237
|
+
output = await handle.result(timeout_ms=30_000)
|
|
238
|
+
```
|
|
239
|
+
|
|
240
|
+
## Testing
|
|
241
|
+
|
|
242
|
+
`run_inline` executes a workflow handler in-process without Redis — useful
|
|
243
|
+
for unit tests.
|
|
244
|
+
|
|
245
|
+
```python
|
|
246
|
+
from redflow import run_inline
|
|
247
|
+
|
|
248
|
+
result = await run_inline(my_workflow_def, input={"user_id": "test"})
|
|
249
|
+
assert result.succeeded
|
|
250
|
+
assert result.output == {"sent": True}
|
|
251
|
+
```
|
|
252
|
+
|
|
253
|
+
Override external steps:
|
|
254
|
+
|
|
255
|
+
```python
|
|
256
|
+
result = await run_inline(
|
|
257
|
+
my_workflow_def,
|
|
258
|
+
input={"user_id": "test"},
|
|
259
|
+
step_overrides={"fetch-user": {"email": "mock@test.com"}},
|
|
260
|
+
)
|
|
261
|
+
```
|
|
262
|
+
|
|
263
|
+
## Errors
|
|
264
|
+
|
|
265
|
+
```python
|
|
266
|
+
from redflow import (
|
|
267
|
+
RedflowError, # base class
|
|
268
|
+
CanceledError, # run was canceled
|
|
269
|
+
TimeoutError, # operation timed out
|
|
270
|
+
NonRetriableError, # permanent failure, no retries
|
|
271
|
+
InputValidationError, # input schema mismatch
|
|
272
|
+
UnknownWorkflowError, # workflow not registered
|
|
273
|
+
)
|
|
274
|
+
```
|
|
275
|
+
|
|
276
|
+
Raise `NonRetriableError` from a handler to fail the run immediately
|
|
277
|
+
without exhausting retry attempts.
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "redflow"
|
|
3
|
+
version = "0.0.1"
|
|
4
|
+
description = "Durable workflow engine backed by Redis — Python client"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
license = "Apache-2.0"
|
|
7
|
+
requires-python = ">=3.11"
|
|
8
|
+
authors = [{ name = "getrelocapp" }]
|
|
9
|
+
keywords = ["workflow", "redis", "durable", "queue", "background-jobs"]
|
|
10
|
+
classifiers = [
|
|
11
|
+
"Development Status :: 4 - Beta",
|
|
12
|
+
"Framework :: AsyncIO",
|
|
13
|
+
"Intended Audience :: Developers",
|
|
14
|
+
"License :: OSI Approved :: Apache Software License",
|
|
15
|
+
"Programming Language :: Python :: 3",
|
|
16
|
+
"Programming Language :: Python :: 3.11",
|
|
17
|
+
"Programming Language :: Python :: 3.12",
|
|
18
|
+
"Programming Language :: Python :: 3.13",
|
|
19
|
+
"Topic :: Software Development :: Libraries",
|
|
20
|
+
"Typing :: Typed",
|
|
21
|
+
]
|
|
22
|
+
dependencies = [
|
|
23
|
+
"redis>=5.0.0",
|
|
24
|
+
"croniter>=2.0.0",
|
|
25
|
+
]
|
|
26
|
+
|
|
27
|
+
[project.optional-dependencies]
|
|
28
|
+
fast = ["redis[hiredis]>=5.0.0"]
|
|
29
|
+
pydantic = ["pydantic>=2.0.0"]
|
|
30
|
+
dev = [
|
|
31
|
+
"pytest>=8.0",
|
|
32
|
+
"pytest-asyncio>=0.24",
|
|
33
|
+
"mypy>=1.10",
|
|
34
|
+
"ruff>=0.4",
|
|
35
|
+
]
|
|
36
|
+
|
|
37
|
+
[build-system]
|
|
38
|
+
requires = ["hatchling"]
|
|
39
|
+
build-backend = "hatchling.build"
|
|
40
|
+
|
|
41
|
+
[tool.hatch.build.targets.wheel]
|
|
42
|
+
packages = ["src/redflow"]
|
|
43
|
+
|
|
44
|
+
[tool.ruff]
|
|
45
|
+
target-version = "py311"
|
|
46
|
+
line-length = 120
|
|
47
|
+
|
|
48
|
+
[tool.ruff.lint]
|
|
49
|
+
select = ["E", "F", "W", "I", "UP", "B", "SIM", "RUF"]
|
|
50
|
+
ignore = ["E501"]
|
|
51
|
+
|
|
52
|
+
[tool.mypy]
|
|
53
|
+
python_version = "3.11"
|
|
54
|
+
strict = true
|
|
55
|
+
warn_return_any = true
|
|
56
|
+
warn_unused_configs = true
|
|
57
|
+
|
|
58
|
+
[tool.pytest.ini_options]
|
|
59
|
+
asyncio_mode = "auto"
|
|
60
|
+
testpaths = ["tests"]
|