pyworkflow-engine 0.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Celery Durable Workflow - Hooks Example
|
|
3
|
+
|
|
4
|
+
This example demonstrates hooks for waiting on external events with Celery workers:
|
|
5
|
+
- Using hook() to suspend workflow and wait for external input
|
|
6
|
+
- Using define_hook() for typed hooks with Pydantic validation
|
|
7
|
+
- Using CLI commands to list and resume hooks
|
|
8
|
+
- Composite tokens (run_id:hook_id) for self-describing tokens
|
|
9
|
+
|
|
10
|
+
Prerequisites:
|
|
11
|
+
1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
|
|
12
|
+
2. Start worker: pyworkflow --module examples.celery.durable.07_hooks worker run
|
|
13
|
+
|
|
14
|
+
Run workflow:
|
|
15
|
+
cd examples/celery/durable
|
|
16
|
+
PYTHONPATH=. pyworkflow --module 07_hooks workflows run approval_workflow --arg order_id=order-123
|
|
17
|
+
|
|
18
|
+
List pending hooks:
|
|
19
|
+
pyworkflow hooks list --status pending
|
|
20
|
+
|
|
21
|
+
Resume a hook (interactive):
|
|
22
|
+
pyworkflow hooks resume
|
|
23
|
+
# Step 1: Select the pending hook
|
|
24
|
+
# Step 2: Enter payload values (approved, reviewer, comments)
|
|
25
|
+
|
|
26
|
+
Resume with explicit payload:
|
|
27
|
+
pyworkflow hooks resume <token> --payload '{"approved": true, "reviewer": "admin@example.com"}'
|
|
28
|
+
|
|
29
|
+
Check workflow status:
|
|
30
|
+
pyworkflow runs status <run_id>
|
|
31
|
+
"""
|
|
32
|
+
|
|
33
|
+
from pydantic import BaseModel
|
|
34
|
+
|
|
35
|
+
from pyworkflow import define_hook, hook, step, workflow
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# --- Pydantic model for typed hook payload ---
|
|
39
|
+
class ApprovalPayload(BaseModel):
    """Typed payload for approval hook.

    This schema is stored with the hook and used by the CLI
    to prompt for field values interactively.
    """

    # True to approve the order, False to reject it.
    approved: bool
    # Identifier (e.g. an email address) of the person who reviewed the request.
    reviewer: str
    # Optional free-form notes; used as the cancellation reason on rejection.
    comments: str | None = None
|
49
|
+
|
|
50
|
+
|
|
51
|
+
# Create typed hook - schema is stored for CLI resume
# define_hook() binds the hook name "manager_approval" to the ApprovalPayload
# schema, so `pyworkflow hooks resume` can prompt for each field and validate
# the payload before the workflow continues.
approval_hook = define_hook("manager_approval", ApprovalPayload)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
# --- Steps ---
|
|
56
|
+
@step()
async def prepare_order(order_id: str) -> dict:
    """Stage an order so it can be sent for approval."""
    print(f"[Step] Preparing order {order_id}...")
    prepared = {"order_id": order_id, "status": "pending_approval"}
    return prepared
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@step()
async def fulfill_order(order: dict) -> dict:
    """Mark an approved order as fulfilled and return the updated record."""
    print(f"[Step] Fulfilling order {order['order_id']}...")
    fulfilled = dict(order)
    fulfilled["status"] = "fulfilled"
    return fulfilled
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@step()
async def cancel_order(order: dict, reason: str) -> dict:
    """Record that a rejected order was cancelled, keeping the reason."""
    print(f"[Step] Cancelling order {order['order_id']}: {reason}")
    cancelled = dict(order)
    cancelled.update(status="cancelled", reason=reason)
    return cancelled
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
# --- Workflow with simple hook ---
|
|
78
|
+
@workflow(name="simple_approval_workflow", tags=["celery", "durable"])
async def simple_approval_workflow(order_id: str) -> dict:
    """
    Workflow using simple hook() with untyped payload.

    The workflow suspends at the hook and waits for external input.
    Use `pyworkflow hooks resume` to send a payload and continue.

    The resume payload is a plain dict; this workflow reads its
    "approved" and "reason" keys and fulfills or cancels accordingly.
    """
    order = await prepare_order(order_id)

    async def on_hook_created(token: str) -> None:
        """Called when hook is created - log the token for CLI use."""
        print(f"[Hook] Created with token: {token}")
        print(
            f"[Hook] Resume with: pyworkflow hooks resume {token} --payload '{{\"approved\": true}}'"
        )

    # Wait for external approval
    # Token is auto-generated in composite format: run_id:hook_id
    approval = await hook(
        "simple_approval",
        timeout="24h",  # Expire after 24 hours
        on_created=on_hook_created,
    )

    # `approval` is the raw payload sent by the resumer (no schema validation).
    if approval.get("approved"):
        return await fulfill_order(order)
    else:
        # Missing "reason" falls back to a generic rejection message.
        return await cancel_order(order, approval.get("reason", "Rejected"))
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
# --- Workflow with typed hook ---
|
|
110
|
+
@workflow(name="approval_workflow", tags=["celery", "durable"])
async def approval_workflow(order_id: str) -> dict:
    """
    Workflow using define_hook() for type-safe payloads.

    The typed hook stores its Pydantic schema, which the CLI uses
    to prompt for each field interactively during `pyworkflow hooks resume`.

    Run: pyworkflow workflows run approval_workflow --arg order_id=order-123
    Resume: pyworkflow hooks resume (interactive)
    """
    order = await prepare_order(order_id)

    async def on_hook_created(token: str) -> None:
        """Called when hook is created - log for CLI use."""
        print(f"[Hook] Typed hook created with token: {token}")
        print("[Hook] Run: pyworkflow hooks resume")
        print(
            f'[Hook] Or: pyworkflow hooks resume {token} --payload \'{{"approved": true, "reviewer": "admin@example.com"}}\''
        )

    # Wait for typed approval - payload validated against ApprovalPayload
    # CLI will prompt for: approved (bool), reviewer (str), comments (str, optional)
    approval: ApprovalPayload = await approval_hook(
        timeout="7d",  # Expire after 7 days
        on_created=on_hook_created,
    )

    # Unlike the simple hook, `approval` is an ApprovalPayload instance,
    # so fields are accessed as attributes rather than dict keys.
    print(
        f"[Workflow] Received approval: approved={approval.approved}, reviewer={approval.reviewer}"
    )

    if approval.approved:
        return await fulfill_order(order)
    else:
        # `comments` doubles as the rejection reason when present.
        return await cancel_order(order, approval.comments or "No reason given")
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
# --- Workflow with multiple hooks ---
|
|
149
|
+
@workflow(name="multi_approval_workflow", tags=["celery", "durable"])
async def multi_approval_workflow(order_id: str) -> dict:
    """
    Workflow demonstrating sequential hooks for multi-level approval.

    This workflow requires two approvals:
    1. Manager approval
    2. Finance approval (only if amount is significant)

    Args:
        order_id: Identifier of the order to run through approval.

    Returns:
        The order dict, fulfilled when both approvals are granted, or
        cancelled with the rejection reason otherwise.
    """
    order = await prepare_order(order_id)

    def log_token(name: str):
        """Build an async on_created callback that logs the hook token.

        Note: this factory is deliberately a plain (non-async) function.
        It only closes over `name` and returns the coroutine callback, so
        callers pass `on_created=log_token(...)` directly instead of the
        previous `on_created=await log_token(...)` indirection.
        """

        async def _log(token: str) -> None:
            print(f"[Hook] {name} hook created: {token}")

        return _log

    # First approval: Manager
    print("[Workflow] Waiting for manager approval...")
    manager_approval = await hook(
        "manager_approval",
        timeout="24h",
        on_created=log_token("Manager"),
    )

    if not manager_approval.get("approved"):
        return await cancel_order(
            order, f"Manager rejected: {manager_approval.get('reason', 'No reason')}"
        )

    # Record who approved before moving to the next gate.
    order["manager_approved"] = True
    order["manager"] = manager_approval.get("approver", "unknown")

    # Second approval: Finance (simulating high-value order check)
    print("[Workflow] Waiting for finance approval...")
    finance_approval = await hook(
        "finance_approval",
        timeout="48h",
        on_created=log_token("Finance"),
    )

    if not finance_approval.get("approved"):
        return await cancel_order(
            order, f"Finance rejected: {finance_approval.get('reason', 'No reason')}"
        )

    order["finance_approved"] = True
    order["finance_reviewer"] = finance_approval.get("approver", "unknown")

    return await fulfill_order(order)
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
async def main() -> None:
    """Print usage guidance; this example is driven via the CLI with workers."""
    for line in (
        __doc__,
        "\nThis example should be run with Celery workers.",
        "See the docstring above for CLI commands.",
    ):
        print(line)


if __name__ == "__main__":
    import asyncio

    asyncio.run(main())
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Celery Durable Workflow - Idempotency
|
|
3
|
+
|
|
4
|
+
This example demonstrates idempotent workflow execution.
|
|
5
|
+
- Use --idempotency-key to prevent duplicate executions
|
|
6
|
+
- Same key returns existing run instead of starting new one
|
|
7
|
+
- Critical for payment processing and other sensitive operations
|
|
8
|
+
|
|
9
|
+
Prerequisites:
|
|
10
|
+
1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
|
|
11
|
+
2. Start worker: pyworkflow --module examples.celery.durable.05_idempotency worker run
|
|
12
|
+
|
|
13
|
+
Run with CLI:
|
|
14
|
+
# First run - starts new workflow
|
|
15
|
+
pyworkflow --module examples.celery.durable.05_idempotency workflows run payment_workflow \
|
|
16
|
+
--arg payment_id=pay-123 --arg amount=99.99 \
|
|
17
|
+
--idempotency-key payment-pay-123
|
|
18
|
+
|
|
19
|
+
# Second run with same key - returns existing run (no duplicate charge)
|
|
20
|
+
pyworkflow --module examples.celery.durable.05_idempotency workflows run payment_workflow \
|
|
21
|
+
--arg payment_id=pay-123 --arg amount=99.99 \
|
|
22
|
+
--idempotency-key payment-pay-123
|
|
23
|
+
|
|
24
|
+
Check status:
|
|
25
|
+
pyworkflow runs list
|
|
26
|
+
"""
|
|
27
|
+
|
|
28
|
+
from pyworkflow import step, workflow
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@step()
async def validate_payment(payment_id: str, amount: float) -> dict:
    """Check the payment request and return a validated payment record."""
    print(f"[Step] Validating payment {payment_id} for ${amount:.2f}...")
    record = {"payment_id": payment_id, "amount": amount, "valid": True}
    return record
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@step()
async def charge_payment(payment: dict) -> dict:
    """
    Charge the payment.

    IMPORTANT: This step should only run once per payment!
    Idempotency keys ensure duplicate requests don't double-charge.
    """
    print(f"[Step] CHARGING payment {payment['payment_id']} for ${payment['amount']:.2f}...")
    print("[Step] (In production, this would call Stripe/PayPal with idempotency key)")
    charged = dict(payment)
    charged["charged"] = True
    charged["transaction_id"] = f"txn_{payment['payment_id']}"
    return charged
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@step()
async def send_receipt(payment: dict) -> dict:
    """Deliver the receipt and flag the payment record accordingly."""
    print(f"[Step] Sending receipt for {payment['payment_id']}...")
    result = dict(payment)
    result["receipt_sent"] = True
    return result
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
@workflow(tags=["celery", "durable"])
async def payment_workflow(payment_id: str, amount: float) -> dict:
    """
    Payment processing workflow with idempotency.

    ALWAYS use --idempotency-key when running this workflow to prevent
    duplicate charges. The key should be unique per payment attempt.

    Example keys:
    - payment-{payment_id}
    - order-{order_id}-payment
    - user-{user_id}-{timestamp}

    If a workflow with the same idempotency key already exists:
    - If RUNNING: raises WorkflowAlreadyRunningError
    - If COMPLETED/FAILED/SUSPENDED: returns existing run_id
    """
    # Each step returns an enriched copy of the payment dict, which is
    # threaded through to the next step.
    payment = await validate_payment(payment_id, amount)
    # The charge must happen at most once per payment; the idempotency key
    # supplied at start time is what prevents a duplicate run.
    payment = await charge_payment(payment)
    payment = await send_receipt(payment)
    return payment
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
async def main() -> None:
    """Parse CLI flags and start the payment workflow with an idempotency key."""
    import argparse

    import pyworkflow

    cli = argparse.ArgumentParser(description="Payment Workflow with Idempotency")
    cli.add_argument("--payment-id", default="pay-123", help="Payment ID")
    cli.add_argument("--amount", type=float, default=99.99, help="Payment amount")
    cli.add_argument("--idempotency-key", help="Idempotency key (recommended)")
    args = cli.parse_args()

    # Derive a deterministic key from the payment id when none was supplied.
    key = args.idempotency_key or f"payment-{args.payment_id}"

    # Configuration is automatically loaded from pyworkflow.config.yaml
    print(f"Starting payment workflow for {args.payment_id} (${args.amount:.2f})...")
    print(f"Idempotency key: {key}")
    run_id = await pyworkflow.start(
        payment_workflow,
        args.payment_id,
        args.amount,
        idempotency_key=key,
    )
    print(f"Workflow started with run_id: {run_id}")
    print(f"\nCheck status: pyworkflow runs status {run_id}")
    print("\nRun again with same --idempotency-key to see duplicate prevention!")


if __name__ == "__main__":
    import asyncio

    asyncio.run(main())
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Celery Durable Workflow - Long Running with Sleep
|
|
3
|
+
|
|
4
|
+
This example demonstrates automatic sleep resumption with Celery workers.
|
|
5
|
+
- Workflow suspends during sleep (releases resources)
|
|
6
|
+
- Celery automatically resumes after sleep completes
|
|
7
|
+
- No manual intervention required (unlike local runtime)
|
|
8
|
+
|
|
9
|
+
Prerequisites:
|
|
10
|
+
1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
|
|
11
|
+
2. Start worker: pyworkflow --module examples.celery.durable.02_long_running worker run
|
|
12
|
+
|
|
13
|
+
Run with CLI:
|
|
14
|
+
pyworkflow --module examples.celery.durable.02_long_running workflows run onboarding_workflow \
|
|
15
|
+
--arg user_id=user-456
|
|
16
|
+
|
|
17
|
+
Watch the worker output to see automatic resumption after each sleep.
|
|
18
|
+
|
|
19
|
+
Check status:
|
|
20
|
+
pyworkflow runs list --status suspended
|
|
21
|
+
pyworkflow runs status <run_id>
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
from pyworkflow import sleep, step, workflow
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@step()
async def send_welcome_email(user_id: str) -> dict:
    """Send the initial welcome email and start the onboarding record."""
    print(f"[Step] Sending welcome email to {user_id}...")
    record = {"user_id": user_id, "welcome_sent": True}
    return record
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@step()
async def send_tips_email(user: dict) -> dict:
    """Send the delayed tips email and flag it on the onboarding record."""
    print(f"[Step] Sending tips email to {user['user_id']}...")
    updated = dict(user)
    updated["tips_sent"] = True
    return updated
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@step()
async def send_survey_email(user: dict) -> dict:
    """Send the feedback survey and flag it on the onboarding record."""
    print(f"[Step] Sending survey email to {user['user_id']}...")
    updated = dict(user)
    updated["survey_sent"] = True
    return updated
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@workflow(tags=["celery", "durable"])
async def onboarding_workflow(user_id: str) -> dict:
    """
    User onboarding workflow with scheduled emails.

    Demonstrates automatic sleep resumption:
    1. Send welcome email immediately
    2. Wait 30 seconds, then send tips email
    3. Wait another 30 seconds, then send survey

    With Celery runtime, sleeps are handled automatically:
    - Workflow suspends and worker is freed
    - Celery schedules resumption task
    - Worker picks up and continues execution
    """
    user = await send_welcome_email(user_id)

    print("[Workflow] Sleeping for 30 seconds before tips email...")
    # Durable sleep: the run suspends here and a worker resumes it later.
    await sleep("30s")

    user = await send_tips_email(user)

    print("[Workflow] Sleeping for 30 seconds before survey...")
    await sleep("30s")

    user = await send_survey_email(user)
    # Final record carries welcome_sent/tips_sent/survey_sent flags.
    return user
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
async def main() -> None:
    """Start the onboarding workflow from the command line."""
    import argparse

    import pyworkflow

    cli = argparse.ArgumentParser(description="User Onboarding Workflow with Sleeps")
    cli.add_argument("--user-id", default="user-456", help="User ID to onboard")
    args = cli.parse_args()

    # Configuration is automatically loaded from pyworkflow.config.yaml
    print(f"Starting onboarding workflow for {args.user_id}...")
    print("(Workflow will sleep between emails - watch the worker output)")
    run_id = await pyworkflow.start(onboarding_workflow, args.user_id)
    for line in (
        f"Workflow started with run_id: {run_id}",
        f"\nCheck status: pyworkflow runs status {run_id}",
        "List suspended: pyworkflow runs list --status suspended",
    ):
        print(line)


if __name__ == "__main__":
    import asyncio

    asyncio.run(main())
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Celery Durable Workflow - Retry Handling
|
|
3
|
+
|
|
4
|
+
This example demonstrates automatic retry handling on Celery workers.
|
|
5
|
+
- Steps can specify max_retries and retry_delay
|
|
6
|
+
- RetryableError triggers automatic retry with backoff
|
|
7
|
+
- FatalError stops workflow immediately (no retry)
|
|
8
|
+
|
|
9
|
+
Prerequisites:
|
|
10
|
+
1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
|
|
11
|
+
2. Start worker: pyworkflow --module examples.celery.durable.03_retries worker run
|
|
12
|
+
|
|
13
|
+
Run with CLI:
|
|
14
|
+
pyworkflow --module examples.celery.durable.03_retries workflows run retry_demo_workflow \
|
|
15
|
+
--arg endpoint=/api/data
|
|
16
|
+
|
|
17
|
+
The workflow has a 30% failure rate - run multiple times to see retry behavior.
|
|
18
|
+
|
|
19
|
+
Check status:
|
|
20
|
+
pyworkflow runs list
|
|
21
|
+
pyworkflow runs logs <run_id> --filter failed
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
import random
|
|
25
|
+
|
|
26
|
+
from pyworkflow import step, workflow
|
|
27
|
+
from pyworkflow.core.exceptions import FatalError, RetryableError
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@step(max_retries=3)
async def flaky_api_call(endpoint: str) -> dict:
    """
    Simulate a flaky API call that may fail.

    Outcome is drawn once from random.random():
    - roll < 0.3  -> RetryableError (30%, will retry)
    - roll < 0.4  -> FatalError (10%, will not retry)
    - otherwise   -> success (60%)
    """
    print(f"[Step] Calling API: {endpoint}...")

    outcome = random.random()

    if outcome < 0.3:
        # Transient failure: the framework retries with backoff (max_retries=3).
        print("[Step] API temporarily unavailable, will retry...")
        raise RetryableError("API temporarily unavailable", retry_after="5s")
    elif outcome < 0.4:
        # Permanent failure: the workflow stops without retrying.
        print("[Step] API returned invalid response, fatal error...")
        raise FatalError("API returned invalid response - cannot retry")

    print("[Step] API call successful!")
    return {"endpoint": endpoint, "status": "success", "data": {"value": 42}}
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@step()
async def process_response(response: dict) -> dict:
    """Post-process a successful API response and mark it as handled."""
    print(f"[Step] Processing response from {response['endpoint']}...")
    processed = dict(response)
    processed["processed"] = True
    return processed
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
@workflow(tags=["celery", "durable"])
async def retry_demo_workflow(endpoint: str) -> dict:
    """
    Workflow demonstrating automatic retry handling.

    The flaky_api_call step has:
    - 30% failure rate with RetryableError (auto-retry)
    - 10% failure rate with FatalError (no retry)
    - 60% success rate

    Retries happen automatically with exponential backoff on workers.
    """
    # flaky_api_call may raise and be retried transparently; by the time
    # control returns here, the call has succeeded.
    response = await flaky_api_call(endpoint)
    result = await process_response(response)
    return {"message": "API call and processing succeeded", **result}
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
async def main() -> None:
    """Parse CLI flags and kick off the retry demo workflow."""
    import argparse

    import pyworkflow

    cli = argparse.ArgumentParser(description="Retry Handling Demo Workflow")
    cli.add_argument("--endpoint", default="/api/data", help="API endpoint to call")
    args = cli.parse_args()

    # Configuration is automatically loaded from pyworkflow.config.yaml
    print(f"Starting retry demo workflow for endpoint {args.endpoint}...")
    print("(30% chance of retry, 10% chance of fatal error, 60% success)")
    run_id = await pyworkflow.start(retry_demo_workflow, args.endpoint)
    for line in (
        f"Workflow started with run_id: {run_id}",
        f"\nCheck status: pyworkflow runs status {run_id}",
        f"View logs: pyworkflow runs logs {run_id}",
    ):
        print(line)


if __name__ == "__main__":
    import asyncio

    asyncio.run(main())
|