pyworkflow-engine 0.1.7__py3-none-any.whl → 0.1.9__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the versions as they appear in their respective public registries.
Files changed (145)
  1. pyworkflow/__init__.py +10 -1
  2. pyworkflow/celery/tasks.py +272 -24
  3. pyworkflow/cli/__init__.py +4 -1
  4. pyworkflow/cli/commands/runs.py +4 -4
  5. pyworkflow/cli/commands/setup.py +203 -4
  6. pyworkflow/cli/utils/config_generator.py +76 -3
  7. pyworkflow/cli/utils/docker_manager.py +232 -0
  8. pyworkflow/context/__init__.py +13 -0
  9. pyworkflow/context/base.py +26 -0
  10. pyworkflow/context/local.py +80 -0
  11. pyworkflow/context/step_context.py +295 -0
  12. pyworkflow/core/registry.py +6 -1
  13. pyworkflow/core/step.py +141 -0
  14. pyworkflow/core/workflow.py +56 -0
  15. pyworkflow/engine/events.py +30 -0
  16. pyworkflow/engine/replay.py +39 -0
  17. pyworkflow/primitives/child_workflow.py +1 -1
  18. pyworkflow/runtime/local.py +1 -1
  19. pyworkflow/storage/__init__.py +14 -0
  20. pyworkflow/storage/base.py +35 -0
  21. pyworkflow/storage/cassandra.py +1747 -0
  22. pyworkflow/storage/config.py +69 -0
  23. pyworkflow/storage/dynamodb.py +31 -2
  24. pyworkflow/storage/file.py +28 -0
  25. pyworkflow/storage/memory.py +18 -0
  26. pyworkflow/storage/mysql.py +1159 -0
  27. pyworkflow/storage/postgres.py +27 -2
  28. pyworkflow/storage/schemas.py +4 -3
  29. pyworkflow/storage/sqlite.py +25 -2
  30. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.9.dist-info}/METADATA +7 -4
  31. pyworkflow_engine-0.1.9.dist-info/RECORD +91 -0
  32. pyworkflow_engine-0.1.9.dist-info/top_level.txt +1 -0
  33. dashboard/backend/app/__init__.py +0 -1
  34. dashboard/backend/app/config.py +0 -32
  35. dashboard/backend/app/controllers/__init__.py +0 -6
  36. dashboard/backend/app/controllers/run_controller.py +0 -86
  37. dashboard/backend/app/controllers/workflow_controller.py +0 -33
  38. dashboard/backend/app/dependencies/__init__.py +0 -5
  39. dashboard/backend/app/dependencies/storage.py +0 -50
  40. dashboard/backend/app/repositories/__init__.py +0 -6
  41. dashboard/backend/app/repositories/run_repository.py +0 -80
  42. dashboard/backend/app/repositories/workflow_repository.py +0 -27
  43. dashboard/backend/app/rest/__init__.py +0 -8
  44. dashboard/backend/app/rest/v1/__init__.py +0 -12
  45. dashboard/backend/app/rest/v1/health.py +0 -33
  46. dashboard/backend/app/rest/v1/runs.py +0 -133
  47. dashboard/backend/app/rest/v1/workflows.py +0 -41
  48. dashboard/backend/app/schemas/__init__.py +0 -23
  49. dashboard/backend/app/schemas/common.py +0 -16
  50. dashboard/backend/app/schemas/event.py +0 -24
  51. dashboard/backend/app/schemas/hook.py +0 -25
  52. dashboard/backend/app/schemas/run.py +0 -54
  53. dashboard/backend/app/schemas/step.py +0 -28
  54. dashboard/backend/app/schemas/workflow.py +0 -31
  55. dashboard/backend/app/server.py +0 -87
  56. dashboard/backend/app/services/__init__.py +0 -6
  57. dashboard/backend/app/services/run_service.py +0 -240
  58. dashboard/backend/app/services/workflow_service.py +0 -155
  59. dashboard/backend/main.py +0 -18
  60. docs/concepts/cancellation.mdx +0 -362
  61. docs/concepts/continue-as-new.mdx +0 -434
  62. docs/concepts/events.mdx +0 -266
  63. docs/concepts/fault-tolerance.mdx +0 -370
  64. docs/concepts/hooks.mdx +0 -552
  65. docs/concepts/limitations.mdx +0 -167
  66. docs/concepts/schedules.mdx +0 -775
  67. docs/concepts/sleep.mdx +0 -312
  68. docs/concepts/steps.mdx +0 -301
  69. docs/concepts/workflows.mdx +0 -255
  70. docs/guides/cli.mdx +0 -942
  71. docs/guides/configuration.mdx +0 -560
  72. docs/introduction.mdx +0 -155
  73. docs/quickstart.mdx +0 -279
  74. examples/__init__.py +0 -1
  75. examples/celery/__init__.py +0 -1
  76. examples/celery/durable/docker-compose.yml +0 -55
  77. examples/celery/durable/pyworkflow.config.yaml +0 -12
  78. examples/celery/durable/workflows/__init__.py +0 -122
  79. examples/celery/durable/workflows/basic.py +0 -87
  80. examples/celery/durable/workflows/batch_processing.py +0 -102
  81. examples/celery/durable/workflows/cancellation.py +0 -273
  82. examples/celery/durable/workflows/child_workflow_patterns.py +0 -240
  83. examples/celery/durable/workflows/child_workflows.py +0 -202
  84. examples/celery/durable/workflows/continue_as_new.py +0 -260
  85. examples/celery/durable/workflows/fault_tolerance.py +0 -210
  86. examples/celery/durable/workflows/hooks.py +0 -211
  87. examples/celery/durable/workflows/idempotency.py +0 -112
  88. examples/celery/durable/workflows/long_running.py +0 -99
  89. examples/celery/durable/workflows/retries.py +0 -101
  90. examples/celery/durable/workflows/schedules.py +0 -209
  91. examples/celery/transient/01_basic_workflow.py +0 -91
  92. examples/celery/transient/02_fault_tolerance.py +0 -257
  93. examples/celery/transient/__init__.py +0 -20
  94. examples/celery/transient/pyworkflow.config.yaml +0 -25
  95. examples/local/__init__.py +0 -1
  96. examples/local/durable/01_basic_workflow.py +0 -94
  97. examples/local/durable/02_file_storage.py +0 -132
  98. examples/local/durable/03_retries.py +0 -169
  99. examples/local/durable/04_long_running.py +0 -119
  100. examples/local/durable/05_event_log.py +0 -145
  101. examples/local/durable/06_idempotency.py +0 -148
  102. examples/local/durable/07_hooks.py +0 -334
  103. examples/local/durable/08_cancellation.py +0 -233
  104. examples/local/durable/09_child_workflows.py +0 -198
  105. examples/local/durable/10_child_workflow_patterns.py +0 -265
  106. examples/local/durable/11_continue_as_new.py +0 -249
  107. examples/local/durable/12_schedules.py +0 -198
  108. examples/local/durable/__init__.py +0 -1
  109. examples/local/transient/01_quick_tasks.py +0 -87
  110. examples/local/transient/02_retries.py +0 -130
  111. examples/local/transient/03_sleep.py +0 -141
  112. examples/local/transient/__init__.py +0 -1
  113. pyworkflow_engine-0.1.7.dist-info/RECORD +0 -196
  114. pyworkflow_engine-0.1.7.dist-info/top_level.txt +0 -5
  115. tests/examples/__init__.py +0 -0
  116. tests/integration/__init__.py +0 -0
  117. tests/integration/test_cancellation.py +0 -330
  118. tests/integration/test_child_workflows.py +0 -439
  119. tests/integration/test_continue_as_new.py +0 -428
  120. tests/integration/test_dynamodb_storage.py +0 -1146
  121. tests/integration/test_fault_tolerance.py +0 -369
  122. tests/integration/test_schedule_storage.py +0 -484
  123. tests/unit/__init__.py +0 -0
  124. tests/unit/backends/__init__.py +0 -1
  125. tests/unit/backends/test_dynamodb_storage.py +0 -1554
  126. tests/unit/backends/test_postgres_storage.py +0 -1281
  127. tests/unit/backends/test_sqlite_storage.py +0 -1460
  128. tests/unit/conftest.py +0 -41
  129. tests/unit/test_cancellation.py +0 -364
  130. tests/unit/test_child_workflows.py +0 -680
  131. tests/unit/test_continue_as_new.py +0 -441
  132. tests/unit/test_event_limits.py +0 -316
  133. tests/unit/test_executor.py +0 -320
  134. tests/unit/test_fault_tolerance.py +0 -334
  135. tests/unit/test_hooks.py +0 -495
  136. tests/unit/test_registry.py +0 -261
  137. tests/unit/test_replay.py +0 -420
  138. tests/unit/test_schedule_schemas.py +0 -285
  139. tests/unit/test_schedule_utils.py +0 -286
  140. tests/unit/test_scheduled_workflow.py +0 -274
  141. tests/unit/test_step.py +0 -353
  142. tests/unit/test_workflow.py +0 -243
  143. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.9.dist-info}/WHEEL +0 -0
  144. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.9.dist-info}/entry_points.txt +0 -0
  145. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.9.dist-info}/licenses/LICENSE +0 -0
docs/quickstart.mdx DELETED
@@ -1,279 +0,0 @@
- ---
- title: 'Quick Start'
- description: 'Get up and running with PyWorkflow in under 5 minutes'
- ---
-
- ## Installation
-
- Install PyWorkflow using pip:
-
- ```bash
- pip install pyworkflow
- ```
-
- ## Create a New Project
-
- The fastest way to get started is with the `quickstart` command:
-
- ```bash
- pyworkflow quickstart
- ```
-
- This interactive command will:
- 1. Create a `workflows/` directory with sample workflows
- 2. Generate `pyworkflow.config.yaml` configuration
- 3. Optionally start Docker services (Redis + Dashboard)
-
- <Accordion title="Quickstart Output Example">
- ```
- ============================================================
- PyWorkflow Quickstart
- ============================================================
-
- Select a project template:
- ❯ Basic - Order processing and notifications (2 workflows)
-
- Storage backend:
- ❯ SQLite - Single file database (recommended)
-
- Start Docker services (Redis + Dashboard)? [Y/n]
-
- Creating project structure...
- ✓ Created: workflows/__init__.py
- ✓ Created: workflows/orders.py
- ✓ Created: workflows/notifications.py
- ✓ Created: pyworkflow.config.yaml
-
- ============================================================
- Project Created!
- ============================================================
-
- Next steps:
-
- 1. Start a worker:
-    $ pyworkflow worker start
-
- 2. Run a workflow:
-    $ pyworkflow workflows run process_order \
-        --input '{"order_id": "123", "amount": 49.99}'
-
- 3. View the dashboard:
-    Open http://localhost:5173 in your browser
- ```
- </Accordion>
-
- ### Non-Interactive Mode
-
- For CI/CD or scripting, use non-interactive mode:
-
- ```bash
- # Create project with defaults (SQLite storage, start Docker)
- pyworkflow quickstart --non-interactive
-
- # Create project without Docker
- pyworkflow quickstart --non-interactive --skip-docker
-
- # Use file storage instead of SQLite
- pyworkflow quickstart --non-interactive --storage file
- ```
-
- ---
-
- ## Manual Setup
-
- If you prefer to set up manually or need more control:
-
- <Tabs>
- <Tab title="Docker Compose (Recommended)">
- Generate Docker configuration and start services:
-
- ```bash
- # Generate docker-compose.yml and config
- pyworkflow setup
-
- # Start services
- docker compose up -d
- ```
-
- This starts Redis and the PyWorkflow Dashboard.
- </Tab>
- <Tab title="Manual">
- Start each component manually:
-
- ```bash
- # 1. Start Redis
- docker run -d -p 6379:6379 redis:7-alpine
-
- # 2. Create config file
- cat > pyworkflow.config.yaml << EOF
- module: workflows
- runtime: celery
- storage:
-   type: sqlite
-   base_path: pyworkflow_data/pyworkflow.db
- celery:
-   broker: redis://localhost:6379/0
-   result_backend: redis://localhost:6379/1
- EOF
-
- # 3. Start Celery worker
- pyworkflow worker start
- ```
- </Tab>
- </Tabs>
-
- ## Your First Workflow
-
- Create a simple onboarding workflow that sends emails with delays:
-
- ```python
- from pyworkflow import workflow, step, start, sleep
-
- @step()
- async def send_welcome_email(user_id: str):
-     """Send a welcome email to the new user."""
-     print(f"Sending welcome email to user {user_id}")
-     return f"Email sent to {user_id}"
-
- @step()
- async def send_tips_email(user_id: str):
-     """Send helpful tips after the welcome period."""
-     print(f"Sending tips email to user {user_id}")
-     return f"Tips sent to {user_id}"
-
- @workflow()
- async def onboarding_workflow(user_id: str):
-     # Send welcome email immediately
-     await send_welcome_email(user_id)
-
-     # Sleep for 1 day - workflow suspends, zero resources used
-     await sleep("1d")
-
-     # Automatically resumes after 1 day
-     await send_tips_email(user_id)
-
-     return "Onboarding complete"
-
- # Start the workflow
- run_id = start(onboarding_workflow, user_id="user_123")
- print(f"Workflow started: {run_id}")
- ```
-
- ## What Happens Under the Hood
-
- <Steps>
- <Step title="Workflow Starts">
- Your workflow is dispatched to an available Celery worker.
- </Step>
- <Step title="Welcome Email Sent">
- The `send_welcome_email` step executes and the result is recorded.
- </Step>
- <Step title="Workflow Suspends">
- When `sleep("1d")` is called, the workflow suspends and the worker is freed.
- Zero resources are consumed during the sleep period.
- </Step>
- <Step title="Automatic Resumption">
- After 1 day, Celery Beat automatically schedules the workflow to resume.
- </Step>
- <Step title="Tips Email Sent">
- The workflow picks up where it left off, sending the tips email.
- </Step>
- <Step title="Workflow Completes">
- The final result is recorded and the workflow is marked as complete.
- </Step>
- </Steps>
-
- ## Key Concepts
-
- <CardGroup cols={2}>
- <Card title="Workflows" icon="diagram-project" href="/concepts/workflows">
- Top-level orchestration functions that coordinate steps and handle business logic.
- </Card>
- <Card title="Steps" icon="stairs" href="/concepts/steps">
- Isolated, retryable units of work that run on Celery workers.
- </Card>
- <Card title="Sleep" icon="clock" href="/concepts/sleep">
- Pause workflows for any duration without consuming resources.
- </Card>
- <Card title="Events" icon="timeline" href="/concepts/events">
- Event sourcing provides durability and deterministic replay.
- </Card>
- </CardGroup>
-
- ## Adding Error Handling
-
- Make your workflows fault-tolerant with automatic retries:
-
- ```python
- from pyworkflow import step, RetryableError, FatalError
-
- @step(max_retries=3, retry_delay="exponential")
- async def call_payment_api(amount: float):
-     """Process payment with automatic retry on failure."""
-     try:
-         result = await payment_gateway.charge(amount)
-         return result
-     except PaymentGatewayTimeoutError:
-         # Retry with exponential backoff
-         raise RetryableError("Gateway timeout", retry_after="10s")
-     except InsufficientFundsError:
-         # Don't retry - this is a permanent failure
-         raise FatalError("Insufficient funds")
- ```
-
- <Tip>
- Use `RetryableError` for transient failures (network issues, timeouts) and `FatalError` for permanent failures (invalid input, business rule violations).
- </Tip>
-
- ## Running in Parallel
-
- Execute multiple steps concurrently using `asyncio.gather()`:
-
- ```python
- import asyncio
- from pyworkflow import workflow, step
-
- @step()
- async def fetch_user(user_id: str):
-     return {"id": user_id, "name": "Alice"}
-
- @step()
- async def fetch_orders(user_id: str):
-     return [{"id": "ORD-1"}, {"id": "ORD-2"}]
-
- @step()
- async def fetch_recommendations(user_id: str):
-     return ["Product A", "Product B"]
-
- @workflow()
- async def dashboard_data(user_id: str):
-     # Fetch all data in parallel
-     user, orders, recommendations = await asyncio.gather(
-         fetch_user(user_id),
-         fetch_orders(user_id),
-         fetch_recommendations(user_id)
-     )
-
-     return {
-         "user": user,
-         "orders": orders,
-         "recommendations": recommendations
-     }
- ```
-
- ## Next Steps
-
- <CardGroup cols={2}>
- <Card title="Core Concepts" icon="book" href="/concepts/workflows">
- Learn about workflows, steps, and event sourcing in depth.
- </Card>
- <Card title="Error Handling" icon="shield-check" href="/guides/error-handling">
- Build fault-tolerant workflows with retry strategies.
- </Card>
- <Card title="Testing" icon="flask-vial" href="/guides/testing">
- Write unit and integration tests for your workflows.
- </Card>
- <Card title="Deployment" icon="rocket" href="/guides/deployment">
- Deploy to production with Docker and Kubernetes.
- </Card>
- </CardGroup>
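
Note: the deleted quickstart shows retries and parallelism separately, but the two patterns compose. Below is a minimal sketch (not from the package) combining `@step(max_retries=3, retry_delay="exponential")` with the `asyncio.gather()` fan-out, assuming both behave as the quickstart above describes; `fetch_price` and `price_report` are hypothetical names introduced for illustration.

```python
import asyncio
import random

from pyworkflow import RetryableError, step, workflow

# Hypothetical sketch: a flaky lookup wrapped in the quickstart's retry
# options, fanned out across inputs with asyncio.gather().
@step(max_retries=3, retry_delay="exponential")
async def fetch_price(sku: str) -> float:
    # Simulated transient failure: raising RetryableError asks the engine
    # to retry the step, as in the quickstart's payment example.
    if random.random() < 0.3:
        raise RetryableError(f"price lookup timed out for {sku}", retry_after="10s")
    return round(random.uniform(5.0, 50.0), 2)

@workflow()
async def price_report(skus: list[str]) -> dict:
    # One retryable step per SKU, all running concurrently.
    prices = await asyncio.gather(*(fetch_price(sku) for sku in skus))
    return dict(zip(skus, prices))
```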
examples/__init__.py DELETED
@@ -1 +0,0 @@
- # PyWorkflow Examples Package
examples/celery/__init__.py DELETED
@@ -1 +0,0 @@
- # PyWorkflow Celery Examples Package
examples/celery/durable/docker-compose.yml DELETED
@@ -1,55 +0,0 @@
- services:
-   redis:
-     image: redis:7-alpine
-     container_name: pyworkflow-redis
-     ports:
-       - "6379:6379"
-     volumes:
-       - redis_data:/data
-     healthcheck:
-       test: ["CMD", "redis-cli", "ping"]
-       interval: 5s
-       timeout: 3s
-       retries: 5
-     restart: unless-stopped
-
-   dashboard-backend:
-     image: yashabro/pyworkflow-dashboard-backend:latest
-     platform: linux/amd64
-     container_name: pyworkflow-dashboard-backend
-     working_dir: /app/project
-     ports:
-       - "8585:8585"
-     environment:
-       - DASHBOARD_PYWORKFLOW_CONFIG_PATH=/app/project/pyworkflow.config.yaml
-       - DASHBOARD_STORAGE_TYPE=sqlite
-       - DASHBOARD_STORAGE_PATH=/app/project/pyworkflow_data
-       - DASHBOARD_HOST=0.0.0.0
-       - DASHBOARD_PORT=8585
-       - DASHBOARD_CORS_ORIGINS=["http://localhost:5173","http://localhost:3000"]
-       - PYWORKFLOW_CELERY_BROKER=redis://redis:6379/0
-       - PYWORKFLOW_CELERY_RESULT_BACKEND=redis://redis:6379/1
-       - PYTHONPATH=/app/project
-     volumes:
-       - .:/app/project:ro
-       - ./pyworkflow_data:/app/project/pyworkflow_data
-     depends_on:
-       redis:
-         condition: service_healthy
-     restart: unless-stopped
-
-   dashboard-frontend:
-     image: yashabro/pyworkflow-dashboard-frontend:latest
-     platform: linux/amd64
-     container_name: pyworkflow-dashboard-frontend
-     ports:
-       - "5173:80"
-     environment:
-       - VITE_API_URL=http://localhost:8585
-     depends_on:
-       - dashboard-backend
-     restart: unless-stopped
-
- volumes:
-   redis_data:
-     driver: local
examples/celery/durable/pyworkflow.config.yaml DELETED
@@ -1,12 +0,0 @@
- # PyWorkflow Configuration
- # Generated: 2025-12-27 11:44:23
- # Documentation: https://docs.pyworkflow.dev
-
- module: workflows
- runtime: celery
- storage:
-   type: sqlite
-   base_path: pyworkflow_data/pyworkflow.db
- celery:
-   broker: redis://localhost:6379/0
-   result_backend: redis://localhost:6379/1
examples/celery/durable/workflows/__init__.py DELETED
@@ -1,122 +0,0 @@
- """
- Example workflows demonstrating PyWorkflow features.
-
- This package contains example workflows organized by feature:
- - basic: Simple order processing workflow
- - long_running: Long-running onboarding workflow with sleeps
- - retries: Retry handling with flaky APIs
- - batch_processing: Processing data in batches
- - idempotency: Idempotent payment processing
- - fault_tolerance: Worker crash recovery
- - hooks: Human-in-the-loop approvals with webhooks
- - cancellation: Cancellable workflows with cleanup
- - child_workflows: Parent-child workflow orchestration
- - child_workflow_patterns: Advanced child workflow patterns
- - continue_as_new: Long-running workflows with state reset
- - schedules: Scheduled/recurring workflows
-
- Usage:
-     # Start workers
-     pyworkflow worker start
-
-     # List registered workflows
-     pyworkflow workflows list
-
-     # Trigger a workflow
-     pyworkflow workflows run order_workflow --input '{"order_id": "123", "amount": 99.99}'
- """
-
- # Basic workflow
- from .basic import order_workflow
-
- # Batch processing
- from .batch_processing import batch_workflow
-
- # Cancellation
- from .cancellation import cancel_demo_simple_workflow, cancellable_order_workflow
-
- # Child workflow patterns
- from .child_workflow_patterns import (
-     error_handling_parent_workflow,
-     failing_child_workflow,
-     level_1_workflow,
-     level_2_workflow,
-     level_3_workflow,
-     parallel_parent_workflow,
-     parallel_task_workflow,
-     try_exceed_max_depth,
- )
- from .child_workflows import (
-     notification_workflow,
-     order_fulfillment_workflow,
-     shipping_workflow,
- )
-
- # Child workflows
- from .child_workflows import (
-     payment_workflow as child_payment_workflow,
- )
-
- # Continue as new
- from .continue_as_new import batch_processor, message_consumer, recurring_report
-
- # Fault tolerance / recovery
- from .fault_tolerance import critical_pipeline, data_pipeline
-
- # Webhooks / Human-in-the-loop
- from .hooks import approval_workflow, multi_approval_workflow, simple_approval_workflow
-
- # Idempotency
- from .idempotency import payment_workflow as idempotent_payment_workflow
-
- # Long-running workflow with sleeps
- from .long_running import onboarding_workflow
-
- # Retry handling
- from .retries import retry_demo_workflow
-
- # Schedules
- from .schedules import cleanup_workflow
-
- __all__ = [
-     # Basic
-     "order_workflow",
-     # Long-running
-     "onboarding_workflow",
-     # Retries
-     "retry_demo_workflow",
-     # Batch processing
-     "batch_workflow",
-     # Idempotency
-     "idempotent_payment_workflow",
-     # Fault tolerance
-     "data_pipeline",
-     "critical_pipeline",
-     # Hooks
-     "simple_approval_workflow",
-     "approval_workflow",
-     "multi_approval_workflow",
-     # Cancellation
-     "cancellable_order_workflow",
-     "cancel_demo_simple_workflow",
-     # Child workflows
-     "child_payment_workflow",
-     "shipping_workflow",
-     "notification_workflow",
-     "order_fulfillment_workflow",
-     # Child workflow patterns
-     "level_1_workflow",
-     "level_2_workflow",
-     "level_3_workflow",
-     "parallel_task_workflow",
-     "parallel_parent_workflow",
-     "failing_child_workflow",
-     "error_handling_parent_workflow",
-     "try_exceed_max_depth",
-     # Continue as new
-     "batch_processor",
-     "message_consumer",
-     "recurring_report",
-     # Schedules
-     "cleanup_workflow",
- ]
examples/celery/durable/workflows/basic.py DELETED
@@ -1,87 +0,0 @@
- """
- Celery Durable Workflow - Basic Example
-
- This example demonstrates a simple event-sourced workflow running on Celery workers.
- - 3-step order processing workflow
- - Distributed execution across workers
- - Events recorded for each step
-
- Prerequisites:
-     1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
-     2. Start worker: pyworkflow --module examples.celery.durable.01_basic_workflow worker run
-
- Run with CLI:
-     pyworkflow --module examples.celery.durable.01_basic_workflow workflows run order_workflow \
-         --arg order_id=order-123 --arg amount=99.99
-
- Check status:
-     pyworkflow runs list
-     pyworkflow runs status <run_id>
-     pyworkflow runs logs <run_id>
- """
-
- from pyworkflow import step, workflow
-
-
- @step(name="basic_validate_order")
- async def validate_order(order_id: str) -> dict:
-     """Validate the order exists and is processable."""
-     print(f"[Step] Validating order {order_id}...")
-     return {"order_id": order_id, "valid": True}
-
-
- @step(name="basic_process_payment")
- async def process_payment(order: dict, amount: float) -> dict:
-     """Process payment for the order."""
-     print(f"[Step] Processing payment ${amount:.2f} for {order['order_id']}...")
-     return {**order, "paid": True, "amount": amount}
-
-
- @step()
- async def send_confirmation(order: dict) -> dict:
-     """Send order confirmation email."""
-     print(f"[Step] Sending confirmation for {order['order_id']}...")
-     return {**order, "confirmed": True}
-
-
- @workflow(tags=["celery", "durable"])
- async def order_workflow(order_id: str, amount: float) -> dict:
-     """
-     Complete order processing workflow.
-
-     Steps:
-     1. Validate the order
-     2. Process payment
-     3. Send confirmation
-
-     Each step runs on Celery workers and is recorded as an event.
-     """
-     order = await validate_order(order_id)
-     order = await process_payment(order, amount)
-     order = await send_confirmation(order)
-     return order
-
-
- async def main() -> None:
-     """Run the order workflow example."""
-     import argparse
-
-     import pyworkflow
-
-     parser = argparse.ArgumentParser(description="Order Processing Workflow")
-     parser.add_argument("--order-id", default="order-123", help="Order ID to process")
-     parser.add_argument("--amount", type=float, default=99.99, help="Order amount")
-     args = parser.parse_args()
-
-     # Configuration is automatically loaded from pyworkflow.config.yaml
-     # which sets runtime=celery and creates storage backend
-     print(f"Starting order workflow for {args.order_id} (${args.amount:.2f})...")
-     run_id = await pyworkflow.start(order_workflow, args.order_id, args.amount)
-     print(f"Workflow started with run_id: {run_id}")
-     print(f"\nCheck status: pyworkflow runs status {run_id}")
-
-
- if __name__ == "__main__":
-     import asyncio
-
-     asyncio.run(main())
examples/celery/durable/workflows/batch_processing.py DELETED
@@ -1,102 +0,0 @@
- """
- Celery Durable Workflow - Batch Processing
-
- This example demonstrates batch item processing on Celery workers.
- - Fetch items to process
- - Process each item (each as a recorded step)
- - Aggregate results
-
- Prerequisites:
-     1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
-     2. Start worker: pyworkflow --module examples.celery.durable.04_batch_processing worker run
-
- Run with CLI:
-     pyworkflow --module examples.celery.durable.04_batch_processing workflows run batch_workflow \
-         --arg batch_id=batch-789 --arg limit=5
-
- Check status:
-     pyworkflow runs status <run_id>
-     pyworkflow runs logs <run_id> --filter step_completed
- """
-
- from pyworkflow import step, workflow
-
-
- @step()
- async def fetch_batch_items(batch_id: str, limit: int = 100) -> list:
-     """Fetch items to process in this batch."""
-     print(f"[Step] Fetching batch {batch_id} with limit {limit}...")
-     # Simulate fetching items from database
-     items = [{"id": f"item-{i}", "batch_id": batch_id} for i in range(min(limit, 10))]
-     print(f"[Step] Fetched {len(items)} items")
-     return items
-
-
- @step(name="batch_process_item")
- async def process_item(item: dict) -> dict:
-     """Process a single item."""
-     print(f"[Step] Processing item {item['id']}...")
-     # Simulate processing
-     return {**item, "processed": True, "result": f"processed_{item['id']}"}
-
-
- @step()
- async def aggregate_results(results: list) -> dict:
-     """Aggregate processing results."""
-     successful = len([r for r in results if r.get("processed")])
-     print(f"[Step] Aggregating {len(results)} results ({successful} successful)...")
-     return {
-         "total": len(results),
-         "successful": successful,
-         "failed": len(results) - successful,
-     }
-
-
- @workflow(tags=["celery", "durable"])
- async def batch_workflow(batch_id: str, limit: int = 100) -> dict:
-     """
-     Batch processing workflow.
-
-     Steps:
-     1. Fetch items to process
-     2. Process each item individually
-     3. Aggregate and return results
-
-     Each item processing is recorded as a separate step event,
-     enabling fine-grained tracking and potential parallel execution.
-     """
-     items = await fetch_batch_items(batch_id, limit)
-
-     # Process items sequentially (each recorded as step)
-     results = []
-     for item in items:
-         result = await process_item(item)
-         results.append(result)
-
-     summary = await aggregate_results(results)
-     return {"batch_id": batch_id, **summary}
-
-
- async def main() -> None:
-     """Run the batch processing workflow example."""
-     import argparse
-
-     import pyworkflow
-
-     parser = argparse.ArgumentParser(description="Batch Processing Workflow")
-     parser.add_argument("--batch-id", default="batch-789", help="Batch ID to process")
-     parser.add_argument("--limit", type=int, default=5, help="Maximum items to process")
-     args = parser.parse_args()
-
-     # Configuration is automatically loaded from pyworkflow.config.yaml
-     print(f"Starting batch workflow for {args.batch_id} (limit: {args.limit})...")
-     run_id = await pyworkflow.start(batch_workflow, args.batch_id, args.limit)
-     print(f"Workflow started with run_id: {run_id}")
-     print(f"\nCheck status: pyworkflow runs status {run_id}")
-     print(f"View step logs: pyworkflow runs logs {run_id} --filter step_completed")
-
-
- if __name__ == "__main__":
-     import asyncio
-
-     asyncio.run(main())
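
Note: the deleted batch example processes items sequentially, and its docstring points out that per-item steps enable "potential parallel execution". A minimal sketch of that parallel variant follows, reusing the steps from the file above with the `asyncio.gather()` pattern shown in the deleted quickstart; `parallel_batch_workflow` is an illustrative name, not part of the package.

```python
import asyncio

from pyworkflow import workflow

# Assumes fetch_batch_items, process_item, and aggregate_results as
# defined in the deleted batch_processing.py above.
@workflow(tags=["celery", "durable"])
async def parallel_batch_workflow(batch_id: str, limit: int = 100) -> dict:
    items = await fetch_batch_items(batch_id, limit)

    # Each item is still recorded as its own step event, but the steps
    # now run concurrently instead of one at a time in a loop.
    results = await asyncio.gather(*(process_item(item) for item in items))

    summary = await aggregate_results(list(results))
    return {"batch_id": batch_id, **summary}
```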