pyworkflow-engine 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196) hide show
  1. dashboard/backend/app/__init__.py +1 -0
  2. dashboard/backend/app/config.py +32 -0
  3. dashboard/backend/app/controllers/__init__.py +6 -0
  4. dashboard/backend/app/controllers/run_controller.py +86 -0
  5. dashboard/backend/app/controllers/workflow_controller.py +33 -0
  6. dashboard/backend/app/dependencies/__init__.py +5 -0
  7. dashboard/backend/app/dependencies/storage.py +50 -0
  8. dashboard/backend/app/repositories/__init__.py +6 -0
  9. dashboard/backend/app/repositories/run_repository.py +80 -0
  10. dashboard/backend/app/repositories/workflow_repository.py +27 -0
  11. dashboard/backend/app/rest/__init__.py +8 -0
  12. dashboard/backend/app/rest/v1/__init__.py +12 -0
  13. dashboard/backend/app/rest/v1/health.py +33 -0
  14. dashboard/backend/app/rest/v1/runs.py +133 -0
  15. dashboard/backend/app/rest/v1/workflows.py +41 -0
  16. dashboard/backend/app/schemas/__init__.py +23 -0
  17. dashboard/backend/app/schemas/common.py +16 -0
  18. dashboard/backend/app/schemas/event.py +24 -0
  19. dashboard/backend/app/schemas/hook.py +25 -0
  20. dashboard/backend/app/schemas/run.py +54 -0
  21. dashboard/backend/app/schemas/step.py +28 -0
  22. dashboard/backend/app/schemas/workflow.py +31 -0
  23. dashboard/backend/app/server.py +87 -0
  24. dashboard/backend/app/services/__init__.py +6 -0
  25. dashboard/backend/app/services/run_service.py +240 -0
  26. dashboard/backend/app/services/workflow_service.py +155 -0
  27. dashboard/backend/main.py +18 -0
  28. docs/concepts/cancellation.mdx +362 -0
  29. docs/concepts/continue-as-new.mdx +434 -0
  30. docs/concepts/events.mdx +266 -0
  31. docs/concepts/fault-tolerance.mdx +370 -0
  32. docs/concepts/hooks.mdx +552 -0
  33. docs/concepts/limitations.mdx +167 -0
  34. docs/concepts/schedules.mdx +775 -0
  35. docs/concepts/sleep.mdx +312 -0
  36. docs/concepts/steps.mdx +301 -0
  37. docs/concepts/workflows.mdx +255 -0
  38. docs/guides/cli.mdx +942 -0
  39. docs/guides/configuration.mdx +560 -0
  40. docs/introduction.mdx +155 -0
  41. docs/quickstart.mdx +279 -0
  42. examples/__init__.py +1 -0
  43. examples/celery/__init__.py +1 -0
  44. examples/celery/durable/docker-compose.yml +55 -0
  45. examples/celery/durable/pyworkflow.config.yaml +12 -0
  46. examples/celery/durable/workflows/__init__.py +122 -0
  47. examples/celery/durable/workflows/basic.py +87 -0
  48. examples/celery/durable/workflows/batch_processing.py +102 -0
  49. examples/celery/durable/workflows/cancellation.py +273 -0
  50. examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
  51. examples/celery/durable/workflows/child_workflows.py +202 -0
  52. examples/celery/durable/workflows/continue_as_new.py +260 -0
  53. examples/celery/durable/workflows/fault_tolerance.py +210 -0
  54. examples/celery/durable/workflows/hooks.py +211 -0
  55. examples/celery/durable/workflows/idempotency.py +112 -0
  56. examples/celery/durable/workflows/long_running.py +99 -0
  57. examples/celery/durable/workflows/retries.py +101 -0
  58. examples/celery/durable/workflows/schedules.py +209 -0
  59. examples/celery/transient/01_basic_workflow.py +91 -0
  60. examples/celery/transient/02_fault_tolerance.py +257 -0
  61. examples/celery/transient/__init__.py +20 -0
  62. examples/celery/transient/pyworkflow.config.yaml +25 -0
  63. examples/local/__init__.py +1 -0
  64. examples/local/durable/01_basic_workflow.py +94 -0
  65. examples/local/durable/02_file_storage.py +132 -0
  66. examples/local/durable/03_retries.py +169 -0
  67. examples/local/durable/04_long_running.py +119 -0
  68. examples/local/durable/05_event_log.py +145 -0
  69. examples/local/durable/06_idempotency.py +148 -0
  70. examples/local/durable/07_hooks.py +334 -0
  71. examples/local/durable/08_cancellation.py +233 -0
  72. examples/local/durable/09_child_workflows.py +198 -0
  73. examples/local/durable/10_child_workflow_patterns.py +265 -0
  74. examples/local/durable/11_continue_as_new.py +249 -0
  75. examples/local/durable/12_schedules.py +198 -0
  76. examples/local/durable/__init__.py +1 -0
  77. examples/local/transient/01_quick_tasks.py +87 -0
  78. examples/local/transient/02_retries.py +130 -0
  79. examples/local/transient/03_sleep.py +141 -0
  80. examples/local/transient/__init__.py +1 -0
  81. pyworkflow/__init__.py +256 -0
  82. pyworkflow/aws/__init__.py +68 -0
  83. pyworkflow/aws/context.py +234 -0
  84. pyworkflow/aws/handler.py +184 -0
  85. pyworkflow/aws/testing.py +310 -0
  86. pyworkflow/celery/__init__.py +41 -0
  87. pyworkflow/celery/app.py +198 -0
  88. pyworkflow/celery/scheduler.py +315 -0
  89. pyworkflow/celery/tasks.py +1746 -0
  90. pyworkflow/cli/__init__.py +132 -0
  91. pyworkflow/cli/__main__.py +6 -0
  92. pyworkflow/cli/commands/__init__.py +1 -0
  93. pyworkflow/cli/commands/hooks.py +640 -0
  94. pyworkflow/cli/commands/quickstart.py +495 -0
  95. pyworkflow/cli/commands/runs.py +773 -0
  96. pyworkflow/cli/commands/scheduler.py +130 -0
  97. pyworkflow/cli/commands/schedules.py +794 -0
  98. pyworkflow/cli/commands/setup.py +703 -0
  99. pyworkflow/cli/commands/worker.py +413 -0
  100. pyworkflow/cli/commands/workflows.py +1257 -0
  101. pyworkflow/cli/output/__init__.py +1 -0
  102. pyworkflow/cli/output/formatters.py +321 -0
  103. pyworkflow/cli/output/styles.py +121 -0
  104. pyworkflow/cli/utils/__init__.py +1 -0
  105. pyworkflow/cli/utils/async_helpers.py +30 -0
  106. pyworkflow/cli/utils/config.py +130 -0
  107. pyworkflow/cli/utils/config_generator.py +344 -0
  108. pyworkflow/cli/utils/discovery.py +53 -0
  109. pyworkflow/cli/utils/docker_manager.py +651 -0
  110. pyworkflow/cli/utils/interactive.py +364 -0
  111. pyworkflow/cli/utils/storage.py +115 -0
  112. pyworkflow/config.py +329 -0
  113. pyworkflow/context/__init__.py +63 -0
  114. pyworkflow/context/aws.py +230 -0
  115. pyworkflow/context/base.py +416 -0
  116. pyworkflow/context/local.py +930 -0
  117. pyworkflow/context/mock.py +381 -0
  118. pyworkflow/core/__init__.py +0 -0
  119. pyworkflow/core/exceptions.py +353 -0
  120. pyworkflow/core/registry.py +313 -0
  121. pyworkflow/core/scheduled.py +328 -0
  122. pyworkflow/core/step.py +494 -0
  123. pyworkflow/core/workflow.py +294 -0
  124. pyworkflow/discovery.py +248 -0
  125. pyworkflow/engine/__init__.py +0 -0
  126. pyworkflow/engine/events.py +879 -0
  127. pyworkflow/engine/executor.py +682 -0
  128. pyworkflow/engine/replay.py +273 -0
  129. pyworkflow/observability/__init__.py +19 -0
  130. pyworkflow/observability/logging.py +234 -0
  131. pyworkflow/primitives/__init__.py +33 -0
  132. pyworkflow/primitives/child_handle.py +174 -0
  133. pyworkflow/primitives/child_workflow.py +372 -0
  134. pyworkflow/primitives/continue_as_new.py +101 -0
  135. pyworkflow/primitives/define_hook.py +150 -0
  136. pyworkflow/primitives/hooks.py +97 -0
  137. pyworkflow/primitives/resume_hook.py +210 -0
  138. pyworkflow/primitives/schedule.py +545 -0
  139. pyworkflow/primitives/shield.py +96 -0
  140. pyworkflow/primitives/sleep.py +100 -0
  141. pyworkflow/runtime/__init__.py +21 -0
  142. pyworkflow/runtime/base.py +179 -0
  143. pyworkflow/runtime/celery.py +310 -0
  144. pyworkflow/runtime/factory.py +101 -0
  145. pyworkflow/runtime/local.py +706 -0
  146. pyworkflow/scheduler/__init__.py +9 -0
  147. pyworkflow/scheduler/local.py +248 -0
  148. pyworkflow/serialization/__init__.py +0 -0
  149. pyworkflow/serialization/decoder.py +146 -0
  150. pyworkflow/serialization/encoder.py +162 -0
  151. pyworkflow/storage/__init__.py +54 -0
  152. pyworkflow/storage/base.py +612 -0
  153. pyworkflow/storage/config.py +185 -0
  154. pyworkflow/storage/dynamodb.py +1315 -0
  155. pyworkflow/storage/file.py +827 -0
  156. pyworkflow/storage/memory.py +549 -0
  157. pyworkflow/storage/postgres.py +1161 -0
  158. pyworkflow/storage/schemas.py +486 -0
  159. pyworkflow/storage/sqlite.py +1136 -0
  160. pyworkflow/utils/__init__.py +0 -0
  161. pyworkflow/utils/duration.py +177 -0
  162. pyworkflow/utils/schedule.py +391 -0
  163. pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
  164. pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
  165. pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
  166. pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
  167. pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
  168. pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
  169. tests/examples/__init__.py +0 -0
  170. tests/integration/__init__.py +0 -0
  171. tests/integration/test_cancellation.py +330 -0
  172. tests/integration/test_child_workflows.py +439 -0
  173. tests/integration/test_continue_as_new.py +428 -0
  174. tests/integration/test_dynamodb_storage.py +1146 -0
  175. tests/integration/test_fault_tolerance.py +369 -0
  176. tests/integration/test_schedule_storage.py +484 -0
  177. tests/unit/__init__.py +0 -0
  178. tests/unit/backends/__init__.py +1 -0
  179. tests/unit/backends/test_dynamodb_storage.py +1554 -0
  180. tests/unit/backends/test_postgres_storage.py +1281 -0
  181. tests/unit/backends/test_sqlite_storage.py +1460 -0
  182. tests/unit/conftest.py +41 -0
  183. tests/unit/test_cancellation.py +364 -0
  184. tests/unit/test_child_workflows.py +680 -0
  185. tests/unit/test_continue_as_new.py +441 -0
  186. tests/unit/test_event_limits.py +316 -0
  187. tests/unit/test_executor.py +320 -0
  188. tests/unit/test_fault_tolerance.py +334 -0
  189. tests/unit/test_hooks.py +495 -0
  190. tests/unit/test_registry.py +261 -0
  191. tests/unit/test_replay.py +420 -0
  192. tests/unit/test_schedule_schemas.py +285 -0
  193. tests/unit/test_schedule_utils.py +286 -0
  194. tests/unit/test_scheduled_workflow.py +274 -0
  195. tests/unit/test_step.py +353 -0
  196. tests/unit/test_workflow.py +243 -0
@@ -0,0 +1,273 @@
1
+ """
2
+ Celery Durable Workflow - Cancellation
3
+
4
+ This example demonstrates graceful workflow cancellation with Celery:
5
+ - Cancel running or suspended workflows via CLI
6
+ - Handle CancellationError for cleanup
7
+ - Use shield() to protect critical operations
8
+
9
+ ================================================================================
10
+ PREREQUISITES
11
+ ================================================================================
12
+
13
+ 1. Start Redis:
14
+ docker run -d -p 6379:6379 redis:7-alpine
15
+
16
+ 2. Start the Celery worker (in a separate terminal):
17
+    pyworkflow --module examples.celery.durable.workflows.cancellation worker run
18
+
19
+ ================================================================================
20
+ HOW TO RUN AND CANCEL
21
+ ================================================================================
22
+
23
+ STEP 1: Start the workflow (it will sleep for 60 seconds):
24
+
25
+    pyworkflow --module examples.celery.durable.workflows.cancellation workflows run \
26
+ cancellable_order_workflow --arg order_id=order-123
27
+
28
+ Output: Workflow started: run_abc123def456...
29
+
30
+ STEP 2: Check the status (should be "suspended" during sleep):
31
+
32
+ pyworkflow runs status <run_id>
33
+ pyworkflow runs list --status suspended
34
+
35
+ STEP 3: Cancel the workflow while it's sleeping:
36
+
37
+ pyworkflow runs cancel <run_id> --reason "Customer cancelled"
38
+
39
+ Or wait for cancellation to complete:
40
+
41
+ pyworkflow runs cancel <run_id> --wait --reason "Customer cancelled"
42
+
43
+ STEP 4: Check the worker logs - you should see:
44
+ - [Workflow] Cancellation detected!
45
+ - [Cleanup] Releasing inventory...
46
+ - [Cleanup] Refunding payment...
47
+
48
+ STEP 5: Verify the final status:
49
+
50
+ pyworkflow runs status <run_id>
51
+ pyworkflow runs logs <run_id> --filter cancel
52
+
53
+ ================================================================================
54
+ KEY POINTS
55
+ ================================================================================
56
+
57
+ - Cancellation is CHECKPOINT-BASED: happens before steps, during sleep/hook
58
+ - If a step is already running, cancellation waits until it completes
59
+ - Use shield() to protect cleanup code from cancellation
60
+ - Catch CancellationError to perform compensation logic
61
+
62
+ ================================================================================
63
+ """
64
+
65
+ import asyncio
66
+
67
+ from loguru import logger
68
+
69
+ from pyworkflow import (
70
+ CancellationError,
71
+ get_context,
72
+ shield,
73
+ sleep,
74
+ step,
75
+ workflow,
76
+ )
77
+
78
+
79
# --- Steps (prefixed to avoid naming conflicts with other examples) ---
@step()
async def cancel_demo_reserve_inventory(order_id: str) -> dict:
    """Reserve inventory for the given order and return the order record."""
    logger.info(f"[Step] Reserving inventory for order {order_id}...")
    # Stand-in for a call to a real inventory service.
    await asyncio.sleep(1)
    logger.info(f"[Step] Inventory reserved for {order_id}")
    reservation = {"order_id": order_id, "inventory_reserved": True}
    return reservation
87
+
88
+
89
@step()
async def cancel_demo_charge_payment(order: dict) -> dict:
    """Charge payment for the order and mark it as paid."""
    logger.info(f"[Step] Charging payment for order {order['order_id']}...")
    # Stand-in for a call to a real payment processor.
    await asyncio.sleep(1)
    logger.info(f"[Step] Payment charged for {order['order_id']}")
    charged = dict(order)
    charged["payment_charged"] = True
    return charged
96
+
97
+
98
@step()
async def cancel_demo_create_shipment(order: dict) -> dict:
    """Create a shipment for the order and mark it as shipped."""
    logger.info(f"[Step] Creating shipment for order {order['order_id']}...")
    # Stand-in for a call to a real fulfillment service.
    await asyncio.sleep(1)
    logger.info(f"[Step] Shipment created for {order['order_id']}")
    shipped = dict(order)
    shipped["shipment_created"] = True
    return shipped
105
+
106
+
107
@step()
async def cancel_demo_release_inventory(order_id: str) -> None:
    """Compensation step: release the inventory reserved earlier."""
    logger.warning(f"[Cleanup] Releasing inventory for order {order_id}...")
    # Stand-in for the real release call.
    await asyncio.sleep(0.5)
    logger.warning(f"[Cleanup] Inventory released for {order_id}")
113
+
114
+
115
@step()
async def cancel_demo_refund_payment(order_id: str) -> None:
    """Compensation step: refund the payment charged earlier."""
    logger.warning(f"[Cleanup] Refunding payment for order {order_id}...")
    # Stand-in for the real refund call.
    await asyncio.sleep(0.5)
    logger.warning(f"[Cleanup] Payment refunded for {order_id}")
121
+
122
+
123
# --- Workflow with Cancellation Handling ---
@workflow(tags=["celery", "durable"])
async def cancellable_order_workflow(order_id: str) -> dict:
    """
    Order processing workflow with cancellation handling.

    Flow:
      1. Reserve inventory
      2. Sleep 60s (waiting for approval)   <-- cancel here
      3. Charge payment
      4. Sleep 60s (waiting for warehouse)  <-- or cancel here
      5. Create shipment

    On cancellation, CancellationError is raised at the next checkpoint
    (before a step, or while suspended in sleep). The except block then
    releases inventory and refunds payment inside shield() so the
    compensation itself cannot be interrupted, and finally re-raises so
    the run is marked CANCELLED.

    To cancel this workflow:
        pyworkflow runs cancel <run_id> --reason "Customer cancelled"
    """
    try:
        logger.info(f"[Workflow] Starting order processing for {order_id}")

        # Step 1: Reserve inventory.
        state = await cancel_demo_reserve_inventory(order_id)

        # The workflow suspends during sleep -- an ideal moment to cancel.
        logger.info("[Workflow] Waiting 60s for customer approval...")
        logger.info("[Workflow] >>> To cancel: pyworkflow runs cancel <run_id> --reason 'test'")
        await sleep("60s")

        # Step 2: Charge payment. A pending cancellation is detected
        # at this checkpoint, before the step executes.
        state = await cancel_demo_charge_payment(state)

        # Second sleep -- another cancellation window.
        logger.info("[Workflow] Waiting 60s for warehouse processing...")
        logger.info("[Workflow] >>> To cancel: pyworkflow runs cancel <run_id> --reason 'test'")
        await sleep("60s")

        # Step 3: Create shipment.
        state = await cancel_demo_create_shipment(state)

        logger.info(f"[Workflow] Order {order_id} completed successfully!")
        return state

    except CancellationError as e:
        # Cancellation observed -- run compensation before giving up.
        logger.warning(f"[Workflow] Cancellation detected! Reason: {e.reason}")
        logger.warning("[Workflow] Performing compensation/cleanup...")

        # shield() guarantees the cleanup steps finish even if further
        # cancellation requests arrive while they run.
        async with shield():
            await cancel_demo_release_inventory(order_id)
            await cancel_demo_refund_payment(order_id)

        logger.warning("[Workflow] Cleanup complete. Re-raising CancellationError.")
        raise  # Propagate so the workflow is marked CANCELLED
184
+
185
+
186
@step()
async def cancel_demo_long_running_step(items: list) -> list:
    """
    Cooperative cancellation inside a long-running step.

    A step is never interrupted mid-execution, so long loops should poll
    ctx.check_cancellation() periodically to stay responsive to
    cancellation requests.
    """
    ctx = get_context()
    processed = []
    for index, item in enumerate(items):
        # Every 10th iteration, give the engine a chance to cancel us.
        if index % 10 == 0:
            ctx.check_cancellation()  # raises CancellationError if cancelled
        # Simulate per-item work.
        await asyncio.sleep(0.1)
        processed.append(f"processed_{item}")
    return processed
207
+
208
+
209
# --- Alternative: Workflow without cleanup ---
@workflow(tags=["celery", "durable"])
async def cancel_demo_simple_workflow(data: str) -> str:
    """
    Minimal workflow with no explicit cancellation handling.

    When cancelled, CancellationError simply propagates, the run is
    marked CANCELLED, and no compensation is performed -- appropriate
    when there is nothing to clean up.
    """
    logger.info(f"[Workflow] Processing: {data}")
    await sleep("60s")  # Cancel during this sleep
    logger.info(f"[Workflow] Done: {data}")
    return f"result_{data}"
228
+
229
+
230
async def main() -> None:
    """Start the cancellable order workflow and print cancellation hints."""
    import argparse

    import pyworkflow

    parser = argparse.ArgumentParser(description="Order Workflow with Cancellation")
    parser.add_argument("--order-id", default="order-123", help="Order ID to process")
    args = parser.parse_args()

    # Reused section divider -- identical to the original "=" * 70 output.
    divider = "=" * 70

    print(divider)
    print("ORDER WORKFLOW WITH CANCELLATION SUPPORT")
    print(divider)
    print()
    print(f"Starting order workflow for {args.order_id}...")
    print("The workflow will sleep for 60 seconds - perfect time to cancel!")
    print()

    run_id = await pyworkflow.start(cancellable_order_workflow, args.order_id)

    print(f"Workflow started with run_id: {run_id}")
    print()
    print(divider)
    print("TO CANCEL THIS WORKFLOW:")
    print(divider)
    print()
    print(f" pyworkflow runs cancel {run_id} --reason 'Customer cancelled'")
    print()
    print("Or wait for it and see cleanup:")
    print()
    print(f" pyworkflow runs cancel {run_id} --wait --reason 'Customer cancelled'")
    print()
    print(divider)
    print("CHECK STATUS:")
    print(divider)
    print()
    print(f" pyworkflow runs status {run_id}")
    print(f" pyworkflow runs logs {run_id}")
    print(f" pyworkflow runs logs {run_id} --filter cancel")
    print()
270
+
271
+
272
# Allow running this example directly as a script.
if __name__ == "__main__":
    asyncio.run(main())
@@ -0,0 +1,240 @@
1
+ """
2
+ Celery Durable Workflow - Child Workflows Advanced Patterns
3
+
4
+ This example demonstrates advanced child workflow patterns:
5
+ - Nested child workflows (parent -> child -> grandchild)
6
+ - Parallel child workflows using fire-and-forget + handle.result()
7
+ - Error handling with ChildWorkflowFailedError
8
+ - Cancellation propagation (TERMINATE policy)
9
+ - Using ChildWorkflowHandle for async patterns
10
+
11
+ Prerequisites:
12
+ 1. Start Redis: docker run -d -p 6379:6379 redis:7-alpine
13
+     2. Start worker: pyworkflow --module examples.celery.durable.workflows.child_workflow_patterns worker run
14
+
15
+ Run demos with CLI:
16
+ # Nested workflows (3 levels)
17
+     pyworkflow --module examples.celery.durable.workflows.child_workflow_patterns workflows run patterns_demo_level_1_workflow
18
+
19
+ # Parallel children
20
+     pyworkflow --module examples.celery.durable.workflows.child_workflow_patterns workflows run patterns_demo_parallel_parent_workflow
21
+
22
+ # Error handling
23
+     pyworkflow --module examples.celery.durable.workflows.child_workflow_patterns workflows run patterns_demo_error_handling_parent_workflow
24
+
25
+ Check status:
26
+ pyworkflow runs list
27
+ pyworkflow runs status <run_id>
28
+ pyworkflow runs children <run_id>
29
+ """
30
+
31
+ import asyncio
32
+
33
+ from pyworkflow import (
34
+ ChildWorkflowFailedError,
35
+ MaxNestingDepthError,
36
+ start_child_workflow,
37
+ step,
38
+ workflow,
39
+ )
40
+
41
+
42
# --- Steps ---
@step(name="patterns_demo_do_work")
async def do_work(name: str, duration: float = 0.1) -> dict:
    """Pretend to do `duration` seconds of work under the given name."""
    print(f" [{name}] Working for {duration}s...")
    await asyncio.sleep(duration)
    outcome = {"name": name, "completed": True}
    return outcome
49
+
50
+
51
# max_retries=0 so the failure propagates to the caller immediately
# instead of being retried.
@step(name="patterns_demo_failing_step", max_retries=0)
async def failing_step() -> dict:
    """A step that unconditionally raises ValueError."""
    raise ValueError("This step always fails!")
57
+
58
+
59
# --- Workflows for Nesting Demo ---
@workflow(name="patterns_demo_level_3_workflow", tags=["celery", "durable"])
async def level_3_workflow(task_id: str) -> dict:
    """Grandchild workflow (nesting depth 3)."""
    print(f" [Level3] Starting task {task_id}")
    outcome = await do_work(f"level3-{task_id}")
    print(f" [Level3] Completed task {task_id}")
    return outcome
67
+
68
+
69
@workflow(name="patterns_demo_level_2_workflow", tags=["celery", "durable"])
async def level_2_workflow(task_id: str) -> dict:
    """Child workflow that spawns a grandchild (reaching depth 3)."""
    print(f" [Level2] Starting task {task_id}")
    # The grandchild runs at nesting depth 3.
    grandchild = await start_child_workflow(level_3_workflow, task_id)
    print(f" [Level2] Completed task {task_id}")
    return {"level2": task_id, "grandchild": grandchild}
82
+
83
+
84
@workflow(name="patterns_demo_level_1_workflow", tags=["celery", "durable"])
async def level_1_workflow() -> dict:
    """Parent workflow demonstrating nested children (3 levels total)."""
    print("[Level1] Starting nested workflow demo")
    # The child spawns its own child, so three levels run in total.
    nested = await start_child_workflow(level_2_workflow, "nested-task")
    print("[Level1] Completed nested workflow demo")
    return {"level1": True, "child": nested}
94
+
95
+
96
# --- Workflows for Parallel Demo ---
@workflow(name="patterns_demo_parallel_task_workflow", tags=["celery", "durable"])
async def parallel_task_workflow(task_id: str, duration: float) -> dict:
    """Small workflow intended to run in parallel with its siblings."""
    outcome = await do_work(f"parallel-{task_id}", duration)
    return {"task_id": task_id, **outcome}
102
+
103
+
104
@workflow(name="patterns_demo_parallel_parent_workflow", tags=["celery", "durable"])
async def parallel_parent_workflow() -> dict:
    """Run three children concurrently using fire-and-forget handles."""
    print("[ParallelParent] Starting parallel children demo")

    # Launch phase: wait_for_completion=False returns a handle right away
    # instead of blocking until the child finishes.
    handles = []
    for idx in range(3):
        child = await start_child_workflow(
            parallel_task_workflow,
            f"task-{idx}",
            0.1 * (idx + 1),  # stagger the simulated durations
            wait_for_completion=False,
        )
        handles.append(child)
        print(f" Started child {idx}: {child.child_run_id}")

    # Join phase: block on each handle until its child completes.
    print("[ParallelParent] Waiting for all children...")
    results = []
    for idx, child in enumerate(handles):
        outcome = await child.result(timeout=30.0)
        results.append(outcome)
        print(f" Child {idx} completed: {outcome}")

    print("[ParallelParent] All children completed")
    return {"children_count": len(results), "results": results}
131
+
132
+
133
# --- Workflows for Error Handling Demo ---
@workflow(name="patterns_demo_failing_child_workflow", tags=["celery", "durable"])
async def failing_child_workflow() -> dict:
    """Child workflow whose only step always raises."""
    await failing_step()
    return {"should": "never reach here"}
139
+
140
+
141
@workflow(name="patterns_demo_error_handling_parent_workflow", tags=["celery", "durable"])
async def error_handling_parent_workflow() -> dict:
    """Demonstrate catching a child's failure via ChildWorkflowFailedError."""
    print("[ErrorParent] Starting error handling demo")

    try:
        # The child raises; its failure surfaces here as
        # ChildWorkflowFailedError with the details attached.
        await start_child_workflow(failing_child_workflow)
    except ChildWorkflowFailedError as e:
        print("[ErrorParent] Caught child failure!")
        print(f" Child run_id: {e.child_run_id}")
        print(f" Child workflow: {e.child_workflow_name}")
        print(f" Error: {e.error}")
        print(f" Error type: {e.error_type}")
        return {
            "status": "child_failed",
            "error": e.error,
            "child_run_id": e.child_run_id,
        }

    return {"status": "success"}
162
+
163
+
164
# --- Workflow for Max Nesting Depth Demo ---
@workflow(name="patterns_demo_try_exceed_max_depth", tags=["celery", "durable"])
async def try_exceed_max_depth() -> dict:
    """Attempt to exceed the maximum nesting depth (expected to fail at 4)."""
    try:
        # This workflow would execute at depth 4 -- one past the limit.
        @workflow(name="patterns_demo_level_4_workflow")
        async def level_4_workflow() -> dict:
            return await do_work("level4")

        await start_child_workflow(level_4_workflow)
    except MaxNestingDepthError as e:
        print(f" Caught MaxNestingDepthError: {e}")
        return {"error": str(e), "max_depth": e.MAX_DEPTH}
    else:
        return {"status": "success"}
179
+
180
+
181
async def main() -> None:
    """Start the selected demo workflow(s) and poll each until it finishes.

    Command-line options:
        --demo {nested,parallel,error,all}
            Which demo(s) to start (default: all).

    Each workflow is polled once per second for up to 30 seconds; its final
    status, result, and error (if any) are printed.
    """
    import argparse

    import pyworkflow
    from pyworkflow import get_workflow_run

    parser = argparse.ArgumentParser(description="Child Workflow Patterns Demo")
    parser.add_argument(
        "--demo",
        choices=["nested", "parallel", "error", "all"],
        default="all",
        help="Which demo to run",
    )
    args = parser.parse_args()

    print("=== Child Workflows - Advanced Patterns ===")

    # Build the list of (display name, workflow) pairs to run.
    demos = []
    if args.demo in ("nested", "all"):
        demos.append(("Nested Child Workflows (3 levels)", level_1_workflow))
    if args.demo in ("parallel", "all"):
        demos.append(("Parallel Child Workflows", parallel_parent_workflow))
    if args.demo in ("error", "all"):
        demos.append(("Child Workflow Error Handling", error_handling_parent_workflow))

    for demo_name, workflow_func in demos:
        print(f"\n{'=' * 50}")
        print(f"DEMO: {demo_name}")
        print("=" * 50 + "\n")

        run_id = await pyworkflow.start(workflow_func)
        print(f"\nWorkflow started: {run_id}")

        # Poll once per second, up to 30 seconds, for a terminal status.
        for _ in range(30):
            await asyncio.sleep(1)
            run = await get_workflow_run(run_id)
            if run.status.value in ("completed", "failed", "cancelled"):
                print(f"Status: {run.status.value}")
                # Fix: `if run.result:` skipped falsy-but-valid results
                # (e.g. {} or []); compare against None instead.
                if run.result is not None:
                    print(f"Result: {run.result}")
                if run.error:
                    print(f"Error: {run.error}")
                break
        else:
            # for/else: no break occurred, i.e. the workflow never reached
            # a terminal state within the polling window.
            print("Timeout waiting for workflow completion")

    print("\n" + "=" * 50)
    print("=== Key Takeaways ===")
    print("=" * 50)
    print("1. Child workflows can spawn their own children (up to depth 3)")
    print("2. Use wait_for_completion=False + handle.result() for parallel")
    print("3. ChildWorkflowFailedError propagates child failures to parent")
    print("4. MaxNestingDepthError prevents infinite nesting")
    print("5. TERMINATE policy ensures cleanup on parent completion")
237
+
238
+
239
# Allow running this example directly as a script.
if __name__ == "__main__":
    asyncio.run(main())