pyworkflow-engine 0.1.7__py3-none-any.whl → 0.1.10__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (146)
  1. pyworkflow/__init__.py +10 -1
  2. pyworkflow/celery/tasks.py +272 -24
  3. pyworkflow/cli/__init__.py +4 -1
  4. pyworkflow/cli/commands/runs.py +4 -4
  5. pyworkflow/cli/commands/setup.py +203 -4
  6. pyworkflow/cli/utils/config_generator.py +76 -3
  7. pyworkflow/cli/utils/docker_manager.py +232 -0
  8. pyworkflow/config.py +94 -17
  9. pyworkflow/context/__init__.py +13 -0
  10. pyworkflow/context/base.py +26 -0
  11. pyworkflow/context/local.py +80 -0
  12. pyworkflow/context/step_context.py +295 -0
  13. pyworkflow/core/registry.py +6 -1
  14. pyworkflow/core/step.py +141 -0
  15. pyworkflow/core/workflow.py +56 -0
  16. pyworkflow/engine/events.py +30 -0
  17. pyworkflow/engine/replay.py +39 -0
  18. pyworkflow/primitives/child_workflow.py +1 -1
  19. pyworkflow/runtime/local.py +1 -1
  20. pyworkflow/storage/__init__.py +14 -0
  21. pyworkflow/storage/base.py +35 -0
  22. pyworkflow/storage/cassandra.py +1747 -0
  23. pyworkflow/storage/config.py +69 -0
  24. pyworkflow/storage/dynamodb.py +31 -2
  25. pyworkflow/storage/file.py +28 -0
  26. pyworkflow/storage/memory.py +18 -0
  27. pyworkflow/storage/mysql.py +1159 -0
  28. pyworkflow/storage/postgres.py +27 -2
  29. pyworkflow/storage/schemas.py +4 -3
  30. pyworkflow/storage/sqlite.py +25 -2
  31. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/METADATA +7 -4
  32. pyworkflow_engine-0.1.10.dist-info/RECORD +91 -0
  33. pyworkflow_engine-0.1.10.dist-info/top_level.txt +1 -0
  34. dashboard/backend/app/__init__.py +0 -1
  35. dashboard/backend/app/config.py +0 -32
  36. dashboard/backend/app/controllers/__init__.py +0 -6
  37. dashboard/backend/app/controllers/run_controller.py +0 -86
  38. dashboard/backend/app/controllers/workflow_controller.py +0 -33
  39. dashboard/backend/app/dependencies/__init__.py +0 -5
  40. dashboard/backend/app/dependencies/storage.py +0 -50
  41. dashboard/backend/app/repositories/__init__.py +0 -6
  42. dashboard/backend/app/repositories/run_repository.py +0 -80
  43. dashboard/backend/app/repositories/workflow_repository.py +0 -27
  44. dashboard/backend/app/rest/__init__.py +0 -8
  45. dashboard/backend/app/rest/v1/__init__.py +0 -12
  46. dashboard/backend/app/rest/v1/health.py +0 -33
  47. dashboard/backend/app/rest/v1/runs.py +0 -133
  48. dashboard/backend/app/rest/v1/workflows.py +0 -41
  49. dashboard/backend/app/schemas/__init__.py +0 -23
  50. dashboard/backend/app/schemas/common.py +0 -16
  51. dashboard/backend/app/schemas/event.py +0 -24
  52. dashboard/backend/app/schemas/hook.py +0 -25
  53. dashboard/backend/app/schemas/run.py +0 -54
  54. dashboard/backend/app/schemas/step.py +0 -28
  55. dashboard/backend/app/schemas/workflow.py +0 -31
  56. dashboard/backend/app/server.py +0 -87
  57. dashboard/backend/app/services/__init__.py +0 -6
  58. dashboard/backend/app/services/run_service.py +0 -240
  59. dashboard/backend/app/services/workflow_service.py +0 -155
  60. dashboard/backend/main.py +0 -18
  61. docs/concepts/cancellation.mdx +0 -362
  62. docs/concepts/continue-as-new.mdx +0 -434
  63. docs/concepts/events.mdx +0 -266
  64. docs/concepts/fault-tolerance.mdx +0 -370
  65. docs/concepts/hooks.mdx +0 -552
  66. docs/concepts/limitations.mdx +0 -167
  67. docs/concepts/schedules.mdx +0 -775
  68. docs/concepts/sleep.mdx +0 -312
  69. docs/concepts/steps.mdx +0 -301
  70. docs/concepts/workflows.mdx +0 -255
  71. docs/guides/cli.mdx +0 -942
  72. docs/guides/configuration.mdx +0 -560
  73. docs/introduction.mdx +0 -155
  74. docs/quickstart.mdx +0 -279
  75. examples/__init__.py +0 -1
  76. examples/celery/__init__.py +0 -1
  77. examples/celery/durable/docker-compose.yml +0 -55
  78. examples/celery/durable/pyworkflow.config.yaml +0 -12
  79. examples/celery/durable/workflows/__init__.py +0 -122
  80. examples/celery/durable/workflows/basic.py +0 -87
  81. examples/celery/durable/workflows/batch_processing.py +0 -102
  82. examples/celery/durable/workflows/cancellation.py +0 -273
  83. examples/celery/durable/workflows/child_workflow_patterns.py +0 -240
  84. examples/celery/durable/workflows/child_workflows.py +0 -202
  85. examples/celery/durable/workflows/continue_as_new.py +0 -260
  86. examples/celery/durable/workflows/fault_tolerance.py +0 -210
  87. examples/celery/durable/workflows/hooks.py +0 -211
  88. examples/celery/durable/workflows/idempotency.py +0 -112
  89. examples/celery/durable/workflows/long_running.py +0 -99
  90. examples/celery/durable/workflows/retries.py +0 -101
  91. examples/celery/durable/workflows/schedules.py +0 -209
  92. examples/celery/transient/01_basic_workflow.py +0 -91
  93. examples/celery/transient/02_fault_tolerance.py +0 -257
  94. examples/celery/transient/__init__.py +0 -20
  95. examples/celery/transient/pyworkflow.config.yaml +0 -25
  96. examples/local/__init__.py +0 -1
  97. examples/local/durable/01_basic_workflow.py +0 -94
  98. examples/local/durable/02_file_storage.py +0 -132
  99. examples/local/durable/03_retries.py +0 -169
  100. examples/local/durable/04_long_running.py +0 -119
  101. examples/local/durable/05_event_log.py +0 -145
  102. examples/local/durable/06_idempotency.py +0 -148
  103. examples/local/durable/07_hooks.py +0 -334
  104. examples/local/durable/08_cancellation.py +0 -233
  105. examples/local/durable/09_child_workflows.py +0 -198
  106. examples/local/durable/10_child_workflow_patterns.py +0 -265
  107. examples/local/durable/11_continue_as_new.py +0 -249
  108. examples/local/durable/12_schedules.py +0 -198
  109. examples/local/durable/__init__.py +0 -1
  110. examples/local/transient/01_quick_tasks.py +0 -87
  111. examples/local/transient/02_retries.py +0 -130
  112. examples/local/transient/03_sleep.py +0 -141
  113. examples/local/transient/__init__.py +0 -1
  114. pyworkflow_engine-0.1.7.dist-info/RECORD +0 -196
  115. pyworkflow_engine-0.1.7.dist-info/top_level.txt +0 -5
  116. tests/examples/__init__.py +0 -0
  117. tests/integration/__init__.py +0 -0
  118. tests/integration/test_cancellation.py +0 -330
  119. tests/integration/test_child_workflows.py +0 -439
  120. tests/integration/test_continue_as_new.py +0 -428
  121. tests/integration/test_dynamodb_storage.py +0 -1146
  122. tests/integration/test_fault_tolerance.py +0 -369
  123. tests/integration/test_schedule_storage.py +0 -484
  124. tests/unit/__init__.py +0 -0
  125. tests/unit/backends/__init__.py +0 -1
  126. tests/unit/backends/test_dynamodb_storage.py +0 -1554
  127. tests/unit/backends/test_postgres_storage.py +0 -1281
  128. tests/unit/backends/test_sqlite_storage.py +0 -1460
  129. tests/unit/conftest.py +0 -41
  130. tests/unit/test_cancellation.py +0 -364
  131. tests/unit/test_child_workflows.py +0 -680
  132. tests/unit/test_continue_as_new.py +0 -441
  133. tests/unit/test_event_limits.py +0 -316
  134. tests/unit/test_executor.py +0 -320
  135. tests/unit/test_fault_tolerance.py +0 -334
  136. tests/unit/test_hooks.py +0 -495
  137. tests/unit/test_registry.py +0 -261
  138. tests/unit/test_replay.py +0 -420
  139. tests/unit/test_schedule_schemas.py +0 -285
  140. tests/unit/test_schedule_utils.py +0 -286
  141. tests/unit/test_scheduled_workflow.py +0 -274
  142. tests/unit/test_step.py +0 -353
  143. tests/unit/test_workflow.py +0 -243
  144. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/WHEEL +0 -0
  145. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/entry_points.txt +0 -0
  146. {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/licenses/LICENSE +0 -0
--- examples/local/durable/01_basic_workflow.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-Durable Workflow - Basic Example
-
-This example demonstrates a simple event-sourced workflow using InMemoryStorageBackend.
-- 3-step order processing workflow
-- Events recorded for each step
-- Event log inspection after completion
-- Basic @workflow and @step decorators
-
-Run: python examples/local/durable/01_basic_workflow.py 2>/dev/null
-"""
-
-import asyncio
-
-from pyworkflow import (
-    configure,
-    get_workflow_events,
-    get_workflow_run,
-    reset_config,
-    start,
-    step,
-    workflow,
-)
-from pyworkflow.storage import InMemoryStorageBackend
-
-
-# --- Steps ---
-@step()
-async def process_order(order_id: str) -> dict:
-    """Process the order and validate it."""
-    print(f" Processing order {order_id}...")
-    return {"order_id": order_id, "status": "processed"}
-
-
-@step()
-async def charge_payment(order: dict, amount: float) -> dict:
-    """Charge the payment for the order."""
-    print(f" Charging payment: ${amount:.2f}...")
-    return {**order, "charged": amount}
-
-
-@step()
-async def send_notification(order: dict) -> dict:
-    """Send order confirmation notification."""
-    print(f" Sending notification for order {order['order_id']}...")
-    return {**order, "notified": True}
-
-
-# --- Workflow ---
-@workflow(durable=True, tags=["local", "durable"])
-async def order_workflow(order_id: str, amount: float) -> dict:
-    """Complete order processing workflow."""
-    order = await process_order(order_id)
-    order = await charge_payment(order, amount)
-    order = await send_notification(order)
-    return order
-
-
-async def main():
-    # Configure with InMemoryStorageBackend
-    reset_config()
-    storage = InMemoryStorageBackend()
-    configure(storage=storage, default_durable=True)
-
-    print("=== Durable Workflow - Basic Example ===\n")
-    print("Running order workflow...")
-
-    # Start workflow
-    run_id = await start(order_workflow, "order-123", 99.99)
-    print(f"\nWorkflow completed: {run_id}\n")
-
-    # Check workflow status
-    run = await get_workflow_run(run_id)
-    print(f"Status: {run.status.value}")
-    print(f"Result: {run.result}")
-
-    # Inspect event log
-    events = await get_workflow_events(run_id)
-    print(f"\n=== Event Log ({len(events)} events) ===")
-    for event in events:
-        print(f" {event.sequence}: {event.type.value}")
-        if event.type.value == "step_completed":
-            step_name = event.data.get("step_name", "unknown")
-            print(f" Step: {step_name}")
-
-    print("\n=== Key Takeaways ===")
-    print("✓ Workflow executed with event sourcing")
-    print("✓ Each step recorded as an event")
-    print("✓ InMemoryStorageBackend used (data lost on exit)")
-    print("✓ Try 02_file_storage.py for persistence!")
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
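
Stripped of its print statements, the deleted example above reduces to four moving parts: a storage backend, decorated steps, a durable workflow, and a start/inspect pair. A minimal sketch of that pattern, using only the API surface visible in the deleted file (configure, start, get_workflow_run); it is untested against 0.1.10, which no longer ships these examples in the wheel:

import asyncio

from pyworkflow import configure, get_workflow_run, reset_config, start, step, workflow
from pyworkflow.storage import InMemoryStorageBackend

@step()
async def double(x: int) -> int:
    return x * 2

@workflow(durable=True, tags=["local", "durable"])
async def demo(x: int) -> int:
    return await double(x)

async def main():
    reset_config()
    configure(storage=InMemoryStorageBackend(), default_durable=True)
    run_id = await start(demo, 21)        # executes the workflow, recording events
    run = await get_workflow_run(run_id)  # fetch the stored run record
    print(run.status.value, run.result)   # expected: "completed" 42

asyncio.run(main())
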
--- examples/local/durable/02_file_storage.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
-Durable Workflow - File Storage
-
-This example demonstrates persistent workflow storage using FileStorageBackend.
-- Same 3-step workflow as 01_basic_workflow.py
-- Data persists to filesystem in workflow_data/ directory
-- Human-readable JSON files
-- Inspect stored file structure and JSONL event log
-
-Run: python examples/local/durable/02_file_storage.py 2>/dev/null
-"""
-
-import asyncio
-import json
-import os
-from pathlib import Path
-
-from pyworkflow import (
-    configure,
-    get_workflow_events,
-    get_workflow_run,
-    reset_config,
-    start,
-    step,
-    workflow,
-)
-from pyworkflow.storage import FileStorageBackend
-
-
-# --- Steps ---
-@step()
-async def process_order(order_id: str) -> dict:
-    """Process the order and validate it."""
-    print(f" Processing order {order_id}...")
-    return {"order_id": order_id, "status": "processed"}
-
-
-@step()
-async def charge_payment(order: dict, amount: float) -> dict:
-    """Charge the payment for the order."""
-    print(f" Charging payment: ${amount:.2f}...")
-    return {**order, "charged": amount}
-
-
-@step()
-async def send_notification(order: dict) -> dict:
-    """Send order confirmation notification."""
-    print(f" Sending notification for order {order['order_id']}...")
-    return {**order, "notified": True}
-
-
-# --- Workflow ---
-@workflow(durable=True, tags=["local", "durable"])
-async def order_workflow(order_id: str, amount: float) -> dict:
-    """Complete order processing workflow."""
-    order = await process_order(order_id)
-    order = await charge_payment(order, amount)
-    order = await send_notification(order)
-    return order
-
-
-async def main():
-    # Use local directory for persistence (added to .gitignore)
-    data_dir = Path(__file__).parent / "workflow_data"
-    data_dir.mkdir(exist_ok=True)
-
-    print("=== Durable Workflow - File Storage ===\n")
-    print(f"Storage directory: {data_dir}\n")
-
-    # Configure with FileStorageBackend
-    reset_config()
-    storage = FileStorageBackend(base_path=str(data_dir))
-    configure(storage=storage, default_durable=True)
-
-    print("Running order workflow...")
-
-    # Start workflow
-    run_id = await start(order_workflow, "order-456", 149.99)
-    print(f"\nWorkflow completed: {run_id}\n")
-
-    # Check workflow status
-    run = await get_workflow_run(run_id)
-    print(f"Status: {run.status.value}")
-    print(f"Result: {run.result}")
-
-    # Show events
-    events = await get_workflow_events(run_id)
-    print(f"\n=== Event Log ({len(events)} events) ===")
-    for event in events:
-        print(f" {event.sequence}: {event.type.value}")
-
-    # Show stored files
-    print("\n=== Stored Files ===")
-    for root, dirs, files in os.walk(data_dir):
-        # Skip hidden directories (.locks)
-        dirs[:] = [d for d in dirs if not d.startswith(".")]
-        for f in files:
-            path = os.path.join(root, f)
-            rel_path = os.path.relpath(path, data_dir)
-            size = os.path.getsize(path)
-            print(f" {rel_path} ({size} bytes)")
-
-    # Show JSONL event log contents
-    event_log_path = data_dir / "events" / f"{run_id}.jsonl"
-    if event_log_path.exists():
-        print(f"\n=== Event Log File Contents ({event_log_path.name}) ===")
-        with open(event_log_path) as f:
-            for i, line in enumerate(f, 1):
-                event_data = json.loads(line.strip())
-                event_type = event_data.get("type", "unknown")
-                print(f" Line {i}: {event_type}")
-                # Show full data for first event
-                if i == 1:
-                    print(f" Full data: {json.dumps(event_data, indent=6)}")
-
-    print("\n=== Directory Structure ===")
-    print(" runs/ - Workflow run metadata (JSON)")
-    print(" events/ - Event log (JSONL, append-only)")
-    print(" steps/ - Step execution records (JSON)")
-    print(" .locks/ - Internal file locks")
-
-    print("\n=== Key Takeaways ===")
-    print("✓ Data persists to filesystem in workflow_data/")
-    print("✓ Human-readable JSON format")
-    print("✓ JSONL (JSON Lines) for event log (one event per line)")
-    print("✓ Survives process restarts")
-    print("✓ Good for development and single-machine deployments")
-    print(f"\nℹ Storage persisted at: {data_dir.absolute()}")
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
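
The only change from the previous example is the backend, and the deleted file's own output documents what lands on disk. A sketch of just the swap (paths and layout exactly as the example describes them; FileStorageBackend's base_path is passed as a string, as in the diff):

from pathlib import Path

from pyworkflow import configure, reset_config
from pyworkflow.storage import FileStorageBackend

data_dir = Path("./workflow_data")
data_dir.mkdir(exist_ok=True)

reset_config()
configure(storage=FileStorageBackend(base_path=str(data_dir)), default_durable=True)

# Per the example's printed directory structure, a completed run leaves behind:
#   runs/    - workflow run metadata (JSON)
#   events/  - event log, <run_id>.jsonl, append-only, one event per line
#   steps/   - step execution records (JSON)
#   .locks/  - internal file locks
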
--- examples/local/durable/03_retries.py
+++ /dev/null
@@ -1,169 +0,0 @@
-"""
-Durable Workflow - Automatic Retries with Suspension
-
-This example demonstrates automatic retry behavior with workflow suspension/resumption.
-- Simulates flaky external API (fails 2x, succeeds on 3rd try)
-- Workflow suspends between retry attempts
-- Retry events recorded in event log for audit trail
-- Demonstrates manual resume after each retry delay
-
-IMPORTANT: Manual resumption (await resume(run_id)) is ONLY for local development/CI.
-In production, use Celery runtime for automatic scheduled resumption.
-See examples/celery/ for production-ready distributed execution.
-
-Run: python examples/local/durable/03_retries.py 2>/dev/null
-"""
-
-import asyncio
-
-from pyworkflow import (
-    configure,
-    get_workflow_events,
-    get_workflow_run,
-    reset_config,
-    resume,
-    start,
-    step,
-    workflow,
-)
-from pyworkflow.storage import InMemoryStorageBackend
-
-# Simulate API call counter
-attempt_count = 0
-
-
-# --- Steps ---
-@step()
-async def validate_order(order_id: str) -> dict:
-    """Validate the order."""
-    print(f" Validating order {order_id}...")
-    return {"order_id": order_id, "valid": True}
-
-
-@step(max_retries=3, retry_delay=1)
-async def call_flaky_api(order: dict) -> dict:
-    """Simulate unreliable external API - fails twice then succeeds."""
-    global attempt_count
-    attempt_count += 1
-
-    print(f" Calling external API (attempt {attempt_count})...")
-
-    if attempt_count < 3:
-        # Simulate temporary failure
-        raise Exception(f"API timeout - connection refused (attempt {attempt_count})")
-
-    # Third attempt succeeds
-    print(f" ✓ API call successful on attempt {attempt_count}!")
-    return {**order, "api_response": "payment_approved", "attempts": attempt_count}
-
-
-@step()
-async def finalize_order(order: dict) -> dict:
-    """Finalize the order after successful API call."""
-    print(f" Finalizing order {order['order_id']}...")
-    return {**order, "finalized": True}
-
-
-# --- Workflow ---
-@workflow(durable=True, tags=["local", "durable"])
-async def order_workflow(order_id: str) -> dict:
-    """Complete order processing with retry logic."""
-    order = await validate_order(order_id)
-    order = await call_flaky_api(order)  # Will retry on failure
-    order = await finalize_order(order)
-    return order
-
-
-async def main():
-    global attempt_count
-
-    # Configure with InMemoryStorageBackend
-    reset_config()
-    storage = InMemoryStorageBackend()
-    configure(storage=storage, default_durable=True)
-
-    print("=== Durable Workflow - Automatic Retries with Suspension ===\n")
-    print("Simulating flaky API (fails 2x, succeeds on 3rd try)...\n")
-
-    # Reset counter
-    attempt_count = 0
-
-    # Start workflow
-    print("Starting workflow...")
-    run_id = await start(order_workflow, "order-789")
-
-    # Check status after first attempt
-    run = await get_workflow_run(run_id)
-    print(f"\nStatus after attempt 1: {run.status.value}")
-
-    if run.status.value == "suspended":
-        print("→ Workflow suspended for retry (waiting 1 second...)")
-
-        # Show events so far
-        events = await get_workflow_events(run_id)
-        print(f"\n=== Event Log (After Attempt 1) - {len(events)} events ===")
-        for event in events:
-            event_type = event.type.value
-            attempt = event.data.get("attempt", "?")
-            print(f" {event.sequence}: {event_type} (attempt={attempt})")
-            if event_type == "step_retrying":
-                next_attempt = event.data.get("attempt")
-                print(f" → Will retry as attempt {next_attempt}")
-
-        # Wait for retry delay and resume
-        await asyncio.sleep(1.5)
-        print("\nResuming workflow for attempt 2...")
-        await resume(run_id)
-
-        # Check status again
-        run = await get_workflow_run(run_id)
-        print(f"Status after attempt 2: {run.status.value}")
-
-        if run.status.value == "suspended":
-            print("→ Workflow suspended for retry again (waiting 1 second...)")
-
-            # Wait and resume for attempt 3
-            await asyncio.sleep(1.5)
-            print("\nResuming workflow for attempt 3...")
-            result = await resume(run_id)
-
-            print("\n✓ Workflow completed successfully!")
-            print(f"Result: {result}")
-
-    # Final status
-    run = await get_workflow_run(run_id)
-    print(f"\nFinal status: {run.status.value}")
-
-    # Show complete event log
-    events = await get_workflow_events(run_id)
-    print(f"\n=== Complete Event Log ({len(events)} events) ===")
-
-    for event in events:
-        event_type = event.type.value
-        attempt = event.data.get("attempt", "")
-        step_name = event.data.get("step_name", "")
-
-        if attempt:
-            print(f" {event.sequence}: {event_type} (attempt={attempt}, step={step_name})")
-        else:
-            print(f" {event.sequence}: {event_type}")
-
-        if event_type == "step_failed":
-            error = event.data.get("error", "")[:50]
-            print(f" Error: {error}...")
-        elif event_type == "step_retrying":
-            retry_after = event.data.get("retry_after")
-            resume_at = event.data.get("resume_at", "")[:19]
-            print(f" Retry after: {retry_after}s, resume at: {resume_at}")
-
-    print("\n=== Key Takeaways ===")
-    print("✓ Workflow suspends between retry attempts (releases resources)")
-    print("✓ Each retry requires manual resume() or automatic Celery scheduling")
-    print("✓ Event log shows STEP_FAILED + STEP_RETRYING for each retry")
-    print("✓ Resume restores state via event replay and continues from retry")
-    print("✓ max_retries=3, retry_delay=1 (1 initial + 3 retries = 4 total attempts)")
-    print(f"✓ Total attempts in this run: {attempt_count}")
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
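
The deleted script resumes exactly twice because it knows its fake API fails exactly twice. For local development the same dance generalizes to a polling loop; this sketch is built only from calls shown above, and the 1.5 s interval mirrors the example's wait past retry_delay=1 (the "suspended" status string is likewise taken from the example):

import asyncio

from pyworkflow import get_workflow_run, resume, start

async def drive(workflow_fn, *args):
    """Start a durable workflow, resuming it after each retry suspension."""
    run_id = await start(workflow_fn, *args)
    while True:
        run = await get_workflow_run(run_id)
        if run.status.value != "suspended":
            return run.result            # finished: completed, or failed for good
        await asyncio.sleep(1.5)         # let the retry_delay elapse
        await resume(run_id)             # replay events, re-attempt the failed step

As the example's docstring stresses, a manual loop like this is a development convenience only; in production the Celery runtime schedules resumption itself.
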
--- examples/local/durable/04_long_running.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-Durable Workflow - Long Running with Sleep
-
-This example demonstrates workflow suspension and resumption with sleep().
-- Workflow suspends during sleep (releases resources)
-- Can be resumed after sleep completes
-- Uses FileStorageBackend for persistence across process restarts
-- Demonstrates manual resumption pattern
-
-IMPORTANT: Manual resumption (await resume(run_id)) is ONLY for local development/CI.
-In production, use Celery runtime for automatic scheduled resumption.
-See examples/celery/ for production-ready distributed execution.
-
-Run: python examples/local/durable/04_long_running.py 2>/dev/null
-"""
-
-import asyncio
-import tempfile
-
-from pyworkflow import (
-    configure,
-    get_workflow_run,
-    reset_config,
-    resume,
-    sleep,
-    start,
-    step,
-    workflow,
-)
-from pyworkflow.storage import FileStorageBackend
-
-
-# --- Steps ---
-@step()
-async def prepare_batch(batch_id: str) -> dict:
-    """Prepare the batch for processing."""
-    print(f" Preparing batch {batch_id}...")
-    return {"batch_id": batch_id, "status": "prepared"}
-
-
-@step()
-async def process_batch(batch: dict) -> dict:
-    """Process the batch after sleep completes."""
-    print(f" Processing batch {batch['batch_id']}...")
-    return {**batch, "status": "processed", "items": 1000}
-
-
-@step()
-async def finalize_batch(batch: dict) -> dict:
-    """Finalize the batch."""
-    print(f" Finalizing batch {batch['batch_id']}...")
-    return {**batch, "status": "completed"}
-
-
-# --- Workflow ---
-@workflow(durable=True, tags=["local", "durable"])
-async def batch_workflow(batch_id: str) -> dict:
-    """Long-running batch processing workflow with sleep."""
-    batch = await prepare_batch(batch_id)
-
-    print(" Sleeping for 5 seconds (workflow will suspend)...")
-    await sleep("5s")  # Suspends workflow here
-
-    print(" Resuming after sleep...")
-    batch = await process_batch(batch)
-    batch = await finalize_batch(batch)
-    return batch
-
-
-async def main():
-    # Use temp directory (use real path like "./workflow_data" for production)
-    with tempfile.TemporaryDirectory() as tmpdir:
-        print("=== Durable Workflow - Long Running ===\n")
-
-        # Configure with FileStorageBackend (for persistence)
-        reset_config()
-        storage = FileStorageBackend(base_path=tmpdir)
-        configure(storage=storage, default_durable=True)
-
-        print("Starting batch workflow...\n")
-
-        # Start workflow
-        run_id = await start(batch_workflow, "batch-001")
-
-        # Check status after start
-        run = await get_workflow_run(run_id)
-        print(f"\nWorkflow status after sleep: {run.status.value}")
-
-        if run.status.value == "suspended":
-            print("Workflow is suspended (waiting for sleep to complete)")
-
-            # Wait for sleep duration, then resume
-            print("\nWaiting 5 seconds for sleep to complete...")
-            await asyncio.sleep(5)
-
-            print(f"Resuming workflow {run_id}...\n")
-            result = await resume(run_id)
-
-            print("\nWorkflow completed!")
-            print(f"Result: {result}")
-        else:
-            # Workflow already completed (sleep was short enough)
-            print("Workflow completed without suspension")
-            print(f"Result: {run.result}")
-
-        # Final status check
-        run = await get_workflow_run(run_id)
-        print(f"\nFinal status: {run.status.value}")
-
-        print("\n=== Key Takeaways ===")
-        print("✓ Workflow suspends during sleep() (releases resources)")
-        print("✓ FileStorageBackend persists state during suspension")
-        print("✓ Can resume after sleep completes (even after process restart)")
-        print("✓ Perfect for rate limiting, scheduled tasks, waiting for events")
-        print("\nℹ For production: use real storage path, implement auto-resume logic")
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
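
The essential move in the deleted example is the single await sleep("5s"): the run suspends there with its state already persisted, and resume(run_id) later replays the event log and continues. A condensed sketch reusing only calls shown in the diff; the in-process asyncio.sleep is a local stand-in for the scheduler the Celery runtime provides:

import asyncio

from pyworkflow import configure, reset_config, resume, sleep, start, step, workflow
from pyworkflow.storage import FileStorageBackend

@step()
async def work(x: int) -> int:
    return x + 1

@workflow(durable=True, tags=["local", "durable"])
async def nap_workflow(x: int) -> int:
    x = await work(x)
    await sleep("5s")       # suspends the run; state is on disk at this point
    return await work(x)    # reached only after resume()

async def main():
    reset_config()
    configure(storage=FileStorageBackend(base_path="./workflow_data"), default_durable=True)
    run_id = await start(nap_workflow, 1)  # returns while the run is suspended
    await asyncio.sleep(5)                 # wait out the sleep duration locally
    print(await resume(run_id))            # replays events and finishes: 3

if __name__ == "__main__":
    asyncio.run(main())

Because FileStorageBackend persists to disk, the resume() half could just as well run in a later process pointed at the same base_path, which is the cross-restart property the example's takeaways call out.
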
--- examples/local/durable/05_event_log.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-Durable Workflow - Event Log Deep Dive
-
-This example demonstrates detailed event sourcing inspection.
-- Multiple workflows to show different event sequences
-- Deep dive into event structure (sequence, type, timestamp, data)
-- Understanding event types and their meaning
-- Event replay concepts
-
-Run: python examples/local/durable/05_event_log.py 2>/dev/null
-"""
-
-import asyncio
-
-from pyworkflow import (
-    configure,
-    get_workflow_events,
-    get_workflow_run,
-    reset_config,
-    start,
-    step,
-    workflow,
-)
-from pyworkflow.storage import InMemoryStorageBackend
-
-
-# --- Steps ---
-@step()
-async def step_a(value: int) -> int:
-    """Simple step that doubles the value."""
-    return value * 2
-
-
-@step()
-async def step_b(value: int) -> int:
-    """Simple step that adds 10."""
-    return value + 10
-
-
-@step()
-async def step_c(value: int) -> int:
-    """Simple step that subtracts 5."""
-    return value - 5
-
-
-# --- Workflows ---
-@workflow(durable=True, tags=["local", "durable"])
-async def simple_workflow(value: int) -> int:
-    """Simple 2-step workflow."""
-    result = await step_a(value)
-    result = await step_b(result)
-    return result
-
-
-@workflow(durable=True, tags=["local", "durable"])
-async def complex_workflow(value: int) -> int:
-    """More complex 3-step workflow."""
-    result = await step_a(value)
-    result = await step_b(result)
-    result = await step_c(result)
-    return result
-
-
-def print_event_details(event, index: int):
-    """Pretty print event details."""
-    print(f"\nEvent #{index + 1}:")
-    print(f" Sequence: {event.sequence}")
-    print(f" Type: {event.type.value}")
-    print(f" Timestamp: {event.timestamp.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]}")
-
-    if event.data:
-        print(" Data:")
-        for key, value in event.data.items():
-            # Format value nicely
-            if isinstance(value, str) and len(value) > 50:
-                value = value[:50] + "..."
-            print(f" {key}: {value}")
-
-
-async def main():
-    # Configure with InMemoryStorageBackend
-    reset_config()
-    storage = InMemoryStorageBackend()
-    configure(storage=storage, default_durable=True)
-
-    print("=== Durable Workflow - Event Log Deep Dive ===\n")
-
-    # Run first workflow
-    print("Running simple_workflow(5)...\n")
-    run_id_1 = await start(simple_workflow, 5)
-
-    run = await get_workflow_run(run_id_1)
-    print(f"Result: {run.result}")
-    print(f"Status: {run.status.value}")
-
-    # Inspect events
-    events = await get_workflow_events(run_id_1)
-    print(f"\n=== Event Log for simple_workflow ({len(events)} events) ===")
-
-    for i, event in enumerate(events):
-        print_event_details(event, i)
-
-    # Run second workflow
-    print("\n" + "=" * 60)
-    print("\nRunning complex_workflow(10)...\n")
-    run_id_2 = await start(complex_workflow, 10)
-
-    run = await get_workflow_run(run_id_2)
-    print(f"Result: {run.result}")
-    print(f"Status: {run.status.value}")
-
-    # Inspect events
-    events = await get_workflow_events(run_id_2)
-    print(f"\n=== Event Log for complex_workflow ({len(events)} events) ===")
-
-    for i, event in enumerate(events):
-        print_event_details(event, i)
-
-    # Event type summary
-    print("\n" + "=" * 60)
-    print("\n=== Event Types Explained ===")
-    print("workflow_started - Workflow execution begins")
-    print("step_completed - Step successfully executed (result cached)")
-    print("step_failed - Step failed (will retry if configured)")
-    print("sleep_started - Workflow suspended (sleep begins)")
-    print("sleep_completed - Workflow resumed (sleep ends)")
-    print("workflow_completed - Workflow finished successfully")
-    print("workflow_failed - Workflow failed permanently")
-
-    print("\n=== Event Replay Concepts ===")
-    print("✓ Events are immutable - never modified, only appended")
-    print("✓ On crash/restart, events replayed to restore state")
-    print("✓ step_completed events: result cached, step not re-executed")
-    print("✓ Sequence numbers ensure deterministic ordering")
-    print("✓ Timestamps enable time-travel debugging")
-
-    print("\n=== Key Takeaways ===")
-    print("✓ Every state change recorded as an event")
-    print("✓ Events contain sequence, type, timestamp, and data")
-    print("✓ Event log enables crash recovery via replay")
-    print("✓ Complete audit trail for compliance and debugging")
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
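
As a reading aid for the logs this example prints, here is a compact summary helper and the event order to expect for a two-step run. The field names come from the deleted file; the full data payload of each event is not shown there, so treat the shape as a sketch:

from pyworkflow import get_workflow_events

async def summarize(run_id: str) -> None:
    """One line per event: sequence, type, and step name when present."""
    for event in await get_workflow_events(run_id):
        step_name = event.data.get("step_name", "") if event.data else ""
        print(f"{event.sequence}: {event.type.value} {step_name}".rstrip())

# For simple_workflow(5), the log should read, in order:
#   workflow_started -> step_completed (step_a) -> step_completed (step_b) -> workflow_completed
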