pyworkflow_engine-0.1.7-py3-none-any.whl
This diff shows the content of package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registry.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0

examples/local/transient/03_sleep.py
ADDED

@@ -0,0 +1,141 @@
"""
Transient Workflow - Async Sleep

This example demonstrates sleep() behavior in transient mode.
- Uses asyncio.sleep() under the hood (blocks workflow)
- No workflow suspension (unlike durable mode)
- Simple delay mechanism
- Perfect for rate limiting and delays

Run: python examples/local/transient/03_sleep.py 2>/dev/null
"""

import asyncio
from datetime import datetime

from pyworkflow import (
    configure,
    reset_config,
    sleep,
    start,
    step,
    workflow,
)


# --- Steps ---
@step()
async def start_task(task_id: str) -> dict:
    """Start a task."""
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f" [{timestamp}] Starting task {task_id}...")
    return {"task_id": task_id, "status": "started"}


@step()
async def process_task(task: dict) -> dict:
    """Process the task."""
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f" [{timestamp}] Processing task {task['task_id']}...")
    return {**task, "status": "processed"}


@step()
async def complete_task(task: dict) -> dict:
    """Complete the task."""
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f" [{timestamp}] Completing task {task['task_id']}...")
    return {**task, "status": "completed"}


# --- Workflows ---
@workflow(durable=False, tags=["local", "transient"])
async def delayed_workflow(task_id: str, delay_seconds: int) -> dict:
    """Workflow with sleep delay."""
    task = await start_task(task_id)

    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f" [{timestamp}] Sleeping for {delay_seconds} seconds...")
    await sleep(f"{delay_seconds}s")  # Uses asyncio.sleep() in transient mode

    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f" [{timestamp}] Woke up from sleep!")

    task = await process_task(task)
    task = await complete_task(task)
    return task


@workflow(durable=False, tags=["local", "transient"])
async def rate_limited_workflow(task_id: str) -> dict:
    """Workflow demonstrating rate limiting pattern."""
    task = await start_task(task_id)

    # Simulate rate limiting between API calls
    print(" Rate limiting: waiting 2 seconds before next API call...")
    await sleep("2s")

    task = await process_task(task)

    # Another rate limit delay
    print(" Rate limiting: waiting 2 seconds before final call...")
    await sleep("2s")

    task = await complete_task(task)
    return task


async def main():
    # Configure for transient mode
    reset_config()
    configure(default_durable=False)

    print("=== Transient Workflow - Async Sleep ===\n")

    # Example 1: Basic sleep
    print("Example 1: Basic sleep (3 seconds)\n")
    start_time = datetime.now()

    run_id = await start(delayed_workflow, "task-001", 3)

    end_time = datetime.now()
    elapsed = (end_time - start_time).total_seconds()

    print(f"\nWorkflow completed: {run_id}")
    print(f"Total time: {elapsed:.1f} seconds")

    # Example 2: Rate limiting
    print("\n" + "=" * 60)
    print("\nExample 2: Rate limiting with multiple sleeps\n")
    start_time = datetime.now()

    run_id = await start(rate_limited_workflow, "task-002")

    end_time = datetime.now()
    elapsed = (end_time - start_time).total_seconds()

    print(f"\nWorkflow completed: {run_id}")
    print(f"Total time: {elapsed:.1f} seconds")

    print("\n=== Sleep Behavior in Transient Mode ===")
    print("✓ Uses asyncio.sleep() (blocks the workflow)")
    print("✓ No workflow suspension (process keeps running)")
    print("✓ No resource release during sleep")
    print("✓ Perfect for short delays and rate limiting")

    print("\n=== Sleep Format Support ===")
    print('sleep("5s") - 5 seconds')
    print('sleep("2m") - 2 minutes')
    print('sleep("1h") - 1 hour')
    print("sleep(30) - 30 seconds (int)")
    print("sleep(timedelta(seconds=10)) - 10 seconds")

    print("\n=== Difference from Durable Mode ===")
    print("Transient: sleep() blocks using asyncio.sleep()")
    print("Durable: sleep() suspends workflow, can resume later")
    print("\nFor long-running workflows with suspension:")
    print(" See examples/local/durable/04_long_running.py")


if __name__ == "__main__":
    asyncio.run(main())
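
The example's closing output lists the duration formats sleep() accepts. A minimal sketch of those same call forms, reusing only the pyworkflow API shown in the file above (string, int, and timedelta durations, per the printed list), might look like this:

```python
# Sketch only: the three duration forms listed by 03_sleep.py, exercised in a
# tiny transient workflow. Values are kept at 1 second so the demo stays quick.
import asyncio
from datetime import timedelta

from pyworkflow import configure, reset_config, sleep, start, step, workflow


@step()
async def ping(label: str) -> str:
    return f"pinged {label}"


@workflow(durable=False, tags=["local", "transient"])
async def duration_formats_demo(label: str) -> str:
    await sleep("1s")                  # duration string; "2m" and "1h" work the same way
    await sleep(1)                     # plain int, interpreted as seconds
    await sleep(timedelta(seconds=1))  # timedelta object
    return await ping(label)


async def main() -> None:
    reset_config()
    configure(default_durable=False)
    run_id = await start(duration_formats_demo, "demo")
    print(f"Workflow completed: {run_id}")


if __name__ == "__main__":
    asyncio.run(main())
```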

examples/local/transient/__init__.py
ADDED

@@ -0,0 +1 @@
# PyWorkflow Local Transient Examples Package

pyworkflow/__init__.py
ADDED

@@ -0,0 +1,256 @@
"""
PyWorkflow - Durable and transient workflows for Python

A Python implementation of workflow orchestration inspired by Vercel Workflow,
providing fault-tolerant, long-running workflows with automatic retry, sleep/delay,
and webhook integration.

Supports both:
- Durable workflows: Event-sourced, persistent, resumable
- Transient workflows: Simple execution without persistence overhead

Quick Start:
    >>> import pyworkflow
    >>> from pyworkflow import workflow, step, start
    >>>
    >>> # Configure defaults
    >>> pyworkflow.configure(default_runtime="local", default_durable=False)
    >>>
    >>> @workflow
    >>> async def my_workflow(name: str):
    >>>     result = await process_step(name)
    >>>     return result
    >>>
    >>> @step
    >>> async def process_step(name: str):
    >>>     return f"Hello, {name}!"
    >>>
    >>> # Execute workflow
    >>> run_id = await start(my_workflow, "Alice")
"""

__version__ = "0.1.7"

# Configuration
from pyworkflow.config import (
    configure,
    configure_from_yaml,
    get_config,
    get_storage,
    reset_config,
)

# Context API (new unified context via contextvars)
from pyworkflow.context import (
    LocalContext,
    MockContext,
    WorkflowContext,
    get_context,
    has_context,
    reset_context,
    set_context,
)

# Exceptions
from pyworkflow.core.exceptions import (
    CancellationError,
    ChildWorkflowError,
    ChildWorkflowFailedError,
    FatalError,
    HookAlreadyReceivedError,
    HookExpiredError,
    HookNotFoundError,
    InvalidTokenError,
    MaxNestingDepthError,
    RetryableError,
    SuspensionSignal,
    WorkflowAlreadyRunningError,
    WorkflowError,
    WorkflowNotFoundError,
)

# Registry functions
from pyworkflow.core.registry import (
    get_step,
    get_workflow,
    list_steps,
    list_workflows,
)

# Core decorators and primitives
from pyworkflow.core.scheduled import (
    ScheduledWorkflowMetadata,
    activate_scheduled_workflows,
    get_scheduled_workflow,
    list_scheduled_workflows,
    scheduled_workflow,
)
from pyworkflow.core.step import step
from pyworkflow.core.workflow import workflow

# Discovery
from pyworkflow.discovery import DiscoveryError, discover_workflows

# Execution engine
from pyworkflow.engine.executor import (
    ConfigurationError,
    cancel_workflow,
    get_workflow_chain,
    get_workflow_events,
    get_workflow_run,
    resume,
    start,
)

# Core decorators and primitives
# Execution engine
# Logging and observability
from pyworkflow.observability.logging import (
    bind_step_context,
    bind_workflow_context,
    configure_logging,
    get_logger,
)
from pyworkflow.primitives.child_handle import ChildWorkflowHandle
from pyworkflow.primitives.child_workflow import start_child_workflow
from pyworkflow.primitives.continue_as_new import continue_as_new
from pyworkflow.primitives.define_hook import TypedHook, define_hook
from pyworkflow.primitives.hooks import hook
from pyworkflow.primitives.resume_hook import ResumeResult, resume_hook
from pyworkflow.primitives.schedule import (
    backfill_schedule,
    create_schedule,
    delete_schedule,
    get_schedule,
    list_schedules,
    pause_schedule,
    resume_schedule,
    trigger_schedule,
    update_schedule,
)
from pyworkflow.primitives.shield import shield
from pyworkflow.primitives.sleep import sleep

# Runtime
from pyworkflow.runtime import LocalRuntime, Runtime, get_runtime, register_runtime

# Scheduler
from pyworkflow.scheduler import LocalScheduler

# Storage backends
from pyworkflow.storage.base import StorageBackend
from pyworkflow.storage.file import FileStorageBackend
from pyworkflow.storage.memory import InMemoryStorageBackend
from pyworkflow.storage.schemas import (
    CalendarSpec,
    OverlapPolicy,
    RunStatus,
    Schedule,
    ScheduleSpec,
    ScheduleStatus,
    WorkflowRun,
)

__all__ = [
    # Version
    "__version__",
    # Configuration
    "configure",
    "configure_from_yaml",
    "get_config",
    "get_storage",
    "reset_config",
    # Discovery
    "discover_workflows",
    "DiscoveryError",
    # Core decorators
    "workflow",
    "step",
    "scheduled_workflow",
    # Schedule management
    "create_schedule",
    "get_schedule",
    "list_schedules",
    "update_schedule",
    "pause_schedule",
    "resume_schedule",
    "delete_schedule",
    "trigger_schedule",
    "backfill_schedule",
    "activate_scheduled_workflows",
    "get_scheduled_workflow",
    "list_scheduled_workflows",
    "ScheduledWorkflowMetadata",
    # Primitives
    "sleep",
    "hook",
    "define_hook",
    "TypedHook",
    "resume_hook",
    "ResumeResult",
    "shield",
    "continue_as_new",
    # Child workflows
    "start_child_workflow",
    "ChildWorkflowHandle",
    # Execution
    "start",
    "resume",
    "cancel_workflow",
    "get_workflow_run",
    "get_workflow_events",
    "get_workflow_chain",
    # Exceptions
    "WorkflowError",
    "FatalError",
    "RetryableError",
    "CancellationError",
    "SuspensionSignal",
    "WorkflowNotFoundError",
    "WorkflowAlreadyRunningError",
    "HookNotFoundError",
    "HookExpiredError",
    "HookAlreadyReceivedError",
    "InvalidTokenError",
    "ConfigurationError",
    "ChildWorkflowError",
    "ChildWorkflowFailedError",
    "MaxNestingDepthError",
    # Context API
    "WorkflowContext",
    "LocalContext",
    "MockContext",
    "get_context",
    "has_context",
    "set_context",
    "reset_context",
    # Registry
    "list_workflows",
    "get_workflow",
    "list_steps",
    "get_step",
    # Storage
    "StorageBackend",
    "FileStorageBackend",
    "InMemoryStorageBackend",
    "WorkflowRun",
    "RunStatus",
    # Schedule types
    "Schedule",
    "ScheduleSpec",
    "CalendarSpec",
    "ScheduleStatus",
    "OverlapPolicy",
    # Runtime
    "Runtime",
    "LocalRuntime",
    "get_runtime",
    "register_runtime",
    # Scheduler
    "LocalScheduler",
    # Logging
    "configure_logging",
    "get_logger",
    "bind_workflow_context",
    "bind_step_context",
]
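
The module docstring's Quick Start is written doctest-style. The same flow as a self-contained script, using only names this __init__.py exports, would look roughly like this (sketch only; the printed run_id is the one addition):

```python
# A runnable restatement of the Quick Start above; not part of the package.
import asyncio

import pyworkflow
from pyworkflow import start, step, workflow


@step
async def process_step(name: str) -> str:
    return f"Hello, {name}!"


@workflow
async def my_workflow(name: str) -> str:
    return await process_step(name)


async def main() -> None:
    # Defaults as in the docstring: local runtime, transient execution
    pyworkflow.configure(default_runtime="local", default_durable=False)
    run_id = await start(my_workflow, "Alice")
    print(f"run_id: {run_id}")


if __name__ == "__main__":
    asyncio.run(main())
```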

pyworkflow/aws/__init__.py
ADDED

@@ -0,0 +1,68 @@
"""
AWS Durable Lambda Functions integration for PyWorkflow.

This module provides integration with AWS Lambda Durable Functions,
allowing PyWorkflow workflows to run on AWS Lambda with automatic
checkpointing, durability, and cost-free sleep/wait operations.

Quick Start:
    ```python
    from pyworkflow import workflow, step
    from pyworkflow.aws import aws_workflow_handler, AWSWorkflowContext

    @step
    async def process_order(order_id: str) -> dict:
        return {"order_id": order_id, "status": "processed"}

    @aws_workflow_handler
    @workflow
    async def order_workflow(ctx: AWSWorkflowContext, order_id: str):
        # Execute step with automatic checkpointing
        result = await process_order(order_id)

        # Sleep without compute charges
        ctx.sleep(300)  # 5 minutes

        return result

    # Export Lambda handler
    handler = order_workflow
    ```

Installation:
    pip install pyworkflow[aws]

Features:
- Automatic checkpointing via AWS Durable Execution SDK
- Cost-free waits using context.wait()
- Support for both sync and async workflows
- Local testing with mock context
"""

from pyworkflow.aws.context import (
    AWSWorkflowContext,
    get_aws_context,
    has_aws_context,
)
from pyworkflow.aws.handler import aws_workflow_handler, create_lambda_handler
from pyworkflow.aws.testing import (
    MockCallback,
    MockDurableContext,
    MockDuration,
    create_test_handler,
)

__all__ = [
    # Context
    "AWSWorkflowContext",
    "get_aws_context",
    "has_aws_context",
    # Handler
    "aws_workflow_handler",
    "create_lambda_handler",
    # Testing
    "MockDurableContext",
    "MockDuration",
    "MockCallback",
    "create_test_handler",
]

pyworkflow/aws/context.py
ADDED

@@ -0,0 +1,234 @@
"""
AWS Durable Lambda Functions context adapter.

This module provides an adapter that wraps AWS DurableContext to work
with PyWorkflow's step and sleep primitives.
"""

from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Callable
from contextvars import ContextVar
from typing import TYPE_CHECKING, Any

from loguru import logger

from pyworkflow.context.base import StepFunction, WorkflowContext
from pyworkflow.utils.duration import parse_duration

if TYPE_CHECKING:
    # Only import AWS SDK types for type checking
    # Actual import happens at runtime if available
    from aws_durable_execution_sdk_python import DurableContext


# Context variable to track current AWS context (for backward compatibility)
_aws_context: ContextVar[AWSWorkflowContext | None] = ContextVar(
    "aws_workflow_context", default=None
)


def get_aws_context() -> AWSWorkflowContext | None:
    """Get the current AWS workflow context if running in AWS environment."""
    return _aws_context.get()


def has_aws_context() -> bool:
    """Check if currently running in AWS Durable Lambda context."""
    return _aws_context.get() is not None


class AWSWorkflowContext(WorkflowContext):
    """
    Adapts AWS DurableContext to PyWorkflow's context interface.

    This class wraps the AWS Durable Execution SDK's context to provide
    a familiar interface for PyWorkflow primitives while leveraging
    AWS's native checkpointing and durability features.

    Attributes:
        _aws_ctx: The underlying AWS DurableContext
        _step_counter: Counter for generating unique step names
    """

    def __init__(
        self,
        aws_ctx: DurableContext,
        run_id: str = "aws_run",
        workflow_name: str = "aws_workflow",
    ) -> None:
        """
        Initialize the AWS workflow context adapter.

        Args:
            aws_ctx: The AWS DurableContext from the Lambda handler
            run_id: Optional run ID for logging
            workflow_name: Optional workflow name for logging
        """
        super().__init__(run_id=run_id, workflow_name=workflow_name)
        self._aws_ctx = aws_ctx
        self._step_counter = 0

        # Set this context as the current AWS context (for backward compatibility)
        _aws_context.set(self)

        logger.debug("AWS workflow context initialized")

    async def run(
        self,
        func: StepFunction,
        *args: Any,
        name: str | None = None,
        **kwargs: Any,
    ) -> Any:
        """
        Execute a step function with AWS checkpointing.

        This is the new unified interface for step execution.
        Uses AWS's context.step() for automatic checkpointing and replay.

        Args:
            func: Step function to execute
            *args: Arguments for the function
            name: Optional step name (used for checkpointing)
            **kwargs: Keyword arguments

        Returns:
            Step result
        """
        return self.execute_step(func, *args, step_name=name, **kwargs)

    def execute_step(
        self,
        step_fn: Callable[..., Any],
        *args: Any,
        step_name: str | None = None,
        **kwargs: Any,
    ) -> Any:
        """
        Execute a step with AWS checkpointing (legacy interface).

        This method wraps a step function call with AWS's context.step(),
        which provides automatic checkpointing and replay behavior.

        Args:
            step_fn: The step function to execute
            *args: Positional arguments to pass to the step
            step_name: Optional name for the step (defaults to function name)
            **kwargs: Keyword arguments to pass to the step

        Returns:
            The result of the step function
        """
        # Generate step name
        name = step_name or getattr(step_fn, "__name__", None)
        if not name:
            self._step_counter += 1
            name = f"step_{self._step_counter}"

        logger.debug(f"Executing AWS step: {name}")

        def run_step(_: Any) -> Any:
            """Inner function to execute the step, handling async/sync."""
            # Check if the step function is async
            if asyncio.iscoroutinefunction(step_fn):
                # Get or create event loop for async execution
                try:
                    loop = asyncio.get_running_loop()
                except RuntimeError:
                    loop = None

                if loop is not None:
                    # We're in an async context - create a task
                    # This shouldn't happen in normal AWS Lambda flow
                    # but handle it gracefully
                    import concurrent.futures

                    with concurrent.futures.ThreadPoolExecutor() as executor:
                        future = executor.submit(asyncio.run, step_fn(*args, **kwargs))
                        return future.result()
                else:
                    # No running loop - use asyncio.run()
                    return asyncio.run(step_fn(*args, **kwargs))
            else:
                # Synchronous function - execute directly
                return step_fn(*args, **kwargs)

        # Use AWS context.step() for checkpointing
        result = self._aws_ctx.step(run_step, name=name)

        logger.debug(f"AWS step completed: {name}")
        return result

    async def sleep(self, duration: str | int | float) -> None:
        """
        Sleep using AWS wait (no compute charges during wait).

        This method uses AWS's context.wait() which suspends the Lambda
        execution without incurring compute charges.

        Args:
            duration: Sleep duration as:
                - str: Duration string like "5s", "10m", "1h"
                - int/float: Duration in seconds
        """
        # Parse duration to seconds
        duration_seconds = parse_duration(duration) if isinstance(duration, str) else int(duration)

        logger.debug(f"AWS sleep: {duration_seconds} seconds")

        # Try to use AWS Duration, fall back to raw seconds for mock context
        try:
            from aws_durable_execution_sdk_python.config import Duration

            duration_obj = Duration.from_seconds(duration_seconds)
        except ImportError:
            # AWS SDK not installed - likely using mock context for testing
            # MockDurableContext.wait() accepts raw seconds
            duration_obj = duration_seconds

        # Use AWS context.wait() for cost-free waiting
        self._aws_ctx.wait(duration_obj)

        logger.debug(f"AWS sleep completed: {duration_seconds} seconds")

    async def parallel(self, *tasks: Any) -> list[Any]:
        """Execute tasks in parallel using asyncio.gather."""
        return list(await asyncio.gather(*tasks))

    # =========================================================================
    # Cancellation support (not fully implemented for AWS - defer to AWS SDK)
    # =========================================================================

    def is_cancellation_requested(self) -> bool:
        """Check if cancellation requested (AWS manages this internally)."""
        return False

    def request_cancellation(self, reason: str | None = None) -> None:
        """Request cancellation (AWS manages this internally)."""
        logger.warning("Cancellation not supported in AWS context")

    def check_cancellation(self) -> None:
        """Check cancellation (AWS manages this internally)."""
        pass  # AWS handles this

    @property
    def cancellation_blocked(self) -> bool:
        """Check if cancellation blocked."""
        return False

    async def hook(
        self,
        name: str,
        timeout: int | None = None,
        on_created: Callable[[str], Awaitable[None]] | None = None,
        payload_schema: type | None = None,
    ) -> Any:
        """Wait for hook (not implemented for AWS - use wait_for_callback)."""
        raise NotImplementedError("Use AWS context.wait_for_callback() instead")

    def cleanup(self) -> None:
        """Clean up the context when workflow completes."""
        _aws_context.set(None)
        logger.debug("AWS workflow context cleaned up")
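
AWSWorkflowContext relies on only two calls from the wrapped DurableContext: step() for checkpointed execution and wait() for suspension. A hedged sketch of driving the adapter with a hypothetical stand-in for that SDK object follows; in a real Lambda the SDK-provided DurableContext (or MockDurableContext from pyworkflow.aws.testing) plays this role.

```python
# Sketch only: exercising AWSWorkflowContext.execute_step() against a stub.
# StubDurableContext is hypothetical; it mirrors just the step()/wait() calls
# the adapter above makes, without real checkpointing or cost-free suspension.
from __future__ import annotations

from collections.abc import Callable
from typing import Any

from pyworkflow.aws.context import AWSWorkflowContext


class StubDurableContext:
    def step(self, fn: Callable[[Any], Any], name: str | None = None) -> Any:
        # A real DurableContext checkpoints and replays here; the stub just calls fn.
        return fn(None)

    def wait(self, duration: Any) -> None:
        # A real DurableContext suspends the Lambda without compute charges.
        pass


def fetch_total(order_id: str) -> dict:
    return {"order_id": order_id, "total": 42}


ctx = AWSWorkflowContext(StubDurableContext(), run_id="demo-run", workflow_name="demo")
result = ctx.execute_step(fetch_total, "order-1", step_name="fetch_total")
print(result)  # {'order_id': 'order-1', 'total': 42}
```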