pyworkflow-engine 0.1.7 (pyworkflow_engine-0.1.7-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
pyworkflow/context/base.py (new file)
@@ -0,0 +1,416 @@

```python
"""
WorkflowContext - Base class for all workflow execution contexts.

Uses Python's contextvars for implicit context passing, similar to Scala's implicits.
The context is automatically available within workflow execution without explicit passing.

Usage:
    from pyworkflow.context import get_context

    async def my_step(order_id: str):
        ctx = get_context()  # Implicitly available
        ctx.log(f"Processing {order_id}")
        return {"order_id": order_id}

    @workflow()
    async def my_workflow(order_id: str):
        # Context is set automatically by @workflow
        result = await my_step(order_id)
        await sleep("5m")  # sleep() uses implicit context
        return result
"""

from __future__ import annotations

import asyncio
from abc import ABC, abstractmethod
from collections.abc import Awaitable, Callable, Coroutine
from contextvars import ContextVar, Token
from typing import TYPE_CHECKING, Any, TypeVar

from loguru import logger

if TYPE_CHECKING:
    from pydantic import BaseModel

# Type for step functions
T = TypeVar("T")
StepFunction = Callable[..., T | Coroutine[Any, Any, T]]

# Global context variable - the implicit context
_current_context: ContextVar[WorkflowContext | None] = ContextVar("workflow_context", default=None)


def get_context() -> WorkflowContext:
    """
    Get the current workflow context (implicit).

    This function retrieves the context that was set when the workflow started.
    It should be called from within a workflow or step execution.

    Returns:
        The current WorkflowContext

    Raises:
        RuntimeError: If called outside of a workflow context

    Example:
        async def my_step(data: str):
            ctx = get_context()
            ctx.log(f"Processing: {data}")
            return {"data": data}
    """
    ctx = _current_context.get()
    if ctx is None:
        raise RuntimeError(
            "No workflow context available. "
            "This function must be called within a workflow execution. "
            "Make sure you're using the @workflow decorator."
        )
    return ctx


def has_context() -> bool:
    """
    Check if a workflow context is currently available.

    Returns:
        True if context is available, False otherwise
    """
    return _current_context.get() is not None


def set_context(ctx: WorkflowContext | None) -> Token:
    """
    Set the current workflow context.

    This is typically called by the workflow decorator, not user code.

    Args:
        ctx: The context to set, or None to clear

    Returns:
        Token that can be used to reset the context
    """
    return _current_context.set(ctx)


def reset_context(token: Token) -> None:
    """
    Reset the context to its previous value.

    Args:
        token: Token from set_context()
    """
    _current_context.reset(token)


class WorkflowContext(ABC):
    """
    Abstract base class for all workflow execution contexts.

    All context implementations (Local, AWS, Mock) must inherit from this class
    and implement the abstract methods.

    The context provides:
    - Step execution with checkpointing
    - Sleep/wait operations
    - Parallel execution
    - Logging with workflow context
    """

    def __init__(
        self,
        run_id: str = "unknown",
        workflow_name: str = "unknown",
    ) -> None:
        """
        Initialize base context.

        Args:
            run_id: Unique identifier for this workflow run
            workflow_name: Name of the workflow
        """
        self._run_id = run_id
        self._workflow_name = workflow_name

    @property
    def run_id(self) -> str:
        """Get the current workflow run ID."""
        return self._run_id

    @property
    def workflow_name(self) -> str:
        """Get the current workflow name."""
        return self._workflow_name

    # =========================================================================
    # Abstract methods - must be implemented by subclasses
    # =========================================================================

    @abstractmethod
    async def run(
        self,
        func: StepFunction[T],
        *args: Any,
        name: str | None = None,
        **kwargs: Any,
    ) -> T:
        """
        Execute a step function with context-appropriate handling.

        Args:
            func: The step function to execute (sync or async)
            *args: Positional arguments to pass to the function
            name: Optional step name for logging/checkpointing
            **kwargs: Keyword arguments to pass to the function

        Returns:
            The result of the step function
        """
        ...

    @abstractmethod
    async def sleep(self, duration: str | int | float) -> None:
        """
        Pause workflow execution for the specified duration.

        Args:
            duration: Sleep duration as:
                - str: Duration string ("5s", "10m", "1h", "1d")
                - int/float: Duration in seconds
        """
        ...

    @abstractmethod
    async def hook(
        self,
        name: str,
        timeout: int | None = None,
        on_created: Callable[[str], Awaitable[None]] | None = None,
        payload_schema: type[BaseModel] | None = None,
    ) -> Any:
        """
        Wait for an external event (webhook, approval, callback).

        The workflow suspends until resume_hook() is called with the token.
        Token is auto-generated in format "run_id:hook_id".

        Args:
            name: Human-readable name for the hook (for logging/debugging)
            timeout: Optional timeout in seconds. None means wait forever.
            on_created: Optional async callback called with token when hook is created.
            payload_schema: Optional Pydantic model for payload validation.

        Returns:
            The payload passed to resume_hook()

        Raises:
            HookExpiredError: If timeout is reached before resume
            NotImplementedError: If context doesn't support hooks (transient mode)
        """
        ...

    # =========================================================================
    # Cancellation support
    # =========================================================================

    @abstractmethod
    def is_cancellation_requested(self) -> bool:
        """
        Check if cancellation has been requested for this workflow.

        Returns:
            True if cancellation was requested, False otherwise
        """
        ...

    @abstractmethod
    def request_cancellation(self, reason: str | None = None) -> None:
        """
        Mark this workflow as cancelled.

        This sets the cancellation flag. The workflow will raise
        CancellationError at the next cancellation check point.

        Args:
            reason: Optional reason for cancellation
        """
        ...

    @abstractmethod
    def check_cancellation(self) -> None:
        """
        Check for cancellation and raise if requested.

        This should be called at interruptible points (before steps,
        during sleeps, etc.) to allow graceful cancellation.

        Raises:
            CancellationError: If cancellation was requested and not blocked
        """
        ...

    @property
    @abstractmethod
    def cancellation_blocked(self) -> bool:
        """
        Check if cancellation is currently blocked (within a shield scope).

        Returns:
            True if cancellation is blocked, False otherwise
        """
        ...

    # =========================================================================
    # Durable execution support - used by step decorator
    # =========================================================================

    @property
    def is_durable(self) -> bool:
        """Check if running in durable (event-sourced) mode."""
        return False  # Default: transient mode

    @property
    def is_replaying(self) -> bool:
        """Check if currently replaying events."""
        return False

    @property
    def storage(self) -> Any | None:
        """Get the storage backend."""
        return None

    def should_execute_step(self, step_id: str) -> bool:
        """Check if step should be executed (not already completed)."""
        return True  # Default: always execute

    def get_step_result(self, step_id: str) -> Any:
        """Get cached step result."""
        raise KeyError(f"Step {step_id} not found")

    def cache_step_result(self, step_id: str, result: Any) -> None:
        """Cache step result for replay."""
        pass  # Default: no caching

    def get_retry_state(self, step_id: str) -> dict[str, Any] | None:
        """Get retry state for a step."""
        return None

    def set_retry_state(
        self,
        step_id: str,
        attempt: int,
        resume_at: Any,
        max_retries: int,
        retry_delay: Any,
        last_error: str,
    ) -> None:
        """Set retry state for a step."""
        pass

    def clear_retry_state(self, step_id: str) -> None:
        """Clear retry state for a step."""
        pass

    async def validate_event_limits(self) -> None:
        """Validate event count against configured limits."""
        pass  # Default: no validation

    # =========================================================================
    # Child workflow support - used by start_child_workflow
    # =========================================================================

    @property
    def pending_children(self) -> dict[str, str]:
        """Get pending child workflows (child_id -> child_run_id)."""
        return {}

    def has_child_result(self, child_id: str) -> bool:
        """Check if a child workflow result exists."""
        return False

    def get_child_result(self, child_id: str) -> dict[str, Any]:
        """Get cached child workflow result."""
        return {}

    # =========================================================================
    # Optional methods - can be overridden by subclasses
    # =========================================================================

    async def parallel(self, *tasks: Coroutine[Any, Any, T]) -> list[T]:
        """
        Execute multiple tasks in parallel.

        Default implementation uses asyncio.gather.
        Subclasses may override for optimized parallel execution.

        Args:
            *tasks: Coroutines to execute in parallel

        Returns:
            List of results in the same order as input tasks
        """
        return list(await asyncio.gather(*tasks))

    async def wait_for_event(
        self,
        event_name: str,
        timeout: str | int | None = None,
    ) -> Any:
        """
        Wait for an external event (webhook, approval, callback).

        Default implementation raises NotImplementedError.
        Subclasses should override if they support external events.

        Args:
            event_name: Name/identifier for the event
            timeout: Optional timeout duration

        Returns:
            The event payload when received
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support wait_for_event. "
            "Use a context that supports external events (e.g., LocalContext with durable=True)."
        )

    # =========================================================================
    # Utility methods
    # =========================================================================

    def log(self, message: str, level: str = "info", **kwargs: Any) -> None:
        """
        Log a message with workflow context.

        Args:
            message: Log message
            level: Log level (debug, info, warning, error)
            **kwargs: Additional context to include in log
        """
        log_fn = getattr(logger, level, logger.info)
        log_fn(
            message,
            run_id=self._run_id,
            workflow_name=self._workflow_name,
            **kwargs,
        )

    def __enter__(self) -> WorkflowContext:
        """Context manager entry - set as current context."""
        self._token = set_context(self)
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Context manager exit - restore previous context."""
        reset_context(self._token)

    async def __aenter__(self) -> WorkflowContext:
        """Async context manager entry."""
        self._token = set_context(self)
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Async context manager exit."""
        reset_context(self._token)
```
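
For context, here is a minimal sketch of how the WorkflowContext contract above can be satisfied and how the implicit context is consumed. This is not code from the package: pyworkflow ships Local, AWS, and Mock contexts under pyworkflow/context/. The InlineContext class, the charge step, and the naive duration handling below are illustrative assumptions; only WorkflowContext, get_context, and the context-manager behavior come from the file shown above.

```python
# Illustrative sketch only, NOT part of pyworkflow. It implements every
# abstract method of WorkflowContext so the class can be instantiated.
import asyncio
from typing import Any

from pyworkflow.context.base import WorkflowContext, get_context


class InlineContext(WorkflowContext):
    """Runs steps inline in-process; supports cooperative cancellation."""

    def __init__(self, run_id: str = "demo-run", workflow_name: str = "demo") -> None:
        super().__init__(run_id=run_id, workflow_name=workflow_name)
        self._cancel_requested = False

    async def run(self, func, *args, name=None, **kwargs):
        # Execute the step directly; await the result if func is async.
        result = func(*args, **kwargs)
        if asyncio.iscoroutine(result):
            result = await result
        return result

    async def sleep(self, duration) -> None:
        # Naive for the sketch: strip a unit suffix and sleep that many
        # seconds. The real contexts use pyworkflow's duration parsing.
        seconds = float(duration.rstrip("smhd")) if isinstance(duration, str) else float(duration)
        await asyncio.sleep(seconds)

    async def hook(self, name, timeout=None, on_created=None, payload_schema=None) -> Any:
        raise NotImplementedError("InlineContext does not support hooks")

    def is_cancellation_requested(self) -> bool:
        return self._cancel_requested

    def request_cancellation(self, reason: str | None = None) -> None:
        self._cancel_requested = True

    def check_cancellation(self) -> None:
        if self._cancel_requested and not self.cancellation_blocked:
            # The package raises its own CancellationError here.
            raise RuntimeError("workflow cancelled")

    @property
    def cancellation_blocked(self) -> bool:
        return False


async def charge(order_id: str) -> dict:
    ctx = get_context()  # resolved implicitly, no explicit argument passing
    ctx.log(f"charging {order_id}")
    return {"order_id": order_id, "charged": True}


async def main() -> None:
    async with InlineContext(run_id="run-123", workflow_name="checkout"):
        result = await charge("order-42")
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
```

The async with block works because __aenter__/__aexit__ in the base class set and reset the module-level ContextVar, which is what lets get_context() resolve inside charge() without the context ever being passed as an argument.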