pyworkflow-engine 0.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
|
@@ -0,0 +1,612 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Abstract base class for storage backends.
|
|
3
|
+
|
|
4
|
+
All storage implementations must implement this interface to ensure consistency
|
|
5
|
+
across different backends (File, Memory, SQLite, PostgreSQL, DynamoDB).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from abc import ABC, abstractmethod
|
|
9
|
+
from datetime import datetime
|
|
10
|
+
|
|
11
|
+
from pyworkflow.engine.events import Event
|
|
12
|
+
from pyworkflow.storage.schemas import (
|
|
13
|
+
Hook,
|
|
14
|
+
HookStatus,
|
|
15
|
+
RunStatus,
|
|
16
|
+
Schedule,
|
|
17
|
+
ScheduleStatus,
|
|
18
|
+
StepExecution,
|
|
19
|
+
WorkflowRun,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class StorageBackend(ABC):
    """
    Abstract base class for workflow storage backends.

    Storage backends are responsible for:
    - Persisting workflow runs, steps
    - Managing the event log (append-only)
    - Providing query capabilities

    All methods are async to support both sync and async backends.

    Backends can also be used as async context managers: entering the
    context calls ``connect()`` and exiting calls ``disconnect()``, so
    callers don't have to hand-roll try/finally around the lifecycle.
    """

    # Workflow Run Operations

    @abstractmethod
    async def create_run(self, run: WorkflowRun) -> None:
        """
        Create a new workflow run record.

        Args:
            run: WorkflowRun instance to persist

        Raises:
            Exception: If run_id already exists
        """
        pass

    @abstractmethod
    async def get_run(self, run_id: str) -> WorkflowRun | None:
        """
        Retrieve a workflow run by ID.

        Args:
            run_id: Unique workflow run identifier

        Returns:
            WorkflowRun if found, None otherwise
        """
        pass

    @abstractmethod
    async def get_run_by_idempotency_key(self, key: str) -> WorkflowRun | None:
        """
        Retrieve a workflow run by idempotency key.

        Args:
            key: Idempotency key

        Returns:
            WorkflowRun if found, None otherwise
        """
        pass

    @abstractmethod
    async def update_run_status(
        self,
        run_id: str,
        status: RunStatus,
        result: str | None = None,
        error: str | None = None,
    ) -> None:
        """
        Update workflow run status and optionally result/error.

        Args:
            run_id: Workflow run identifier
            status: New status
            result: Serialized result (if completed)
            error: Error message (if failed)
        """
        pass

    @abstractmethod
    async def update_run_recovery_attempts(
        self,
        run_id: str,
        recovery_attempts: int,
    ) -> None:
        """
        Update the recovery attempts counter for a workflow run.

        Called when a workflow is being recovered after a worker failure.

        Args:
            run_id: Workflow run identifier
            recovery_attempts: New recovery attempts count
        """
        pass

    @abstractmethod
    async def list_runs(
        self,
        query: str | None = None,
        status: RunStatus | None = None,
        start_time: datetime | None = None,
        end_time: datetime | None = None,
        limit: int = 100,
        cursor: str | None = None,
    ) -> tuple[list[WorkflowRun], str | None]:
        """
        List workflow runs with optional filtering and cursor-based pagination.

        Args:
            query: Case-insensitive substring search in workflow_name and input_kwargs
            status: Filter by status
            start_time: Filter runs started at or after this time
            end_time: Filter runs started before this time
            limit: Maximum number of results
            cursor: Run ID to start after (for pagination)

        Returns:
            Tuple of (list of WorkflowRun instances, next_cursor or None if no more results)
        """
        pass

    # Event Log Operations

    @abstractmethod
    async def record_event(self, event: Event) -> None:
        """
        Record an event to the append-only event log.

        Events must be assigned a sequence number by the storage backend
        to ensure ordering.

        Args:
            event: Event to record (sequence will be assigned)
        """
        pass

    @abstractmethod
    async def get_events(
        self,
        run_id: str,
        event_types: list[str] | None = None,
    ) -> list[Event]:
        """
        Retrieve all events for a workflow run, ordered by sequence.

        Args:
            run_id: Workflow run identifier
            event_types: Optional filter by event types

        Returns:
            List of events ordered by sequence number
        """
        pass

    @abstractmethod
    async def get_latest_event(
        self,
        run_id: str,
        event_type: str | None = None,
    ) -> Event | None:
        """
        Get the latest event for a run, optionally filtered by type.

        Args:
            run_id: Workflow run identifier
            event_type: Optional event type filter

        Returns:
            Latest matching event or None
        """
        pass

    # Step Operations

    @abstractmethod
    async def create_step(self, step: StepExecution) -> None:
        """
        Create a step execution record.

        Args:
            step: StepExecution instance to persist
        """
        pass

    @abstractmethod
    async def get_step(self, step_id: str) -> StepExecution | None:
        """
        Retrieve a step execution by ID.

        Args:
            step_id: Step identifier

        Returns:
            StepExecution if found, None otherwise
        """
        pass

    @abstractmethod
    async def update_step_status(
        self,
        step_id: str,
        status: str,
        result: str | None = None,
        error: str | None = None,
    ) -> None:
        """
        Update step execution status.

        Args:
            step_id: Step identifier
            status: New status
            result: Serialized result (if completed)
            error: Error message (if failed)
        """
        pass

    @abstractmethod
    async def list_steps(self, run_id: str) -> list[StepExecution]:
        """
        List all steps for a workflow run.

        Args:
            run_id: Workflow run identifier

        Returns:
            List of StepExecution instances
        """
        pass

    # Hook Operations

    @abstractmethod
    async def create_hook(self, hook: Hook) -> None:
        """
        Create a hook record.

        Args:
            hook: Hook instance to persist
        """
        pass

    @abstractmethod
    async def get_hook(self, hook_id: str) -> Hook | None:
        """
        Retrieve a hook by ID.

        Args:
            hook_id: Hook identifier

        Returns:
            Hook if found, None otherwise
        """
        pass

    @abstractmethod
    async def get_hook_by_token(self, token: str) -> Hook | None:
        """
        Retrieve a hook by its token.

        Args:
            token: Hook token (composite format: run_id:hook_id)

        Returns:
            Hook if found, None otherwise
        """
        pass

    @abstractmethod
    async def update_hook_status(
        self,
        hook_id: str,
        status: HookStatus,
        payload: str | None = None,
    ) -> None:
        """
        Update hook status and optionally payload.

        Args:
            hook_id: Hook identifier
            status: New status
            payload: JSON serialized payload (if received)
        """
        pass

    @abstractmethod
    async def list_hooks(
        self,
        run_id: str | None = None,
        status: HookStatus | None = None,
        limit: int = 100,
        offset: int = 0,
    ) -> list[Hook]:
        """
        List hooks with optional filtering.

        Args:
            run_id: Filter by workflow run ID
            status: Filter by status
            limit: Maximum number of results
            offset: Number of results to skip

        Returns:
            List of Hook instances
        """
        pass

    # Cancellation Flag Operations

    @abstractmethod
    async def set_cancellation_flag(self, run_id: str) -> None:
        """
        Set a cancellation flag for a workflow run.

        This flag is checked by running workflows to detect cancellation
        requests. It's used when we can't directly interrupt a running
        workflow (e.g., Celery workers).

        Args:
            run_id: Workflow run identifier
        """
        pass

    @abstractmethod
    async def check_cancellation_flag(self, run_id: str) -> bool:
        """
        Check if a cancellation flag is set for a workflow run.

        Args:
            run_id: Workflow run identifier

        Returns:
            True if cancellation is requested, False otherwise
        """
        pass

    @abstractmethod
    async def clear_cancellation_flag(self, run_id: str) -> None:
        """
        Clear the cancellation flag for a workflow run.

        Called after cancellation has been processed or if cancellation
        is no longer needed.

        Args:
            run_id: Workflow run identifier
        """
        pass

    # Continue-As-New Chain Operations

    @abstractmethod
    async def update_run_continuation(
        self,
        run_id: str,
        continued_to_run_id: str,
    ) -> None:
        """
        Update the continuation link for a workflow run.

        Called when a workflow continues as new to link the current
        run to the new run.

        Args:
            run_id: Current workflow run identifier
            continued_to_run_id: New workflow run identifier
        """
        pass

    @abstractmethod
    async def get_workflow_chain(
        self,
        run_id: str,
    ) -> list[WorkflowRun]:
        """
        Get all runs in a continue-as-new chain.

        Given any run_id in a chain, returns all runs in the chain
        ordered from oldest to newest.

        Args:
            run_id: Any run ID in the chain

        Returns:
            List of WorkflowRun ordered from first to last in the chain
        """
        pass

    # Child Workflow Operations

    @abstractmethod
    async def get_children(
        self,
        parent_run_id: str,
        status: RunStatus | None = None,
    ) -> list[WorkflowRun]:
        """
        Get all child workflow runs for a parent workflow.

        Args:
            parent_run_id: Parent workflow run ID
            status: Optional filter by status

        Returns:
            List of child WorkflowRun instances
        """
        pass

    @abstractmethod
    async def get_parent(self, run_id: str) -> WorkflowRun | None:
        """
        Get the parent workflow run for a child workflow.

        Args:
            run_id: Child workflow run ID

        Returns:
            Parent WorkflowRun if exists, None if this is a root workflow
        """
        pass

    @abstractmethod
    async def get_nesting_depth(self, run_id: str) -> int:
        """
        Get the nesting depth for a workflow.

        Args:
            run_id: Workflow run ID

        Returns:
            Nesting depth (0=root, 1=child, 2=grandchild, max 3)
        """
        pass

    # Schedule Operations

    @abstractmethod
    async def create_schedule(self, schedule: Schedule) -> None:
        """
        Create a new schedule record.

        Args:
            schedule: Schedule instance to persist

        Raises:
            ValueError: If schedule_id already exists
        """
        pass

    @abstractmethod
    async def get_schedule(self, schedule_id: str) -> Schedule | None:
        """
        Retrieve a schedule by ID.

        Args:
            schedule_id: Schedule identifier

        Returns:
            Schedule if found, None otherwise
        """
        pass

    @abstractmethod
    async def update_schedule(self, schedule: Schedule) -> None:
        """
        Update an existing schedule.

        Replaces the schedule record with the provided schedule.
        The schedule_id must match an existing schedule.

        Args:
            schedule: Schedule with updated values

        Raises:
            ValueError: If schedule_id does not exist
        """
        pass

    @abstractmethod
    async def delete_schedule(self, schedule_id: str) -> None:
        """
        Mark a schedule as deleted (soft delete).

        Sets the schedule status to DELETED. The schedule record
        is preserved for audit purposes.

        Args:
            schedule_id: Schedule identifier

        Raises:
            ValueError: If schedule_id does not exist
        """
        pass

    @abstractmethod
    async def list_schedules(
        self,
        workflow_name: str | None = None,
        status: ScheduleStatus | None = None,
        limit: int = 100,
        offset: int = 0,
    ) -> list[Schedule]:
        """
        List schedules with optional filtering.

        Args:
            workflow_name: Filter by workflow name (None = all)
            status: Filter by status (None = all)
            limit: Maximum number of results
            offset: Number of results to skip

        Returns:
            List of Schedule instances, sorted by created_at descending
        """
        pass

    @abstractmethod
    async def get_due_schedules(self, now: datetime) -> list[Schedule]:
        """
        Get all schedules that are due to run.

        Returns schedules where:
        - status is ACTIVE
        - next_run_time is not None
        - next_run_time <= now

        Args:
            now: Current datetime

        Returns:
            List of schedules due to run, sorted by next_run_time ascending
        """
        pass

    @abstractmethod
    async def add_running_run(self, schedule_id: str, run_id: str) -> None:
        """
        Add a run_id to the schedule's running_run_ids list.

        Called when a scheduled workflow starts execution.

        Args:
            schedule_id: Schedule identifier
            run_id: Run ID to add

        Raises:
            ValueError: If schedule_id does not exist
        """
        pass

    @abstractmethod
    async def remove_running_run(self, schedule_id: str, run_id: str) -> None:
        """
        Remove a run_id from the schedule's running_run_ids list.

        Called when a scheduled workflow completes (success or failure).

        Args:
            schedule_id: Schedule identifier
            run_id: Run ID to remove

        Raises:
            ValueError: If schedule_id does not exist
        """
        pass

    # Lifecycle

    async def connect(self) -> None:
        """
        Initialize connection to storage backend.

        Override if your backend requires explicit connection setup.
        """
        pass

    async def disconnect(self) -> None:
        """
        Close connection to storage backend.

        Override if your backend requires explicit cleanup.
        """
        pass

    async def __aenter__(self):
        """
        Enter the async context: open the backend connection.

        Returns:
            This backend instance, ready for use.
        """
        await self.connect()
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        """
        Exit the async context: close the backend connection.

        Always disconnects, even when the body raised; returns None so
        exceptions are never suppressed.
        """
        await self.disconnect()

    async def health_check(self) -> bool:
        """
        Check if storage backend is healthy and accessible.

        Returns:
            True if healthy, False otherwise
        """
        try:
            # Simple check - try to list runs. Any backend failure
            # (connection, query, deserialization) maps to "unhealthy",
            # so the broad except is deliberate best-effort handling.
            await self.list_runs(limit=1)  # Returns (runs, next_cursor)
            return True
        except Exception:
            return False