pyworkflow_engine-0.1.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. dashboard/backend/app/__init__.py +1 -0
  2. dashboard/backend/app/config.py +32 -0
  3. dashboard/backend/app/controllers/__init__.py +6 -0
  4. dashboard/backend/app/controllers/run_controller.py +86 -0
  5. dashboard/backend/app/controllers/workflow_controller.py +33 -0
  6. dashboard/backend/app/dependencies/__init__.py +5 -0
  7. dashboard/backend/app/dependencies/storage.py +50 -0
  8. dashboard/backend/app/repositories/__init__.py +6 -0
  9. dashboard/backend/app/repositories/run_repository.py +80 -0
  10. dashboard/backend/app/repositories/workflow_repository.py +27 -0
  11. dashboard/backend/app/rest/__init__.py +8 -0
  12. dashboard/backend/app/rest/v1/__init__.py +12 -0
  13. dashboard/backend/app/rest/v1/health.py +33 -0
  14. dashboard/backend/app/rest/v1/runs.py +133 -0
  15. dashboard/backend/app/rest/v1/workflows.py +41 -0
  16. dashboard/backend/app/schemas/__init__.py +23 -0
  17. dashboard/backend/app/schemas/common.py +16 -0
  18. dashboard/backend/app/schemas/event.py +24 -0
  19. dashboard/backend/app/schemas/hook.py +25 -0
  20. dashboard/backend/app/schemas/run.py +54 -0
  21. dashboard/backend/app/schemas/step.py +28 -0
  22. dashboard/backend/app/schemas/workflow.py +31 -0
  23. dashboard/backend/app/server.py +87 -0
  24. dashboard/backend/app/services/__init__.py +6 -0
  25. dashboard/backend/app/services/run_service.py +240 -0
  26. dashboard/backend/app/services/workflow_service.py +155 -0
  27. dashboard/backend/main.py +18 -0
  28. docs/concepts/cancellation.mdx +362 -0
  29. docs/concepts/continue-as-new.mdx +434 -0
  30. docs/concepts/events.mdx +266 -0
  31. docs/concepts/fault-tolerance.mdx +370 -0
  32. docs/concepts/hooks.mdx +552 -0
  33. docs/concepts/limitations.mdx +167 -0
  34. docs/concepts/schedules.mdx +775 -0
  35. docs/concepts/sleep.mdx +312 -0
  36. docs/concepts/steps.mdx +301 -0
  37. docs/concepts/workflows.mdx +255 -0
  38. docs/guides/cli.mdx +942 -0
  39. docs/guides/configuration.mdx +560 -0
  40. docs/introduction.mdx +155 -0
  41. docs/quickstart.mdx +279 -0
  42. examples/__init__.py +1 -0
  43. examples/celery/__init__.py +1 -0
  44. examples/celery/durable/docker-compose.yml +55 -0
  45. examples/celery/durable/pyworkflow.config.yaml +12 -0
  46. examples/celery/durable/workflows/__init__.py +122 -0
  47. examples/celery/durable/workflows/basic.py +87 -0
  48. examples/celery/durable/workflows/batch_processing.py +102 -0
  49. examples/celery/durable/workflows/cancellation.py +273 -0
  50. examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
  51. examples/celery/durable/workflows/child_workflows.py +202 -0
  52. examples/celery/durable/workflows/continue_as_new.py +260 -0
  53. examples/celery/durable/workflows/fault_tolerance.py +210 -0
  54. examples/celery/durable/workflows/hooks.py +211 -0
  55. examples/celery/durable/workflows/idempotency.py +112 -0
  56. examples/celery/durable/workflows/long_running.py +99 -0
  57. examples/celery/durable/workflows/retries.py +101 -0
  58. examples/celery/durable/workflows/schedules.py +209 -0
  59. examples/celery/transient/01_basic_workflow.py +91 -0
  60. examples/celery/transient/02_fault_tolerance.py +257 -0
  61. examples/celery/transient/__init__.py +20 -0
  62. examples/celery/transient/pyworkflow.config.yaml +25 -0
  63. examples/local/__init__.py +1 -0
  64. examples/local/durable/01_basic_workflow.py +94 -0
  65. examples/local/durable/02_file_storage.py +132 -0
  66. examples/local/durable/03_retries.py +169 -0
  67. examples/local/durable/04_long_running.py +119 -0
  68. examples/local/durable/05_event_log.py +145 -0
  69. examples/local/durable/06_idempotency.py +148 -0
  70. examples/local/durable/07_hooks.py +334 -0
  71. examples/local/durable/08_cancellation.py +233 -0
  72. examples/local/durable/09_child_workflows.py +198 -0
  73. examples/local/durable/10_child_workflow_patterns.py +265 -0
  74. examples/local/durable/11_continue_as_new.py +249 -0
  75. examples/local/durable/12_schedules.py +198 -0
  76. examples/local/durable/__init__.py +1 -0
  77. examples/local/transient/01_quick_tasks.py +87 -0
  78. examples/local/transient/02_retries.py +130 -0
  79. examples/local/transient/03_sleep.py +141 -0
  80. examples/local/transient/__init__.py +1 -0
  81. pyworkflow/__init__.py +256 -0
  82. pyworkflow/aws/__init__.py +68 -0
  83. pyworkflow/aws/context.py +234 -0
  84. pyworkflow/aws/handler.py +184 -0
  85. pyworkflow/aws/testing.py +310 -0
  86. pyworkflow/celery/__init__.py +41 -0
  87. pyworkflow/celery/app.py +198 -0
  88. pyworkflow/celery/scheduler.py +315 -0
  89. pyworkflow/celery/tasks.py +1746 -0
  90. pyworkflow/cli/__init__.py +132 -0
  91. pyworkflow/cli/__main__.py +6 -0
  92. pyworkflow/cli/commands/__init__.py +1 -0
  93. pyworkflow/cli/commands/hooks.py +640 -0
  94. pyworkflow/cli/commands/quickstart.py +495 -0
  95. pyworkflow/cli/commands/runs.py +773 -0
  96. pyworkflow/cli/commands/scheduler.py +130 -0
  97. pyworkflow/cli/commands/schedules.py +794 -0
  98. pyworkflow/cli/commands/setup.py +703 -0
  99. pyworkflow/cli/commands/worker.py +413 -0
  100. pyworkflow/cli/commands/workflows.py +1257 -0
  101. pyworkflow/cli/output/__init__.py +1 -0
  102. pyworkflow/cli/output/formatters.py +321 -0
  103. pyworkflow/cli/output/styles.py +121 -0
  104. pyworkflow/cli/utils/__init__.py +1 -0
  105. pyworkflow/cli/utils/async_helpers.py +30 -0
  106. pyworkflow/cli/utils/config.py +130 -0
  107. pyworkflow/cli/utils/config_generator.py +344 -0
  108. pyworkflow/cli/utils/discovery.py +53 -0
  109. pyworkflow/cli/utils/docker_manager.py +651 -0
  110. pyworkflow/cli/utils/interactive.py +364 -0
  111. pyworkflow/cli/utils/storage.py +115 -0
  112. pyworkflow/config.py +329 -0
  113. pyworkflow/context/__init__.py +63 -0
  114. pyworkflow/context/aws.py +230 -0
  115. pyworkflow/context/base.py +416 -0
  116. pyworkflow/context/local.py +930 -0
  117. pyworkflow/context/mock.py +381 -0
  118. pyworkflow/core/__init__.py +0 -0
  119. pyworkflow/core/exceptions.py +353 -0
  120. pyworkflow/core/registry.py +313 -0
  121. pyworkflow/core/scheduled.py +328 -0
  122. pyworkflow/core/step.py +494 -0
  123. pyworkflow/core/workflow.py +294 -0
  124. pyworkflow/discovery.py +248 -0
  125. pyworkflow/engine/__init__.py +0 -0
  126. pyworkflow/engine/events.py +879 -0
  127. pyworkflow/engine/executor.py +682 -0
  128. pyworkflow/engine/replay.py +273 -0
  129. pyworkflow/observability/__init__.py +19 -0
  130. pyworkflow/observability/logging.py +234 -0
  131. pyworkflow/primitives/__init__.py +33 -0
  132. pyworkflow/primitives/child_handle.py +174 -0
  133. pyworkflow/primitives/child_workflow.py +372 -0
  134. pyworkflow/primitives/continue_as_new.py +101 -0
  135. pyworkflow/primitives/define_hook.py +150 -0
  136. pyworkflow/primitives/hooks.py +97 -0
  137. pyworkflow/primitives/resume_hook.py +210 -0
  138. pyworkflow/primitives/schedule.py +545 -0
  139. pyworkflow/primitives/shield.py +96 -0
  140. pyworkflow/primitives/sleep.py +100 -0
  141. pyworkflow/runtime/__init__.py +21 -0
  142. pyworkflow/runtime/base.py +179 -0
  143. pyworkflow/runtime/celery.py +310 -0
  144. pyworkflow/runtime/factory.py +101 -0
  145. pyworkflow/runtime/local.py +706 -0
  146. pyworkflow/scheduler/__init__.py +9 -0
  147. pyworkflow/scheduler/local.py +248 -0
  148. pyworkflow/serialization/__init__.py +0 -0
  149. pyworkflow/serialization/decoder.py +146 -0
  150. pyworkflow/serialization/encoder.py +162 -0
  151. pyworkflow/storage/__init__.py +54 -0
  152. pyworkflow/storage/base.py +612 -0
  153. pyworkflow/storage/config.py +185 -0
  154. pyworkflow/storage/dynamodb.py +1315 -0
  155. pyworkflow/storage/file.py +827 -0
  156. pyworkflow/storage/memory.py +549 -0
  157. pyworkflow/storage/postgres.py +1161 -0
  158. pyworkflow/storage/schemas.py +486 -0
  159. pyworkflow/storage/sqlite.py +1136 -0
  160. pyworkflow/utils/__init__.py +0 -0
  161. pyworkflow/utils/duration.py +177 -0
  162. pyworkflow/utils/schedule.py +391 -0
  163. pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
  164. pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
  165. pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
  166. pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
  167. pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
  168. pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
  169. tests/examples/__init__.py +0 -0
  170. tests/integration/__init__.py +0 -0
  171. tests/integration/test_cancellation.py +330 -0
  172. tests/integration/test_child_workflows.py +439 -0
  173. tests/integration/test_continue_as_new.py +428 -0
  174. tests/integration/test_dynamodb_storage.py +1146 -0
  175. tests/integration/test_fault_tolerance.py +369 -0
  176. tests/integration/test_schedule_storage.py +484 -0
  177. tests/unit/__init__.py +0 -0
  178. tests/unit/backends/__init__.py +1 -0
  179. tests/unit/backends/test_dynamodb_storage.py +1554 -0
  180. tests/unit/backends/test_postgres_storage.py +1281 -0
  181. tests/unit/backends/test_sqlite_storage.py +1460 -0
  182. tests/unit/conftest.py +41 -0
  183. tests/unit/test_cancellation.py +364 -0
  184. tests/unit/test_child_workflows.py +680 -0
  185. tests/unit/test_continue_as_new.py +441 -0
  186. tests/unit/test_event_limits.py +316 -0
  187. tests/unit/test_executor.py +320 -0
  188. tests/unit/test_fault_tolerance.py +334 -0
  189. tests/unit/test_hooks.py +495 -0
  190. tests/unit/test_registry.py +261 -0
  191. tests/unit/test_replay.py +420 -0
  192. tests/unit/test_schedule_schemas.py +285 -0
  193. tests/unit/test_schedule_utils.py +286 -0
  194. tests/unit/test_scheduled_workflow.py +274 -0
  195. tests/unit/test_step.py +353 -0
  196. tests/unit/test_workflow.py +243 -0
pyworkflow/storage/postgres.py
@@ -0,0 +1,1161 @@
+ """
+ PostgreSQL storage backend using asyncpg.
+
+ This backend stores workflow data in a PostgreSQL database, suitable for:
+ - Production deployments requiring scalability
+ - Multi-instance deployments
+ - High-availability requirements
+
+ Provides ACID guarantees, connection pooling, and efficient querying with SQL indexes.
+ """
+
+ import json
+ from datetime import UTC, datetime
+ from typing import Any
+
+ import asyncpg
+
+ from pyworkflow.engine.events import Event, EventType
+ from pyworkflow.storage.base import StorageBackend
+ from pyworkflow.storage.schemas import (
+     Hook,
+     HookStatus,
+     OverlapPolicy,
+     RunStatus,
+     Schedule,
+     ScheduleSpec,
+     ScheduleStatus,
+     StepExecution,
+     StepStatus,
+     WorkflowRun,
+ )
+
+
+ class PostgresStorageBackend(StorageBackend):
+     """
+     PostgreSQL storage backend using asyncpg for async operations.
+
+     All workflow data is stored in a PostgreSQL database with proper
+     indexes for efficient querying and connection pooling for performance.
+     """
+
+     def __init__(
+         self,
+         dsn: str | None = None,
+         host: str = "localhost",
+         port: int = 5432,
+         user: str = "pyworkflow",
+         password: str = "",
+         database: str = "pyworkflow",
+         min_pool_size: int = 1,
+         max_pool_size: int = 10,
+     ):
+         """
+         Initialize PostgreSQL storage backend.
+
+         Args:
+             dsn: Connection string (e.g., postgresql://user:pass@host:5432/db)
+             host: Database host (used if dsn not provided)
+             port: Database port (used if dsn not provided)
+             user: Database user (used if dsn not provided)
+             password: Database password (used if dsn not provided)
+             database: Database name (used if dsn not provided)
+             min_pool_size: Minimum connections in pool
+             max_pool_size: Maximum connections in pool
+         """
+         self.dsn = dsn
+         self.host = host
+         self.port = port
+         self.user = user
+         self.password = password
+         self.database = database
+         self.min_pool_size = min_pool_size
+         self.max_pool_size = max_pool_size
+         self._pool: asyncpg.Pool | None = None
+         self._initialized = False
+
+     def _build_dsn(self) -> str:
+         """Build DSN from individual parameters."""
+         if self.password:
+             return (
+                 f"postgresql://{self.user}:{self.password}@{self.host}:{self.port}/{self.database}"
+             )
+         return f"postgresql://{self.user}@{self.host}:{self.port}/{self.database}"
+
+     async def connect(self) -> None:
+         """Initialize connection pool and create tables if needed."""
+         if self._pool is None:
+             self._pool = await asyncpg.create_pool(
+                 dsn=self.dsn or self._build_dsn(),
+                 min_size=self.min_pool_size,
+                 max_size=self.max_pool_size,
+             )
+
+         if not self._initialized:
+             await self._initialize_schema()
+             self._initialized = True
+
+     async def disconnect(self) -> None:
+         """Close connection pool."""
+         if self._pool:
+             await self._pool.close()
+             self._pool = None
+             self._initialized = False
+
+     async def _initialize_schema(self) -> None:
+         """Create database tables if they don't exist."""
+         if not self._pool:
+             await self.connect()
+
+         pool = self._ensure_connected()
+         async with pool.acquire() as conn:
+             # Workflow runs table
+             await conn.execute("""
+                 CREATE TABLE IF NOT EXISTS workflow_runs (
+                     run_id TEXT PRIMARY KEY,
+                     workflow_name TEXT NOT NULL,
+                     status TEXT NOT NULL,
+                     created_at TIMESTAMPTZ NOT NULL,
+                     updated_at TIMESTAMPTZ NOT NULL,
+                     started_at TIMESTAMPTZ,
+                     completed_at TIMESTAMPTZ,
+                     input_args TEXT NOT NULL DEFAULT '[]',
+                     input_kwargs TEXT NOT NULL DEFAULT '{}',
+                     result TEXT,
+                     error TEXT,
+                     idempotency_key TEXT,
+                     max_duration TEXT,
+                     metadata TEXT DEFAULT '{}',
+                     recovery_attempts INTEGER DEFAULT 0,
+                     max_recovery_attempts INTEGER DEFAULT 3,
+                     recover_on_worker_loss BOOLEAN DEFAULT TRUE,
+                     parent_run_id TEXT REFERENCES workflow_runs(run_id),
+                     nesting_depth INTEGER DEFAULT 0,
+                     continued_from_run_id TEXT,
+                     continued_to_run_id TEXT
+                 )
+             """)
+
+             # Indexes for workflow_runs
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_runs_status ON workflow_runs(status)"
+             )
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_runs_workflow_name ON workflow_runs(workflow_name)"
+             )
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_runs_created_at ON workflow_runs(created_at DESC)"
+             )
+             await conn.execute(
+                 "CREATE UNIQUE INDEX IF NOT EXISTS idx_runs_idempotency_key ON workflow_runs(idempotency_key) WHERE idempotency_key IS NOT NULL"
+             )
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_runs_parent_run_id ON workflow_runs(parent_run_id)"
+             )
+
+             # Events table
+             await conn.execute("""
+                 CREATE TABLE IF NOT EXISTS events (
+                     event_id TEXT PRIMARY KEY,
+                     run_id TEXT NOT NULL REFERENCES workflow_runs(run_id) ON DELETE CASCADE,
+                     sequence INTEGER NOT NULL,
+                     type TEXT NOT NULL,
+                     timestamp TIMESTAMPTZ NOT NULL,
+                     data TEXT NOT NULL DEFAULT '{}'
+                 )
+             """)
+
+             # Indexes for events
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_events_run_id_sequence ON events(run_id, sequence)"
+             )
+             await conn.execute("CREATE INDEX IF NOT EXISTS idx_events_type ON events(type)")
+
+             # Steps table
+             await conn.execute("""
+                 CREATE TABLE IF NOT EXISTS steps (
+                     step_id TEXT PRIMARY KEY,
+                     run_id TEXT NOT NULL REFERENCES workflow_runs(run_id) ON DELETE CASCADE,
+                     step_name TEXT NOT NULL,
+                     status TEXT NOT NULL,
+                     created_at TIMESTAMPTZ NOT NULL,
+                     started_at TIMESTAMPTZ,
+                     completed_at TIMESTAMPTZ,
+                     input_args TEXT NOT NULL DEFAULT '[]',
+                     input_kwargs TEXT NOT NULL DEFAULT '{}',
+                     result TEXT,
+                     error TEXT,
+                     retry_count INTEGER DEFAULT 0
+                 )
+             """)
+
+             # Indexes for steps
+             await conn.execute("CREATE INDEX IF NOT EXISTS idx_steps_run_id ON steps(run_id)")
+
+             # Hooks table
+             await conn.execute("""
+                 CREATE TABLE IF NOT EXISTS hooks (
+                     hook_id TEXT PRIMARY KEY,
+                     run_id TEXT NOT NULL REFERENCES workflow_runs(run_id) ON DELETE CASCADE,
+                     token TEXT UNIQUE NOT NULL,
+                     created_at TIMESTAMPTZ NOT NULL,
+                     received_at TIMESTAMPTZ,
+                     expires_at TIMESTAMPTZ,
+                     status TEXT NOT NULL,
+                     payload TEXT,
+                     metadata TEXT DEFAULT '{}'
+                 )
+             """)
+
+             # Indexes for hooks
+             await conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_hooks_token ON hooks(token)")
+             await conn.execute("CREATE INDEX IF NOT EXISTS idx_hooks_run_id ON hooks(run_id)")
+             await conn.execute("CREATE INDEX IF NOT EXISTS idx_hooks_status ON hooks(status)")
+
+             # Schedules table
+             await conn.execute("""
+                 CREATE TABLE IF NOT EXISTS schedules (
+                     schedule_id TEXT PRIMARY KEY,
+                     workflow_name TEXT NOT NULL,
+                     spec TEXT NOT NULL,
+                     spec_type TEXT NOT NULL,
+                     timezone TEXT,
+                     input_args TEXT NOT NULL DEFAULT '[]',
+                     input_kwargs TEXT NOT NULL DEFAULT '{}',
+                     status TEXT NOT NULL,
+                     overlap_policy TEXT NOT NULL,
+                     next_run_time TIMESTAMPTZ,
+                     last_run_time TIMESTAMPTZ,
+                     running_run_ids TEXT DEFAULT '[]',
+                     metadata TEXT DEFAULT '{}',
+                     created_at TIMESTAMPTZ NOT NULL,
+                     updated_at TIMESTAMPTZ NOT NULL,
+                     paused_at TIMESTAMPTZ,
+                     deleted_at TIMESTAMPTZ
+                 )
+             """)
+
+             # Indexes for schedules
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_schedules_status ON schedules(status)"
+             )
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_schedules_next_run_time ON schedules(next_run_time)"
+             )
+             await conn.execute(
+                 "CREATE INDEX IF NOT EXISTS idx_schedules_workflow_name ON schedules(workflow_name)"
+             )
+
+             # Cancellation flags table
+             await conn.execute("""
+                 CREATE TABLE IF NOT EXISTS cancellation_flags (
+                     run_id TEXT PRIMARY KEY REFERENCES workflow_runs(run_id) ON DELETE CASCADE,
+                     created_at TIMESTAMPTZ NOT NULL
+                 )
+             """)
+
+     def _ensure_connected(self) -> asyncpg.Pool:
+         """Ensure database pool is connected."""
+         if not self._pool:
+             raise RuntimeError("Database not connected. Call connect() first.")
+         return self._pool
+
+     # Workflow Run Operations
+
+     async def create_run(self, run: WorkflowRun) -> None:
+         """Create a new workflow run record."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 INSERT INTO workflow_runs (
+                     run_id, workflow_name, status, created_at, updated_at, started_at,
+                     completed_at, input_args, input_kwargs, result, error, idempotency_key,
+                     max_duration, metadata, recovery_attempts, max_recovery_attempts,
+                     recover_on_worker_loss, parent_run_id, nesting_depth,
+                     continued_from_run_id, continued_to_run_id
+                 ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21)
+                 """,
+                 run.run_id,
+                 run.workflow_name,
+                 run.status.value,
+                 run.created_at,
+                 run.updated_at,
+                 run.started_at,
+                 run.completed_at,
+                 run.input_args,
+                 run.input_kwargs,
+                 run.result,
+                 run.error,
+                 run.idempotency_key,
+                 run.max_duration,
+                 json.dumps(run.metadata),
+                 run.recovery_attempts,
+                 run.max_recovery_attempts,
+                 run.recover_on_worker_loss,
+                 run.parent_run_id,
+                 run.nesting_depth,
+                 run.continued_from_run_id,
+                 run.continued_to_run_id,
+             )
+
+     async def get_run(self, run_id: str) -> WorkflowRun | None:
+         """Retrieve a workflow run by ID."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT * FROM workflow_runs WHERE run_id = $1", run_id)
+
+         if not row:
+             return None
+
+         return self._row_to_workflow_run(row)
+
+     async def get_run_by_idempotency_key(self, key: str) -> WorkflowRun | None:
+         """Retrieve a workflow run by idempotency key."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT * FROM workflow_runs WHERE idempotency_key = $1", key)
+
+         if not row:
+             return None
+
+         return self._row_to_workflow_run(row)
+
+     async def update_run_status(
+         self,
+         run_id: str,
+         status: RunStatus,
+         result: str | None = None,
+         error: str | None = None,
+     ) -> None:
+         """Update workflow run status."""
+         pool = self._ensure_connected()
+
+         now = datetime.now(UTC)
+         completed_at = now if status == RunStatus.COMPLETED else None
+
+         # Build dynamic query
+         updates = ["status = $1", "updated_at = $2"]
+         params: list[Any] = [status.value, now]
+         param_idx = 3
+
+         if result is not None:
+             updates.append(f"result = ${param_idx}")
+             params.append(result)
+             param_idx += 1
+
+         if error is not None:
+             updates.append(f"error = ${param_idx}")
+             params.append(error)
+             param_idx += 1
+
+         if completed_at:
+             updates.append(f"completed_at = ${param_idx}")
+             params.append(completed_at)
+             param_idx += 1
+
+         params.append(run_id)
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 f"UPDATE workflow_runs SET {', '.join(updates)} WHERE run_id = ${param_idx}",
+                 *params,
+             )
+
+     async def update_run_recovery_attempts(
+         self,
+         run_id: str,
+         recovery_attempts: int,
+     ) -> None:
+         """Update the recovery attempts counter for a workflow run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 UPDATE workflow_runs
+                 SET recovery_attempts = $1, updated_at = $2
+                 WHERE run_id = $3
+                 """,
+                 recovery_attempts,
+                 datetime.now(UTC),
+                 run_id,
+             )
+
+     async def list_runs(
+         self,
+         query: str | None = None,
+         status: RunStatus | None = None,
+         start_time: datetime | None = None,
+         end_time: datetime | None = None,
+         limit: int = 100,
+         cursor: str | None = None,
+     ) -> tuple[list[WorkflowRun], str | None]:
+         """List workflow runs with optional filtering and pagination."""
+         pool = self._ensure_connected()
+
+         conditions = []
+         params: list[Any] = []
+         param_idx = 1
+
+         if cursor:
+             conditions.append(
+                 f"created_at < (SELECT created_at FROM workflow_runs WHERE run_id = ${param_idx})"
+             )
+             params.append(cursor)
+             param_idx += 1
+
+         if query:
+             conditions.append(
+                 f"(workflow_name LIKE ${param_idx} OR input_kwargs LIKE ${param_idx + 1})"
+             )
+             search_param = f"%{query}%"
+             params.extend([search_param, search_param])
+             param_idx += 2
+
+         if status:
+             conditions.append(f"status = ${param_idx}")
+             params.append(status.value)
+             param_idx += 1
+
+         if start_time:
+             conditions.append(f"created_at >= ${param_idx}")
+             params.append(start_time)
+             param_idx += 1
+
+         if end_time:
+             conditions.append(f"created_at < ${param_idx}")
+             params.append(end_time)
+             param_idx += 1
+
+         where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
+         params.append(limit + 1)  # Fetch one extra to determine if there are more
+
+         sql = f"""
+             SELECT * FROM workflow_runs
+             {where_clause}
+             ORDER BY created_at DESC
+             LIMIT ${param_idx}
+         """
+
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(sql, *params)
+
+         has_more = len(rows) > limit
+         if has_more:
+             rows = rows[:limit]
+
+         runs = [self._row_to_workflow_run(row) for row in rows]
+         next_cursor = runs[-1].run_id if runs and has_more else None
+
+         return runs, next_cursor
+
+     # Event Log Operations
+
+     async def record_event(self, event: Event) -> None:
+         """Record an event to the append-only event log."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn, conn.transaction():
+             # Get next sequence number and insert in a transaction
+             row = await conn.fetchrow(
+                 "SELECT COALESCE(MAX(sequence), -1) + 1 FROM events WHERE run_id = $1",
+                 event.run_id,
+             )
+             sequence = row[0] if row else 0
+
+             await conn.execute(
+                 """
+                 INSERT INTO events (event_id, run_id, sequence, type, timestamp, data)
+                 VALUES ($1, $2, $3, $4, $5, $6)
+                 """,
+                 event.event_id,
+                 event.run_id,
+                 sequence,
+                 event.type.value,
+                 event.timestamp,
+                 json.dumps(event.data),
+             )
+
+     async def get_events(
+         self,
+         run_id: str,
+         event_types: list[str] | None = None,
+     ) -> list[Event]:
+         """Retrieve all events for a workflow run, ordered by sequence."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             if event_types:
+                 rows = await conn.fetch(
+                     """
+                     SELECT * FROM events
+                     WHERE run_id = $1 AND type = ANY($2)
+                     ORDER BY sequence ASC
+                     """,
+                     run_id,
+                     event_types,
+                 )
+             else:
+                 rows = await conn.fetch(
+                     "SELECT * FROM events WHERE run_id = $1 ORDER BY sequence ASC",
+                     run_id,
+                 )
+
+         return [self._row_to_event(row) for row in rows]
+
+     async def get_latest_event(
+         self,
+         run_id: str,
+         event_type: str | None = None,
+     ) -> Event | None:
+         """Get the latest event for a run, optionally filtered by type."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             if event_type:
+                 row = await conn.fetchrow(
+                     """
+                     SELECT * FROM events
+                     WHERE run_id = $1 AND type = $2
+                     ORDER BY sequence DESC
+                     LIMIT 1
+                     """,
+                     run_id,
+                     event_type,
+                 )
+             else:
+                 row = await conn.fetchrow(
+                     """
+                     SELECT * FROM events
+                     WHERE run_id = $1
+                     ORDER BY sequence DESC
+                     LIMIT 1
+                     """,
+                     run_id,
+                 )
+
+         if not row:
+             return None
+
+         return self._row_to_event(row)
+
+     # Step Operations
+
+     async def create_step(self, step: StepExecution) -> None:
+         """Create a step execution record."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 INSERT INTO steps (
+                     step_id, run_id, step_name, status, created_at, started_at,
+                     completed_at, input_args, input_kwargs, result, error, retry_count
+                 ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
+                 """,
+                 step.step_id,
+                 step.run_id,
+                 step.step_name,
+                 step.status.value,
+                 step.created_at,
+                 step.started_at,
+                 step.completed_at,
+                 step.input_args,
+                 step.input_kwargs,
+                 step.result,
+                 step.error,
+                 step.attempt,
+             )
+
+     async def get_step(self, step_id: str) -> StepExecution | None:
+         """Retrieve a step execution by ID."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT * FROM steps WHERE step_id = $1", step_id)
+
+         if not row:
+             return None
+
+         return self._row_to_step_execution(row)
+
+     async def update_step_status(
+         self,
+         step_id: str,
+         status: str,
+         result: str | None = None,
+         error: str | None = None,
+     ) -> None:
+         """Update step execution status."""
+         pool = self._ensure_connected()
+
+         updates = ["status = $1"]
+         params: list[Any] = [status]
+         param_idx = 2
+
+         if result is not None:
+             updates.append(f"result = ${param_idx}")
+             params.append(result)
+             param_idx += 1
+
+         if error is not None:
+             updates.append(f"error = ${param_idx}")
+             params.append(error)
+             param_idx += 1
+
+         if status == "completed":
+             updates.append(f"completed_at = ${param_idx}")
+             params.append(datetime.now(UTC))
+             param_idx += 1
+
+         params.append(step_id)
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 f"UPDATE steps SET {', '.join(updates)} WHERE step_id = ${param_idx}",
+                 *params,
+             )
+
+     async def list_steps(self, run_id: str) -> list[StepExecution]:
+         """List all steps for a workflow run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(
+                 "SELECT * FROM steps WHERE run_id = $1 ORDER BY created_at ASC",
+                 run_id,
+             )
+
+         return [self._row_to_step_execution(row) for row in rows]
+
+     # Hook Operations
+
+     async def create_hook(self, hook: Hook) -> None:
+         """Create a hook record."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 INSERT INTO hooks (
+                     hook_id, run_id, token, created_at, received_at, expires_at,
+                     status, payload, metadata
+                 ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
+                 """,
+                 hook.hook_id,
+                 hook.run_id,
+                 hook.token,
+                 hook.created_at,
+                 hook.received_at,
+                 hook.expires_at,
+                 hook.status.value,
+                 hook.payload,
+                 json.dumps(hook.metadata),
+             )
+
+     async def get_hook(self, hook_id: str) -> Hook | None:
+         """Retrieve a hook by ID."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT * FROM hooks WHERE hook_id = $1", hook_id)
+
+         if not row:
+             return None
+
+         return self._row_to_hook(row)
+
+     async def get_hook_by_token(self, token: str) -> Hook | None:
+         """Retrieve a hook by its token."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT * FROM hooks WHERE token = $1", token)
+
+         if not row:
+             return None
+
+         return self._row_to_hook(row)
+
+     async def update_hook_status(
+         self,
+         hook_id: str,
+         status: HookStatus,
+         payload: str | None = None,
+     ) -> None:
+         """Update hook status and optionally payload."""
+         pool = self._ensure_connected()
+
+         updates = ["status = $1"]
+         params: list[Any] = [status.value]
+         param_idx = 2
+
+         if payload is not None:
+             updates.append(f"payload = ${param_idx}")
+             params.append(payload)
+             param_idx += 1
+
+         if status == HookStatus.RECEIVED:
+             updates.append(f"received_at = ${param_idx}")
+             params.append(datetime.now(UTC))
+             param_idx += 1
+
+         params.append(hook_id)
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 f"UPDATE hooks SET {', '.join(updates)} WHERE hook_id = ${param_idx}",
+                 *params,
+             )
+
+     async def list_hooks(
+         self,
+         run_id: str | None = None,
+         status: HookStatus | None = None,
+         limit: int = 100,
+         offset: int = 0,
+     ) -> list[Hook]:
+         """List hooks with optional filtering."""
+         pool = self._ensure_connected()
+
+         conditions = []
+         params: list[Any] = []
+         param_idx = 1
+
+         if run_id:
+             conditions.append(f"run_id = ${param_idx}")
+             params.append(run_id)
+             param_idx += 1
+
+         if status:
+             conditions.append(f"status = ${param_idx}")
+             params.append(status.value)
+             param_idx += 1
+
+         where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
+         params.extend([limit, offset])
+
+         sql = f"""
+             SELECT * FROM hooks
+             {where_clause}
+             ORDER BY created_at DESC
+             LIMIT ${param_idx} OFFSET ${param_idx + 1}
+         """
+
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(sql, *params)
+
+         return [self._row_to_hook(row) for row in rows]
+
+     # Cancellation Flag Operations
+
+     async def set_cancellation_flag(self, run_id: str) -> None:
+         """Set a cancellation flag for a workflow run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 INSERT INTO cancellation_flags (run_id, created_at)
+                 VALUES ($1, $2)
+                 ON CONFLICT (run_id) DO NOTHING
+                 """,
+                 run_id,
+                 datetime.now(UTC),
+             )
+
+     async def check_cancellation_flag(self, run_id: str) -> bool:
+         """Check if a cancellation flag is set for a workflow run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT 1 FROM cancellation_flags WHERE run_id = $1", run_id)
+
+         return row is not None
+
+     async def clear_cancellation_flag(self, run_id: str) -> None:
+         """Clear the cancellation flag for a workflow run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute("DELETE FROM cancellation_flags WHERE run_id = $1", run_id)
+
+     # Continue-As-New Chain Operations
+
+     async def update_run_continuation(
+         self,
+         run_id: str,
+         continued_to_run_id: str,
+     ) -> None:
+         """Update the continuation link for a workflow run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 UPDATE workflow_runs
+                 SET continued_to_run_id = $1, updated_at = $2
+                 WHERE run_id = $3
+                 """,
+                 continued_to_run_id,
+                 datetime.now(UTC),
+                 run_id,
+             )
+
+     async def get_workflow_chain(
+         self,
+         run_id: str,
+     ) -> list[WorkflowRun]:
+         """Get all runs in a continue-as-new chain."""
+         pool = self._ensure_connected()
+
+         # Find the first run in the chain
+         current_id: str | None = run_id
+         async with pool.acquire() as conn:
+             while True:
+                 row = await conn.fetchrow(
+                     "SELECT continued_from_run_id FROM workflow_runs WHERE run_id = $1",
+                     current_id,
+                 )
+
+                 if not row or not row[0]:
+                     break
+
+                 current_id = row[0]
+
+         # Now collect all runs in the chain from first to last
+         runs = []
+         while current_id:
+             run = await self.get_run(current_id)
+             if not run:
+                 break
+             runs.append(run)
+             current_id = run.continued_to_run_id
+
+         return runs
+
+     # Child Workflow Operations
+
+     async def get_children(
+         self,
+         parent_run_id: str,
+         status: RunStatus | None = None,
+     ) -> list[WorkflowRun]:
+         """Get all child workflow runs for a parent workflow."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             if status:
+                 rows = await conn.fetch(
+                     """
+                     SELECT * FROM workflow_runs
+                     WHERE parent_run_id = $1 AND status = $2
+                     ORDER BY created_at ASC
+                     """,
+                     parent_run_id,
+                     status.value,
+                 )
+             else:
+                 rows = await conn.fetch(
+                     """
+                     SELECT * FROM workflow_runs
+                     WHERE parent_run_id = $1
+                     ORDER BY created_at ASC
+                     """,
+                     parent_run_id,
+                 )
+
+         return [self._row_to_workflow_run(row) for row in rows]
+
+     async def get_parent(self, run_id: str) -> WorkflowRun | None:
+         """Get the parent workflow run for a child workflow."""
+         run = await self.get_run(run_id)
+         if not run or not run.parent_run_id:
+             return None
+
+         return await self.get_run(run.parent_run_id)
+
+     async def get_nesting_depth(self, run_id: str) -> int:
+         """Get the nesting depth for a workflow."""
+         run = await self.get_run(run_id)
+         return run.nesting_depth if run else 0
+
+     # Schedule Operations
+
+     async def create_schedule(self, schedule: Schedule) -> None:
+         """Create a new schedule record."""
+         pool = self._ensure_connected()
+
+         # Derive spec_type from the ScheduleSpec
+         spec_type = (
+             "cron" if schedule.spec.cron else ("interval" if schedule.spec.interval else "calendar")
+         )
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 INSERT INTO schedules (
+                     schedule_id, workflow_name, spec, spec_type, timezone,
+                     input_args, input_kwargs, status, overlap_policy,
+                     next_run_time, last_run_time, running_run_ids, metadata,
+                     created_at, updated_at, paused_at, deleted_at
+                 ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
+                 """,
+                 schedule.schedule_id,
+                 schedule.workflow_name,
+                 json.dumps(schedule.spec.to_dict()),
+                 spec_type,
+                 schedule.spec.timezone,
+                 schedule.args,
+                 schedule.kwargs,
+                 schedule.status.value,
+                 schedule.overlap_policy.value,
+                 schedule.next_run_time,
+                 schedule.last_run_at,
+                 json.dumps(schedule.running_run_ids),
+                 "{}",  # metadata - not in current dataclass
+                 schedule.created_at,
+                 schedule.updated_at,
+                 None,  # paused_at - not in current dataclass
+                 None,  # deleted_at - not in current dataclass
+             )
+
+     async def get_schedule(self, schedule_id: str) -> Schedule | None:
+         """Retrieve a schedule by ID."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow("SELECT * FROM schedules WHERE schedule_id = $1", schedule_id)
+
+         if not row:
+             return None
+
+         return self._row_to_schedule(row)
+
+     async def update_schedule(self, schedule: Schedule) -> None:
+         """Update an existing schedule."""
+         pool = self._ensure_connected()
+
+         # Derive spec_type from the ScheduleSpec
+         spec_type = (
+             "cron" if schedule.spec.cron else ("interval" if schedule.spec.interval else "calendar")
+         )
+
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 UPDATE schedules SET
+                     workflow_name = $1, spec = $2, spec_type = $3, timezone = $4,
+                     input_args = $5, input_kwargs = $6, status = $7, overlap_policy = $8,
+                     next_run_time = $9, last_run_time = $10, running_run_ids = $11,
+                     metadata = $12, updated_at = $13, paused_at = $14, deleted_at = $15
+                 WHERE schedule_id = $16
+                 """,
+                 schedule.workflow_name,
+                 json.dumps(schedule.spec.to_dict()),
+                 spec_type,
+                 schedule.spec.timezone,
+                 schedule.args,
+                 schedule.kwargs,
+                 schedule.status.value,
+                 schedule.overlap_policy.value,
+                 schedule.next_run_time,
+                 schedule.last_run_at,
+                 json.dumps(schedule.running_run_ids),
+                 "{}",  # metadata - not in current dataclass
+                 schedule.updated_at,
+                 None,  # paused_at - not in current dataclass
+                 None,  # deleted_at - not in current dataclass
+                 schedule.schedule_id,
+             )
+
+     async def delete_schedule(self, schedule_id: str) -> None:
+         """Mark a schedule as deleted (soft delete)."""
+         pool = self._ensure_connected()
+
+         now = datetime.now(UTC)
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 """
+                 UPDATE schedules
+                 SET status = $1, deleted_at = $2, updated_at = $3
+                 WHERE schedule_id = $4
+                 """,
+                 ScheduleStatus.DELETED.value,
+                 now,
+                 now,
+                 schedule_id,
+             )
+
+     async def list_schedules(
+         self,
+         workflow_name: str | None = None,
+         status: ScheduleStatus | None = None,
+         limit: int = 100,
+         offset: int = 0,
+     ) -> list[Schedule]:
+         """List schedules with optional filtering."""
+         pool = self._ensure_connected()
+
+         conditions = []
+         params: list[Any] = []
+         param_idx = 1
+
+         if workflow_name:
+             conditions.append(f"workflow_name = ${param_idx}")
+             params.append(workflow_name)
+             param_idx += 1
+
+         if status:
+             conditions.append(f"status = ${param_idx}")
+             params.append(status.value)
+             param_idx += 1
+
+         where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
+         params.extend([limit, offset])
+
+         sql = f"""
+             SELECT * FROM schedules
+             {where_clause}
+             ORDER BY created_at DESC
+             LIMIT ${param_idx} OFFSET ${param_idx + 1}
+         """
+
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(sql, *params)
+
+         return [self._row_to_schedule(row) for row in rows]
+
+     async def get_due_schedules(self, now: datetime) -> list[Schedule]:
+         """Get all schedules that are due to run."""
+         pool = self._ensure_connected()
+
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(
+                 """
+                 SELECT * FROM schedules
+                 WHERE status = $1 AND next_run_time IS NOT NULL AND next_run_time <= $2
+                 ORDER BY next_run_time ASC
+                 """,
+                 ScheduleStatus.ACTIVE.value,
+                 now,
+             )
+
+         return [self._row_to_schedule(row) for row in rows]
+
+     async def add_running_run(self, schedule_id: str, run_id: str) -> None:
+         """Add a run_id to the schedule's running_run_ids list."""
+         schedule = await self.get_schedule(schedule_id)
+         if not schedule:
+             raise ValueError(f"Schedule {schedule_id} not found")
+
+         if run_id not in schedule.running_run_ids:
+             schedule.running_run_ids.append(run_id)
+             schedule.updated_at = datetime.now(UTC)
+             await self.update_schedule(schedule)
+
+     async def remove_running_run(self, schedule_id: str, run_id: str) -> None:
+         """Remove a run_id from the schedule's running_run_ids list."""
+         schedule = await self.get_schedule(schedule_id)
+         if not schedule:
+             raise ValueError(f"Schedule {schedule_id} not found")
+
+         if run_id in schedule.running_run_ids:
+             schedule.running_run_ids.remove(run_id)
+             schedule.updated_at = datetime.now(UTC)
+             await self.update_schedule(schedule)
+
+     # Helper methods for converting database rows to domain objects
+
+     def _row_to_workflow_run(self, row: asyncpg.Record) -> WorkflowRun:
+         """Convert database row to WorkflowRun object."""
+         return WorkflowRun(
+             run_id=row["run_id"],
+             workflow_name=row["workflow_name"],
+             status=RunStatus(row["status"]),
+             created_at=row["created_at"],
+             updated_at=row["updated_at"],
+             started_at=row["started_at"],
+             completed_at=row["completed_at"],
+             input_args=row["input_args"],
+             input_kwargs=row["input_kwargs"],
+             result=row["result"],
+             error=row["error"],
+             idempotency_key=row["idempotency_key"],
+             max_duration=row["max_duration"],
+             metadata=json.loads(row["metadata"]) if row["metadata"] else {},
+             recovery_attempts=row["recovery_attempts"],
+             max_recovery_attempts=row["max_recovery_attempts"],
+             recover_on_worker_loss=row["recover_on_worker_loss"],
+             parent_run_id=row["parent_run_id"],
+             nesting_depth=row["nesting_depth"],
+             continued_from_run_id=row["continued_from_run_id"],
+             continued_to_run_id=row["continued_to_run_id"],
+         )
+
+     def _row_to_event(self, row: asyncpg.Record) -> Event:
+         """Convert database row to Event object."""
+         return Event(
+             event_id=row["event_id"],
+             run_id=row["run_id"],
+             sequence=row["sequence"],
+             type=EventType(row["type"]),
+             timestamp=row["timestamp"],
+             data=json.loads(row["data"]) if row["data"] else {},
+         )
+
+     def _row_to_step_execution(self, row: asyncpg.Record) -> StepExecution:
+         """Convert database row to StepExecution object."""
+         return StepExecution(
+             step_id=row["step_id"],
+             run_id=row["run_id"],
+             step_name=row["step_name"],
+             status=StepStatus(row["status"]),
+             created_at=row["created_at"],
+             started_at=row["started_at"],
+             completed_at=row["completed_at"],
+             input_args=row["input_args"],
+             input_kwargs=row["input_kwargs"],
+             result=row["result"],
+             error=row["error"],
+             attempt=row["retry_count"] or 1,
+         )
+
+     def _row_to_hook(self, row: asyncpg.Record) -> Hook:
+         """Convert database row to Hook object."""
+         return Hook(
+             hook_id=row["hook_id"],
+             run_id=row["run_id"],
+             token=row["token"],
+             created_at=row["created_at"],
+             received_at=row["received_at"],
+             expires_at=row["expires_at"],
+             status=HookStatus(row["status"]),
+             payload=row["payload"],
+             metadata=json.loads(row["metadata"]) if row["metadata"] else {},
+         )
+
+     def _row_to_schedule(self, row: asyncpg.Record) -> Schedule:
+         """Convert database row to Schedule object."""
+         # Parse the spec from JSON and create ScheduleSpec
+         spec_data = json.loads(row["spec"]) if row["spec"] else {}
+         spec = ScheduleSpec.from_dict(spec_data)
+
+         return Schedule(
+             schedule_id=row["schedule_id"],
+             workflow_name=row["workflow_name"],
+             spec=spec,
+             status=ScheduleStatus(row["status"]),
+             args=row["input_args"],
+             kwargs=row["input_kwargs"],
+             overlap_policy=OverlapPolicy(row["overlap_policy"]),
+             next_run_time=row["next_run_time"],
+             last_run_at=row["last_run_time"],
+             running_run_ids=json.loads(row["running_run_ids"]) if row["running_run_ids"] else [],
+             created_at=row["created_at"],
+             updated_at=row["updated_at"],
+         )
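
For orientation, a minimal usage sketch of the backend this file adds. It is not part of the released package; it assumes the import path follows the file layout (pyworkflow.storage.postgres) and uses placeholder connection details. The pagination loop follows the (runs, next_cursor) contract of list_runs shown above.

    import asyncio

    from pyworkflow.storage.postgres import PostgresStorageBackend


    async def main() -> None:
        # Discrete parameters; a full DSN such as
        # dsn="postgresql://user:pass@host:5432/db" also works.
        backend = PostgresStorageBackend(
            host="localhost",
            port=5432,
            user="pyworkflow",
            password="secret",  # placeholder credentials
            database="pyworkflow",
        )
        await backend.connect()  # creates the pool and the tables if needed

        try:
            # Page through runs with the keyset cursor: each call returns up
            # to `limit` runs plus a cursor for the next page, or None once
            # the listing is exhausted.
            cursor = None
            while True:
                runs, cursor = await backend.list_runs(limit=50, cursor=cursor)
                for run in runs:
                    print(run.run_id, run.workflow_name, run.status)
                if cursor is None:
                    break
        finally:
            await backend.disconnect()


    asyncio.run(main())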