stabilize-0.9.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. stabilize/__init__.py +29 -0
  2. stabilize/cli.py +1193 -0
  3. stabilize/context/__init__.py +7 -0
  4. stabilize/context/stage_context.py +170 -0
  5. stabilize/dag/__init__.py +15 -0
  6. stabilize/dag/graph.py +215 -0
  7. stabilize/dag/topological.py +199 -0
  8. stabilize/examples/__init__.py +1 -0
  9. stabilize/examples/docker-example.py +759 -0
  10. stabilize/examples/golden-standard-expected-result.txt +1 -0
  11. stabilize/examples/golden-standard.py +488 -0
  12. stabilize/examples/http-example.py +606 -0
  13. stabilize/examples/llama-example.py +662 -0
  14. stabilize/examples/python-example.py +731 -0
  15. stabilize/examples/shell-example.py +399 -0
  16. stabilize/examples/ssh-example.py +603 -0
  17. stabilize/handlers/__init__.py +53 -0
  18. stabilize/handlers/base.py +226 -0
  19. stabilize/handlers/complete_stage.py +209 -0
  20. stabilize/handlers/complete_task.py +75 -0
  21. stabilize/handlers/complete_workflow.py +150 -0
  22. stabilize/handlers/run_task.py +369 -0
  23. stabilize/handlers/start_stage.py +262 -0
  24. stabilize/handlers/start_task.py +74 -0
  25. stabilize/handlers/start_workflow.py +136 -0
  26. stabilize/launcher.py +307 -0
  27. stabilize/migrations/01KDQ4N9QPJ6Q4MCV3V9GHWPV4_initial_schema.sql +97 -0
  28. stabilize/migrations/01KDRK3TXW4R2GERC1WBCQYJGG_rag_embeddings.sql +25 -0
  29. stabilize/migrations/__init__.py +1 -0
  30. stabilize/models/__init__.py +15 -0
  31. stabilize/models/stage.py +389 -0
  32. stabilize/models/status.py +146 -0
  33. stabilize/models/task.py +125 -0
  34. stabilize/models/workflow.py +317 -0
  35. stabilize/orchestrator.py +113 -0
  36. stabilize/persistence/__init__.py +28 -0
  37. stabilize/persistence/connection.py +185 -0
  38. stabilize/persistence/factory.py +136 -0
  39. stabilize/persistence/memory.py +214 -0
  40. stabilize/persistence/postgres.py +655 -0
  41. stabilize/persistence/sqlite.py +674 -0
  42. stabilize/persistence/store.py +235 -0
  43. stabilize/queue/__init__.py +59 -0
  44. stabilize/queue/messages.py +377 -0
  45. stabilize/queue/processor.py +312 -0
  46. stabilize/queue/queue.py +526 -0
  47. stabilize/queue/sqlite_queue.py +354 -0
  48. stabilize/rag/__init__.py +19 -0
  49. stabilize/rag/assistant.py +459 -0
  50. stabilize/rag/cache.py +294 -0
  51. stabilize/stages/__init__.py +11 -0
  52. stabilize/stages/builder.py +253 -0
  53. stabilize/tasks/__init__.py +19 -0
  54. stabilize/tasks/interface.py +335 -0
  55. stabilize/tasks/registry.py +255 -0
  56. stabilize/tasks/result.py +283 -0
  57. stabilize-0.9.2.dist-info/METADATA +301 -0
  58. stabilize-0.9.2.dist-info/RECORD +61 -0
  59. stabilize-0.9.2.dist-info/WHEEL +4 -0
  60. stabilize-0.9.2.dist-info/entry_points.txt +2 -0
  61. stabilize-0.9.2.dist-info/licenses/LICENSE +201 -0
stabilize/persistence/sqlite.py
@@ -0,0 +1,674 @@
+"""
+SQLite execution repository.
+
+Lightweight persistence using SQLite for development and small deployments.
+Uses singleton ConnectionManager for efficient connection sharing.
+"""
+
+from __future__ import annotations
+
+import json
+import sqlite3
+import time
+from collections.abc import Iterator
+from typing import Any
+
+from stabilize.models.stage import StageExecution, SyntheticStageOwner
+from stabilize.models.status import WorkflowStatus
+from stabilize.models.task import TaskExecution
+from stabilize.models.workflow import (
+    PausedDetails,
+    Trigger,
+    Workflow,
+    WorkflowType,
+)
+from stabilize.persistence.store import (
+    WorkflowCriteria,
+    WorkflowNotFoundError,
+    WorkflowStore,
+)
+
+
+class SqliteWorkflowStore(WorkflowStore):
+    """
+    SQLite implementation of WorkflowStore.
+
+    Uses native sqlite3 for file-based or in-memory storage.
+    Suitable for development, testing, and single-node deployments.
+
+    Features:
+    - WAL mode for better concurrent read performance
+    - Foreign key support enabled
+    - JSON stored as TEXT strings
+    - Arrays stored as JSON strings
+    - Thread-local connections managed by singleton ConnectionManager
+    """
+
+    def __init__(
+        self,
+        connection_string: str,
+        create_tables: bool = False,
+    ) -> None:
+        """
+        Initialize the repository.
+
+        Args:
+            connection_string: SQLite connection string (e.g., sqlite:///./db.sqlite)
+            create_tables: Whether to create tables if they don't exist
+        """
+        from stabilize.persistence.connection import get_connection_manager
+
+        self.connection_string = connection_string
+        self._manager = get_connection_manager()
+
+        # Verify connection works
+        conn = self._get_connection()
+        conn.execute("SELECT 1")
+
+        if create_tables:
+            self._create_tables()
+
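A minimal construction sketch, assuming SqliteWorkflowStore is importable from stabilize.persistence.sqlite as the file list above suggests; the connection-string form comes straight from the docstring:

    from stabilize.persistence.sqlite import SqliteWorkflowStore

    # File-backed store; create_tables=True runs the DDL in _create_tables below.
    store = SqliteWorkflowStore("sqlite:///./db.sqlite", create_tables=True)
    assert store.is_healthy()
    store.close()  # releases this thread's connection only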
+    def _get_connection(self) -> sqlite3.Connection:
+        """
+        Get thread-local connection from ConnectionManager.
+
+        Returns a connection configured with:
+        - Row factory for dict-like access
+        - Foreign keys enabled
+        - WAL journal mode for concurrency
+        - 30 second busy timeout
+        """
+        return self._manager.get_sqlite_connection(self.connection_string)
+
+    def close(self) -> None:
+        """Close SQLite connection for current thread."""
+        self._manager.close_sqlite_connection(self.connection_string)
+
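The ConnectionManager itself lives in stabilize/persistence/connection.py and is not part of this hunk; the following is only a sketch of the standard sqlite3 calls that would produce the configuration the docstring describes:

    import sqlite3

    def configure(conn: sqlite3.Connection) -> None:
        # Hypothetical stand-in for what get_sqlite_connection presumably applies.
        conn.row_factory = sqlite3.Row               # dict-like access: row["status"]
        conn.execute("PRAGMA foreign_keys = ON")     # honor ON DELETE CASCADE
        conn.execute("PRAGMA journal_mode = WAL")    # readers don't block the writer
        conn.execute("PRAGMA busy_timeout = 30000")  # 30 seconds, expressed in ms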
+    def _create_tables(self) -> None:
+        """Create database tables if they don't exist."""
+        schema = """
+        CREATE TABLE IF NOT EXISTS pipeline_executions (
+            id TEXT PRIMARY KEY,
+            type TEXT NOT NULL,
+            application TEXT NOT NULL,
+            name TEXT,
+            status TEXT NOT NULL,
+            start_time INTEGER,
+            end_time INTEGER,
+            start_time_expiry INTEGER,
+            trigger TEXT,
+            is_canceled INTEGER DEFAULT 0,
+            canceled_by TEXT,
+            cancellation_reason TEXT,
+            paused TEXT,
+            pipeline_config_id TEXT,
+            is_limit_concurrent INTEGER DEFAULT 0,
+            max_concurrent_executions INTEGER DEFAULT 0,
+            keep_waiting_pipelines INTEGER DEFAULT 0,
+            origin TEXT,
+            created_at TEXT DEFAULT (datetime('now'))
+        );
+
+        CREATE TABLE IF NOT EXISTS stage_executions (
+            id TEXT PRIMARY KEY,
+            execution_id TEXT NOT NULL REFERENCES pipeline_executions(id) ON DELETE CASCADE,
+            ref_id TEXT NOT NULL,
+            type TEXT NOT NULL,
+            name TEXT,
+            status TEXT NOT NULL,
+            context TEXT DEFAULT '{}',
+            outputs TEXT DEFAULT '{}',
+            requisite_stage_ref_ids TEXT,
+            parent_stage_id TEXT,
+            synthetic_stage_owner TEXT,
+            start_time INTEGER,
+            end_time INTEGER,
+            start_time_expiry INTEGER,
+            scheduled_time INTEGER,
+            UNIQUE(execution_id, ref_id)
+        );
+
+        CREATE TABLE IF NOT EXISTS task_executions (
+            id TEXT PRIMARY KEY,
+            stage_id TEXT NOT NULL REFERENCES stage_executions(id) ON DELETE CASCADE,
+            name TEXT NOT NULL,
+            implementing_class TEXT NOT NULL,
+            status TEXT NOT NULL,
+            start_time INTEGER,
+            end_time INTEGER,
+            stage_start INTEGER DEFAULT 0,
+            stage_end INTEGER DEFAULT 0,
+            loop_start INTEGER DEFAULT 0,
+            loop_end INTEGER DEFAULT 0,
+            task_exception_details TEXT DEFAULT '{}'
+        );
+
+        CREATE TABLE IF NOT EXISTS queue_messages (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            message_id TEXT NOT NULL UNIQUE,
+            message_type TEXT NOT NULL,
+            payload TEXT NOT NULL,
+            deliver_at TEXT NOT NULL DEFAULT (datetime('now')),
+            attempts INTEGER DEFAULT 0,
+            max_attempts INTEGER DEFAULT 10,
+            locked_until TEXT,
+            version INTEGER DEFAULT 0,
+            created_at TEXT DEFAULT (datetime('now'))
+        );
+
+        CREATE INDEX IF NOT EXISTS idx_execution_application
+            ON pipeline_executions(application);
+        CREATE INDEX IF NOT EXISTS idx_execution_config
+            ON pipeline_executions(pipeline_config_id);
+        CREATE INDEX IF NOT EXISTS idx_execution_status
+            ON pipeline_executions(status);
+        CREATE INDEX IF NOT EXISTS idx_stage_execution
+            ON stage_executions(execution_id);
+        CREATE INDEX IF NOT EXISTS idx_task_stage
+            ON task_executions(stage_id);
+        CREATE INDEX IF NOT EXISTS idx_queue_deliver
+            ON queue_messages(deliver_at);
+        CREATE INDEX IF NOT EXISTS idx_queue_locked
+            ON queue_messages(locked_until);
+        """
+
+        conn = self._get_connection()
+        for statement in schema.split(";"):
+            statement = statement.strip()
+            if statement:
+                conn.execute(statement)
+        conn.commit()
+
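Splitting the script on ";" works here because no statement above embeds a semicolon inside a string literal. The sqlite3 module's executescript batches the same DDL in one call; note that it issues an implicit COMMIT before running:

    # Equivalent batch execution for this semicolon-free-literal schema:
    conn.executescript(schema)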
+    def store(self, execution: Workflow) -> None:
+        """Store a complete execution."""
+        conn = self._get_connection()
+
+        # Insert execution
+        conn.execute(
+            """
+            INSERT INTO pipeline_executions (
+                id, type, application, name, status, start_time, end_time,
+                start_time_expiry, trigger, is_canceled, canceled_by,
+                cancellation_reason, paused, pipeline_config_id,
+                is_limit_concurrent, max_concurrent_executions,
+                keep_waiting_pipelines, origin
+            ) VALUES (
+                :id, :type, :application, :name, :status, :start_time, :end_time,
+                :start_time_expiry, :trigger, :is_canceled, :canceled_by,
+                :cancellation_reason, :paused, :pipeline_config_id,
+                :is_limit_concurrent, :max_concurrent_executions,
+                :keep_waiting_pipelines, :origin
+            )
+            """,
+            self._execution_to_dict(execution),
+        )
+
+        # Insert stages
+        for stage in execution.stages:
+            self._insert_stage(conn, stage, execution.id)
+
+        conn.commit()
+
+    def retrieve(self, execution_id: str) -> Workflow:
+        """Retrieve an execution by ID."""
+        conn = self._get_connection()
+
+        # Get execution
+        result = conn.execute(
+            "SELECT * FROM pipeline_executions WHERE id = :id",
+            {"id": execution_id},
+        )
+        row = result.fetchone()
+        if not row:
+            raise WorkflowNotFoundError(execution_id)
+
+        execution = self._row_to_execution(row)
+
+        # Get stages
+        result = conn.execute(
+            """
+            SELECT * FROM stage_executions
+            WHERE execution_id = :execution_id
+            """,
+            {"execution_id": execution_id},
+        )
+        stages = []
+        for stage_row in result.fetchall():
+            stage = self._row_to_stage(stage_row)
+            stage._execution = execution
+
+            # Get tasks for stage
+            task_result = conn.execute(
+                """
+                SELECT * FROM task_executions
+                WHERE stage_id = :stage_id
+                """,
+                {"stage_id": stage.id},
+            )
+            for task_row in task_result.fetchall():
+                task = self._row_to_task(task_row)
+                task._stage = stage
+                stage.tasks.append(task)
+
+            stages.append(stage)
+
+        execution.stages = stages
+        return execution
+
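retrieve raises instead of returning None, so callers typically guard lookups. A small sketch with a hypothetical execution id:

    from stabilize.persistence.store import WorkflowNotFoundError

    try:
        wf = store.retrieve("some-execution-id")  # hypothetical id
    except WorkflowNotFoundError:
        wf = None  # unknown id; the caller decides how to report it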
+    def update_status(self, execution: Workflow) -> None:
+        """Update execution status."""
+        conn = self._get_connection()
+        conn.execute(
+            """
+            UPDATE pipeline_executions SET
+                status = :status,
+                start_time = :start_time,
+                end_time = :end_time,
+                is_canceled = :is_canceled,
+                canceled_by = :canceled_by,
+                cancellation_reason = :cancellation_reason,
+                paused = :paused
+            WHERE id = :id
+            """,
+            {
+                "id": execution.id,
+                "status": execution.status.name,
+                "start_time": execution.start_time,
+                "end_time": execution.end_time,
+                "is_canceled": 1 if execution.is_canceled else 0,
+                "canceled_by": execution.canceled_by,
+                "cancellation_reason": execution.cancellation_reason,
+                "paused": (json.dumps(self._paused_to_dict(execution.paused)) if execution.paused else None),
+            },
+        )
+        conn.commit()
+
+    def delete(self, execution_id: str) -> None:
+        """Delete an execution."""
+        conn = self._get_connection()
+        conn.execute(
+            "DELETE FROM pipeline_executions WHERE id = :id",
+            {"id": execution_id},
+        )
+        conn.commit()
+
+    def store_stage(self, stage: StageExecution) -> None:
+        """Store or update a stage."""
+        conn = self._get_connection()
+
+        # Check if stage exists
+        result = conn.execute(
+            "SELECT id FROM stage_executions WHERE id = :id",
+            {"id": stage.id},
+        )
+        exists = result.fetchone() is not None
+
+        if exists:
+            # Update
+            conn.execute(
+                """
+                UPDATE stage_executions SET
+                    status = :status,
+                    context = :context,
+                    outputs = :outputs,
+                    start_time = :start_time,
+                    end_time = :end_time
+                WHERE id = :id
+                """,
+                {
+                    "id": stage.id,
+                    "status": stage.status.name,
+                    "context": json.dumps(stage.context),
+                    "outputs": json.dumps(stage.outputs),
+                    "start_time": stage.start_time,
+                    "end_time": stage.end_time,
+                },
+            )
+
+            # Update tasks
+            for task in stage.tasks:
+                self._upsert_task(conn, task, stage.id)
+        else:
+            self._insert_stage(conn, stage, stage.execution.id)
+
+        conn.commit()
+
+    def add_stage(self, stage: StageExecution) -> None:
+        """Add a new stage."""
+        self.store_stage(stage)
+
+    def remove_stage(
+        self,
+        execution: Workflow,
+        stage_id: str,
+    ) -> None:
+        """Remove a stage."""
+        conn = self._get_connection()
+        conn.execute(
+            "DELETE FROM stage_executions WHERE id = :id",
+            {"id": stage_id},
+        )
+        conn.commit()
+
+    def retrieve_by_pipeline_config_id(
+        self,
+        pipeline_config_id: str,
+        criteria: WorkflowCriteria | None = None,
+    ) -> Iterator[Workflow]:
+        """Retrieve executions by pipeline config ID."""
+        query = """
+            SELECT id FROM pipeline_executions
+            WHERE pipeline_config_id = :config_id
+        """
+        params: dict[str, Any] = {"config_id": pipeline_config_id}
+
+        if criteria and criteria.statuses:
+            status_names = [s.name for s in criteria.statuses]
+            placeholders = ", ".join(f":status_{i}" for i in range(len(status_names)))
+            query += f" AND status IN ({placeholders})"
+            for i, name in enumerate(status_names):
+                params[f"status_{i}"] = name
+
+        query += " ORDER BY start_time DESC"
+
+        if criteria and criteria.page_size:
+            query += f" LIMIT {criteria.page_size}"
+
+        conn = self._get_connection()
+        result = conn.execute(query, params)
+        for row in result.fetchall():
+            yield self.retrieve(row[0])
+
+    def retrieve_by_application(
+        self,
+        application: str,
+        criteria: WorkflowCriteria | None = None,
+    ) -> Iterator[Workflow]:
+        """Retrieve executions by application."""
+        query = """
+            SELECT id FROM pipeline_executions
+            WHERE application = :application
+        """
+        params: dict[str, Any] = {"application": application}
+
+        if criteria and criteria.statuses:
+            status_names = [s.name for s in criteria.statuses]
+            placeholders = ", ".join(f":status_{i}" for i in range(len(status_names)))
+            query += f" AND status IN ({placeholders})"
+            for i, name in enumerate(status_names):
+                params[f"status_{i}"] = name
+
+        query += " ORDER BY start_time DESC"
+
+        if criteria and criteria.page_size:
+            query += f" LIMIT {criteria.page_size}"
+
+        conn = self._get_connection()
+        result = conn.execute(query, params)
+        for row in result.fetchall():
+            yield self.retrieve(row[0])
+
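Both readers hydrate workflows lazily, one per yield, so stopping iteration early avoids retrieving the remaining executions. A filtering sketch, assuming WorkflowCriteria accepts statuses and page_size as keyword arguments (the only two fields the code reads):

    from stabilize.models.status import WorkflowStatus
    from stabilize.persistence.store import WorkflowCriteria

    criteria = WorkflowCriteria(statuses=[WorkflowStatus.RUNNING], page_size=20)
    for wf in store.retrieve_by_application("my-app", criteria):  # hypothetical app name
        print(wf.id, wf.status.name)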
+    def pause(self, execution_id: str, paused_by: str) -> None:
+        """Pause an execution."""
+        paused = PausedDetails(
+            paused_by=paused_by,
+            pause_time=int(time.time() * 1000),
+        )
+
+        conn = self._get_connection()
+        conn.execute(
+            """
+            UPDATE pipeline_executions SET
+                status = :status,
+                paused = :paused
+            WHERE id = :id
+            """,
+            {
+                "id": execution_id,
+                "status": WorkflowStatus.PAUSED.name,
+                "paused": json.dumps(self._paused_to_dict(paused)),
+            },
+        )
+        conn.commit()
+
+    def resume(self, execution_id: str) -> None:
+        """Resume a paused execution."""
+        # First get current paused details
+        execution = self.retrieve(execution_id)
+        if execution.paused and execution.paused.pause_time:
+            current_time = int(time.time() * 1000)
+            execution.paused.resume_time = current_time
+            execution.paused.paused_ms = current_time - execution.paused.pause_time
+
+        conn = self._get_connection()
+        conn.execute(
+            """
+            UPDATE pipeline_executions SET
+                status = :status,
+                paused = :paused
+            WHERE id = :id
+            """,
+            {
+                "id": execution_id,
+                "status": WorkflowStatus.RUNNING.name,
+                "paused": (json.dumps(self._paused_to_dict(execution.paused)) if execution.paused else None),
+            },
+        )
+        conn.commit()
+
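All timestamps here are epoch milliseconds, so paused_ms is a plain difference. A round-trip sketch with a hypothetical execution id:

    store.pause("wf-id", paused_by="alice")  # status -> PAUSED, pause_time recorded
    store.resume("wf-id")                    # status -> RUNNING, resume_time recorded
    wf = store.retrieve("wf-id")
    if wf.paused:
        # paused_ms == resume_time - pause_time (both epoch milliseconds)
        print(wf.paused.paused_ms)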
459
+ def cancel(
460
+ self,
461
+ execution_id: str,
462
+ canceled_by: str,
463
+ reason: str,
464
+ ) -> None:
465
+ """Cancel an execution."""
466
+ conn = self._get_connection()
467
+ conn.execute(
468
+ """
469
+ UPDATE pipeline_executions SET
470
+ is_canceled = 1,
471
+ canceled_by = :canceled_by,
472
+ cancellation_reason = :reason
473
+ WHERE id = :id
474
+ """,
475
+ {
476
+ "id": execution_id,
477
+ "canceled_by": canceled_by,
478
+ "reason": reason,
479
+ },
480
+ )
481
+ conn.commit()
482
+
483
+ # ========== Helper Methods ==========
484
+
+    def _insert_stage(self, conn: sqlite3.Connection, stage: StageExecution, execution_id: str) -> None:
+        """Insert a stage."""
+        conn.execute(
+            """
+            INSERT INTO stage_executions (
+                id, execution_id, ref_id, type, name, status, context, outputs,
+                requisite_stage_ref_ids, parent_stage_id, synthetic_stage_owner,
+                start_time, end_time, start_time_expiry, scheduled_time
+            ) VALUES (
+                :id, :execution_id, :ref_id, :type, :name, :status,
+                :context, :outputs, :requisite_stage_ref_ids,
+                :parent_stage_id, :synthetic_stage_owner, :start_time,
+                :end_time, :start_time_expiry, :scheduled_time
+            )
+            """,
+            {
+                "id": stage.id,
+                "execution_id": execution_id,
+                "ref_id": stage.ref_id,
+                "type": stage.type,
+                "name": stage.name,
+                "status": stage.status.name,
+                "context": json.dumps(stage.context),
+                "outputs": json.dumps(stage.outputs),
+                "requisite_stage_ref_ids": json.dumps(list(stage.requisite_stage_ref_ids)),
+                "parent_stage_id": stage.parent_stage_id,
+                "synthetic_stage_owner": (stage.synthetic_stage_owner.value if stage.synthetic_stage_owner else None),
+                "start_time": stage.start_time,
+                "end_time": stage.end_time,
+                "start_time_expiry": stage.start_time_expiry,
+                "scheduled_time": stage.scheduled_time,
+            },
+        )
+
+        # Insert tasks
+        for task in stage.tasks:
+            self._upsert_task(conn, task, stage.id)
+
+    def _upsert_task(self, conn: sqlite3.Connection, task: TaskExecution, stage_id: str) -> None:
+        """Insert or update a task."""
+        conn.execute(
+            """
+            INSERT OR REPLACE INTO task_executions (
+                id, stage_id, name, implementing_class, status,
+                start_time, end_time, stage_start, stage_end,
+                loop_start, loop_end, task_exception_details
+            ) VALUES (
+                :id, :stage_id, :name, :implementing_class, :status,
+                :start_time, :end_time, :stage_start, :stage_end,
+                :loop_start, :loop_end, :task_exception_details
+            )
+            """,
+            {
+                "id": task.id,
+                "stage_id": stage_id,
+                "name": task.name,
+                "implementing_class": task.implementing_class,
+                "status": task.status.name,
+                "start_time": task.start_time,
+                "end_time": task.end_time,
+                "stage_start": 1 if task.stage_start else 0,
+                "stage_end": 1 if task.stage_end else 0,
+                "loop_start": 1 if task.loop_start else 0,
+                "loop_end": 1 if task.loop_end else 0,
+                "task_exception_details": json.dumps(task.task_exception_details),
+            },
+        )
+
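INSERT OR REPLACE is SQLite's delete-then-insert upsert: on a primary-key conflict it deletes the old row before inserting the replacement, which would fire ON DELETE CASCADE on any dependents. Nothing references task_executions.id, so that is harmless here; the in-place alternative (SQLite 3.24+) looks like this self-contained sketch:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE t (id TEXT PRIMARY KEY, status TEXT)")
    conn.execute("INSERT INTO t VALUES ('a', 'RUNNING')")
    # ON CONFLICT ... DO UPDATE rewrites columns without deleting the row,
    # so cascading deletes on child tables can never trigger.
    conn.execute(
        "INSERT INTO t VALUES ('a', 'SUCCEEDED') "
        "ON CONFLICT(id) DO UPDATE SET status = excluded.status"
    )
    assert conn.execute("SELECT status FROM t").fetchone()[0] == "SUCCEEDED"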
+    def _execution_to_dict(self, execution: Workflow) -> dict[str, Any]:
+        """Convert execution to dictionary for storage."""
+        return {
+            "id": execution.id,
+            "type": execution.type.value,
+            "application": execution.application,
+            "name": execution.name,
+            "status": execution.status.name,
+            "start_time": execution.start_time,
+            "end_time": execution.end_time,
+            "start_time_expiry": execution.start_time_expiry,
+            "trigger": json.dumps(execution.trigger.to_dict()),
+            "is_canceled": 1 if execution.is_canceled else 0,
+            "canceled_by": execution.canceled_by,
+            "cancellation_reason": execution.cancellation_reason,
+            "paused": (json.dumps(self._paused_to_dict(execution.paused)) if execution.paused else None),
+            "pipeline_config_id": execution.pipeline_config_id,
+            "is_limit_concurrent": 1 if execution.is_limit_concurrent else 0,
+            "max_concurrent_executions": execution.max_concurrent_executions,
+            "keep_waiting_pipelines": 1 if execution.keep_waiting_pipelines else 0,
+            "origin": execution.origin,
+        }
+
+    def _paused_to_dict(self, paused: PausedDetails | None) -> dict[str, Any] | None:
+        """Convert PausedDetails to dict."""
+        if paused is None:
+            return None
+        return {
+            "paused_by": paused.paused_by,
+            "pause_time": paused.pause_time,
+            "resume_time": paused.resume_time,
+            "paused_ms": paused.paused_ms,
+        }
+
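Nested objects never get their own tables; they travel as JSON TEXT, matching the schema's paused TEXT column. A round-trip sketch for the paused payload:

    import json

    paused = {"paused_by": "alice", "pause_time": 1700000000000,
              "resume_time": None, "paused_ms": 0}
    stored = json.dumps(paused)          # what lands in pipeline_executions.paused
    assert json.loads(stored) == paused  # what _row_to_execution reads back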
+    def _row_to_execution(self, row: sqlite3.Row) -> Workflow:
+        """Convert database row to Workflow."""
+        trigger_data = json.loads(row["trigger"] or "{}")
+        paused_data = json.loads(row["paused"]) if row["paused"] else None
+
+        paused = None
+        if paused_data:
+            paused = PausedDetails(
+                paused_by=paused_data.get("paused_by", ""),
+                pause_time=paused_data.get("pause_time"),
+                resume_time=paused_data.get("resume_time"),
+                paused_ms=paused_data.get("paused_ms", 0),
+            )
+
+        return Workflow(
+            id=row["id"],
+            type=WorkflowType(row["type"]),
+            application=row["application"],
+            name=row["name"] or "",
+            status=WorkflowStatus[row["status"]],
+            start_time=row["start_time"],
+            end_time=row["end_time"],
+            start_time_expiry=row["start_time_expiry"],
+            trigger=Trigger.from_dict(trigger_data),
+            is_canceled=bool(row["is_canceled"]),
+            canceled_by=row["canceled_by"],
+            cancellation_reason=row["cancellation_reason"],
+            paused=paused,
+            pipeline_config_id=row["pipeline_config_id"],
+            is_limit_concurrent=bool(row["is_limit_concurrent"]),
+            max_concurrent_executions=row["max_concurrent_executions"] or 0,
+            keep_waiting_pipelines=bool(row["keep_waiting_pipelines"]),
+            origin=row["origin"] or "unknown",
+        )
+
+    def _row_to_stage(self, row: sqlite3.Row) -> StageExecution:
+        """Convert database row to StageExecution."""
+        context = json.loads(row["context"] or "{}")
+        outputs = json.loads(row["outputs"] or "{}")
+        requisite_ids = json.loads(row["requisite_stage_ref_ids"] or "[]")
+
+        synthetic_owner = None
+        if row["synthetic_stage_owner"]:
+            synthetic_owner = SyntheticStageOwner(row["synthetic_stage_owner"])
+
+        return StageExecution(
+            id=row["id"],
+            ref_id=row["ref_id"],
+            type=row["type"],
+            name=row["name"] or "",
+            status=WorkflowStatus[row["status"]],
+            context=context,
+            outputs=outputs,
+            requisite_stage_ref_ids=set(requisite_ids),
+            parent_stage_id=row["parent_stage_id"],
+            synthetic_stage_owner=synthetic_owner,
+            start_time=row["start_time"],
+            end_time=row["end_time"],
+            start_time_expiry=row["start_time_expiry"],
+            scheduled_time=row["scheduled_time"],
+        )
+
+    def _row_to_task(self, row: sqlite3.Row) -> TaskExecution:
+        """Convert database row to TaskExecution."""
+        exception_details = json.loads(row["task_exception_details"] or "{}")
+
+        return TaskExecution(
+            id=row["id"],
+            name=row["name"],
+            implementing_class=row["implementing_class"],
+            status=WorkflowStatus[row["status"]],
+            start_time=row["start_time"],
+            end_time=row["end_time"],
+            stage_start=bool(row["stage_start"]),
+            stage_end=bool(row["stage_end"]),
+            loop_start=bool(row["loop_start"]),
+            loop_end=bool(row["loop_end"]),
+            task_exception_details=exception_details,
+        )
+
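Note the two enum conventions in these row mappers: statuses were written with .name and are restored by Enum[name] indexing, while workflow type and synthetic owner were written with .value and are restored by calling the enum. A sketch with a hypothetical enum:

    from enum import Enum

    class Phase(Enum):
        RUNNING = "running"

    assert Phase["RUNNING"] is Phase.RUNNING  # by name, like WorkflowStatus[row["status"]]
    assert Phase("running") is Phase.RUNNING  # by value, like WorkflowType(row["type"])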
+    def is_healthy(self) -> bool:
+        """Check if the database connection is healthy."""
+        try:
+            conn = self._get_connection()
+            conn.execute("SELECT 1")
+            return True
+        except Exception:
+            return False