loom_core-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- loom_core-0.1.0.dist-info/METADATA +342 -0
- loom_core-0.1.0.dist-info/RECORD +50 -0
- loom_core-0.1.0.dist-info/WHEEL +5 -0
- loom_core-0.1.0.dist-info/entry_points.txt +2 -0
- loom_core-0.1.0.dist-info/licenses/LICENSE +21 -0
- loom_core-0.1.0.dist-info/top_level.txt +1 -0
- src/__init__.py +45 -0
- src/cli/__init__.py +5 -0
- src/cli/cli.py +246 -0
- src/common/activity.py +30 -0
- src/common/config.py +9 -0
- src/common/errors.py +64 -0
- src/common/workflow.py +56 -0
- src/core/__init__.py +0 -0
- src/core/compiled.py +41 -0
- src/core/context.py +256 -0
- src/core/engine.py +106 -0
- src/core/handle.py +166 -0
- src/core/logger.py +60 -0
- src/core/runner.py +53 -0
- src/core/state.py +96 -0
- src/core/worker.py +147 -0
- src/core/workflow.py +168 -0
- src/database/__init__.py +0 -0
- src/database/db.py +716 -0
- src/decorators/__init__.py +0 -0
- src/decorators/activity.py +126 -0
- src/decorators/workflow.py +46 -0
- src/lib/progress.py +109 -0
- src/lib/utils.py +25 -0
- src/migrations/down/001_setup_pragma.sql +5 -0
- src/migrations/down/002_create_workflows.sql +3 -0
- src/migrations/down/003.create_events.sql +3 -0
- src/migrations/down/004.create_tasks.sql +3 -0
- src/migrations/down/005.create_indexes.sql +5 -0
- src/migrations/down/006_auto_update_triggers.sql +4 -0
- src/migrations/down/007_create_logs.sql +1 -0
- src/migrations/up/001_setup_pragma.sql +11 -0
- src/migrations/up/002_create_workflows.sql +15 -0
- src/migrations/up/003_create_events.sql +13 -0
- src/migrations/up/004_create_tasks.sql +23 -0
- src/migrations/up/005_create_indexes.sql +11 -0
- src/migrations/up/006_auto_update_triggers.sql +19 -0
- src/migrations/up/007_create_logs.sql +10 -0
- src/schemas/__init__.py +0 -0
- src/schemas/activity.py +13 -0
- src/schemas/database.py +17 -0
- src/schemas/events.py +70 -0
- src/schemas/tasks.py +58 -0
- src/schemas/workflow.py +33 -0
src/database/db.py
ADDED
@@ -0,0 +1,716 @@

import json
import os
from datetime import datetime, timezone
from typing import Any, Dict, Generic, Iterable, List
from uuid import uuid4

import aiosqlite

from ..common.config import DATA_ROOT, DATABASE
from ..common.errors import WorkflowNotFoundError
from ..lib.utils import get_downgrade_migrations, get_upgrade_migrations
from ..schemas.activity import ActivityMetadata
from ..schemas.database import WorkflowInput
from ..schemas.events import Event
from ..schemas.tasks import Task
from ..schemas.workflow import InputT, StateT


class Database(Generic[InputT, StateT]):
    """Async SQLite database interface for workflow orchestration.

    This class provides a comprehensive interface for managing workflow data,
    including workflow instances, events, and tasks. It supports ACID transactions
    and maintains data consistency for the Loom workflow orchestration system.

    Type Parameters:
        InputT: The input type for workflows
        StateT: The state type for workflows
    """

    def __init__(self) -> None:
        """Initialize the database with migration scripts."""
        self.upgrade_migrations = get_upgrade_migrations()
        self.downgrade_migrations = get_downgrade_migrations()

    async def _init_db(self) -> None:
        """Initialize the database by creating directories and running migrations.

        Creates the data directory if it doesn't exist and applies all upgrade
        migrations to set up the database schema.
        """
        # Ensure data directory exists
        if not os.path.exists(DATA_ROOT):
            os.makedirs(DATA_ROOT, exist_ok=True)

        # Create and migrate database if it doesn't exist
        if not os.path.exists(DATABASE):
            async with aiosqlite.connect(DATABASE) as conn:
                for migration in self.upgrade_migrations:
                    await conn.executescript(migration["sql"])
                await conn.commit()

    async def query(self, sql: str, params: tuple = ()) -> Iterable[aiosqlite.Row]:
        """Execute a SELECT query and return all results.

        Args:
            sql: The SQL query string
            params: Query parameters as a tuple

        Returns:
            List of Row objects containing query results
        """
        async with aiosqlite.connect(DATABASE) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(sql, params)
            results = await cursor.fetchall()
            return results

    async def fetchone(self, sql: str, params: tuple = ()) -> aiosqlite.Row | None:
        """Execute a SELECT query and return the first result.

        Args:
            sql: The SQL query string
            params: Query parameters as a tuple

        Returns:
            First Row object or None if no results
        """
        async with aiosqlite.connect(DATABASE) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(sql, params)
            result = await cursor.fetchone()
            await conn.commit()
            return result

    async def execute(self, sql: str, params: tuple = ()) -> None:
        """Execute a SQL statement (INSERT, UPDATE, DELETE).

        Args:
            sql: The SQL statement string
            params: Statement parameters as a tuple
        """
        async with aiosqlite.connect(DATABASE) as conn:
            conn.row_factory = aiosqlite.Row
            await conn.execute(sql, params)
            await conn.commit()

    async def get_workflow_events(self, workflow_id: str) -> List[Event]:
        """Retrieve all events for a specific workflow in chronological order.

        Args:
            workflow_id: Unique identifier of the workflow

        Returns:
            List of Event objects ordered by creation time
        """
        sql = """
            SELECT type, payload
            FROM events
            WHERE workflow_id = ?
            ORDER BY id ASC
        """

        async with aiosqlite.connect(DATABASE) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(sql, (workflow_id,))
            rows = await cursor.fetchall()

        events: List[Event] = []
        for row in rows:
            events.append(
                Event(
                    type=row["type"],
                    payload=json.loads(row["payload"]),
                )
            )
        return events

    async def get_workflow_info(self, workflow_id: str) -> Dict[str, Any]:
        """Retrieve complete information for a specific workflow.

        Args:
            workflow_id: Unique identifier of the workflow

        Returns:
            Dictionary containing workflow information

        Raises:
            WorkflowNotFoundError: If the workflow doesn't exist
        """
        sql = """
            SELECT *
            FROM workflows
            WHERE id = ?
        """

        async with aiosqlite.connect(DATABASE) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(sql, (workflow_id,))
            row = await cursor.fetchone()

        if row:
            return {
                "id": row["id"],
                "name": row["name"],
                "status": row["status"],
                "module": row["module"],
                "input": row["input"],
                "created_at": row["created_at"],
                "updated_at": row["updated_at"],
            }

        raise WorkflowNotFoundError(f"Workflow with ID {workflow_id} not found.")

    async def get_workflow_status(self, workflow_id: str) -> str:
        """Retrieve only the status of a specific workflow (optimized query).

        This is a performance-optimized version of get_workflow_info when
        only the status is needed.

        Args:
            workflow_id: Unique identifier of the workflow

        Returns:
            Workflow status string

        Raises:
            WorkflowNotFoundError: If the workflow doesn't exist
        """
        sql = """
            SELECT status
            FROM workflows
            WHERE id = ?
        """

        async with aiosqlite.connect(DATABASE) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(sql, (workflow_id,))
            row = await cursor.fetchone()

        if row:
            return row["status"]  # type: ignore

        raise WorkflowNotFoundError(f"Workflow with ID {workflow_id} not found.")

    # === Workflow Management Methods ===
    async def recreate_workflow_task(self, workflow_id: str) -> None:
        """Recreate the initial task for a workflow.

        This method is useful for restarting or retrying a workflow by
        recreating its initial task.

        Args:
            workflow_id: Unique identifier of the workflow
        """
        task_id = self._create_id()

        workflow = await self.get_workflow_info(workflow_id)

        # Schedule first step
        task_sql = """
            INSERT INTO tasks (id, workflow_id, kind, target, run_at, status)
            VALUES (?, ?, 'STEP', ?, CURRENT_TIMESTAMP, 'PENDING')
        """
        task_params = (
            task_id,
            workflow_id,
            workflow["name"],
        )
        await self.execute(task_sql, task_params)

    async def create_workflow(self, workflow: WorkflowInput, input: InputT) -> str:
        """Create a new workflow instance with initial state.

        Creates the workflow record, adds a WORKFLOW_STARTED event, and schedules
        the first step for execution.

        Args:
            workflow: Workflow metadata and definition
            input: Input data for the workflow

        Returns:
            Unique workflow identifier
        """
        workflow_id = self._create_id()
        task_id = self._create_id()

        # Create workflow record
        workflow_sql = """
            INSERT INTO workflows (id, name, description, version, status, module, input)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        """
        workflow_params = (
            workflow_id,
            workflow["name"],
            workflow["description"],
            workflow["version"],
            workflow["status"],
            workflow["module"],
            json.dumps(input),
        )
        await self.execute(workflow_sql, workflow_params)

        # Add workflow started event
        event_sql = """
            INSERT INTO events (workflow_id, type, payload)
            VALUES (?, 'WORKFLOW_STARTED', ?)
        """
        event_params = (
            workflow_id,
            json.dumps({"input": input}),
        )
        await self.execute(event_sql, event_params)

        # Schedule first step
        task_sql = """
            INSERT INTO tasks (id, workflow_id, kind, target, run_at, status)
            VALUES (?, ?, 'STEP', ?, CURRENT_TIMESTAMP, 'PENDING')
        """
        task_params = (
            task_id,
            workflow_id,
            workflow["name"],
        )
        await self.execute(task_sql, task_params)

        return workflow_id

    async def create_event(
        self, workflow_id: str, type: str, payload: Dict[str, Any]
    ) -> None:
|
|
282
|
+
"""Create a generic event for a workflow.
|
|
283
|
+
|
|
284
|
+
Args:
|
|
285
|
+
workflow_id: Target workflow identifier
|
|
286
|
+
event_type: Type of the event (e.g., 'CUSTOM_EVENT')
|
|
287
|
+
payload: Event data payload
|
|
288
|
+
"""
        await self.execute(
            """
            INSERT INTO events (workflow_id, type, payload)
            VALUES (?, ?, ?)
            """,
            (workflow_id, type, json.dumps(payload)),
        )

    async def create_signal_event(
        self, workflow_id: str, name: str, payload: Dict[str, Any]
    ) -> None:
        """Create a signal event for a running workflow.

        Signals can be sent to workflows to trigger conditional logic or
        provide external input during execution.

        Args:
            workflow_id: Target workflow identifier
            name: Signal name/identifier
            payload: Signal data payload

        Raises:
            WorkflowNotFoundError: If the workflow doesn't exist
            RuntimeError: If the workflow is not in RUNNING state
        """
        # Verify workflow exists and is running
        row = await self.fetchone(
            """
            SELECT id, status
            FROM workflows
            WHERE id = ?
            """,
            (workflow_id,),
        )

        if not row:
            raise WorkflowNotFoundError(f"Workflow with ID {workflow_id} not found.")

        if row["status"] != "RUNNING":
            raise RuntimeError(
                f"Cannot signal workflow with ID {workflow_id} because it is not running."
            )

        # Create signal event
        signal_payload = {
            "name": name,
            "payload": payload,
            "sent_at": datetime.now(timezone.utc).isoformat(),
        }

        await self.execute(
            """
            INSERT INTO events (workflow_id, type, payload)
            VALUES (?, 'SIGNAL_RECEIVED', ?)
            """,
            (workflow_id, json.dumps(signal_payload)),
        )

    async def cancel_workflow(
        self, workflow_id: str, reason: str | None = None
    ) -> None:
        """Cancel a workflow and all its pending tasks.

        Cancellation marks the workflow as CANCELLED and fails all pending
        tasks associated with it. Workflows already in a terminal state
        (COMPLETED, FAILED, or CANCELLED) are left unchanged.

        Args:
            workflow_id: Workflow identifier to cancel
            reason: Optional cancellation reason

        Raises:
            WorkflowNotFoundError: If the workflow doesn't exist
        """
        # Verify workflow exists
        row = await self.fetchone(
            """
            SELECT id, status
            FROM workflows
            WHERE id = ?
            """,
            (workflow_id,),
        )

        if not row:
            raise WorkflowNotFoundError(f"Workflow with ID {workflow_id} not found.")

        # Skip if already in terminal state
if row["status"] in ("COMPLETED", "FAILED", "CANCELED"):
            return

        # Prepare cancellation payload
        payload = {
            "reason": reason or "Cancelled",
            "canceled_at": datetime.now(timezone.utc).isoformat(),
        }

        # Create cancellation event
        await self.execute(
            """
            INSERT INTO events (workflow_id, type, payload)
            VALUES (?, 'WORKFLOW_CANCELLED', ?)
            """,
            (workflow_id, json.dumps(payload)),
        )

        # Update workflow status
        await self.execute(
            """
            UPDATE workflows
            SET status = 'CANCELLED'
            WHERE id = ?
            """,
            (workflow_id,),
        )

        # Fail all pending tasks
        await self.execute(
            """
            UPDATE tasks
            SET status = 'FAILED',
                last_error = 'workflow cancelled'
            WHERE workflow_id = ?
              AND status = 'PENDING'
            """,
            (workflow_id,),
        )

    # === Task Management Methods ===

    async def task_completed(self, task_id: str) -> None:
        """Mark a task as completed.

        Updates the task status to COMPLETED, indicating successful execution.

        Args:
            task_id: Unique identifier of the task to mark as completed
        """
        sql = """
            UPDATE tasks
            SET status = 'COMPLETED',
                updated_at = CURRENT_TIMESTAMP
            WHERE id = ?
              AND status = 'RUNNING'
        """
        await self.execute(sql, (task_id,))

    async def task_failed(self, task_id: str, error_message: str) -> None:
        """Mark a task as failed with an error message.

        Updates the task status to FAILED and records the error message.

        Args:
            task_id: Unique identifier of the task to mark as failed
            error_message: Error message describing the failure
        """
        sql = """
            UPDATE tasks
            SET status = 'FAILED',
                last_error = ?,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = ?
              AND status = 'RUNNING'
        """
        await self.execute(sql, (error_message, task_id))

    async def schedule_retry(self, task_id: str, run_at: datetime, error: str) -> None:
        """Schedule a task for retry.

        Updates the task status to PENDING and sets the next execution time.

        Args:
            task_id: Unique identifier of the task to retry
            run_at: Datetime when the task should be retried
            error: Error message from the failed attempt
        """
        sql = """
            UPDATE tasks
            SET status = 'PENDING',
                run_at = ?,
                last_error = ?,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = ?
        """
        await self.execute(sql, (run_at, error, task_id))

    async def claim_task(self) -> Task | None:
        """Atomically claim the next available task for processing.

        Claims a pending task that is ready to run by updating its status
        to RUNNING and incrementing the attempt counter.

        Returns:
            Task object if a task was claimed, None if no tasks available
        """
        sql = """
            UPDATE tasks
            SET status = 'RUNNING',
                attempts = attempts + 1,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = (
                SELECT id
                FROM tasks
                WHERE status = 'PENDING'
                  AND run_at <= CURRENT_TIMESTAMP
                ORDER BY run_at ASC, created_at ASC
                LIMIT 1
            )
            RETURNING *;
        """

        row = await self.fetchone(sql)
        return Task(**row) if row else None  # type: ignore

    async def create_activity(
        self, workflow_id: str, metadata: ActivityMetadata
    ) -> None:
        """Create an activity task and corresponding event.

        Schedules an activity for execution by creating both an event record
        and a task record with retry configuration.

        Args:
            workflow_id: ID of the workflow that owns this activity
            metadata: Activity metadata including name, retry count, and timeout
        """
        activity_id = self._create_id()

        # SQL statements
        event_sql = """
            INSERT INTO events (workflow_id, type, payload)
            VALUES (?, 'ACTIVITY_SCHEDULED', ?)
        """

        task_sql = """
            INSERT INTO tasks (
                id, workflow_id, kind, target, run_at, status,
                attempts, max_attempts
            ) VALUES (?, ?, 'ACTIVITY', ?, CURRENT_TIMESTAMP, 'PENDING', 0, ?)
        """

        # Parameters
        event_params = (workflow_id, json.dumps(metadata))
        task_params = (
            activity_id,
            workflow_id,
            metadata["name"],
            metadata["retry_count"],
        )

        # Execute as transaction
        async with aiosqlite.connect(DATABASE) as conn:
            await conn.execute(event_sql, event_params)
            await conn.execute(task_sql, task_params)
            await conn.commit()

    async def get_activity_event(
        self, workflow_id: str, activity_name: str, attempts: int
    ) -> Event | None:
"""Retrieve the scheduled event for a specific activity.
|
|
549
|
+
|
|
550
|
+
Args:
|
|
551
|
+
workflow_id: ID of the workflow that owns the activity
|
|
552
|
+
activity_name: Name of the activity
|
|
553
|
+
Returns:
|
|
554
|
+
Event object if found, None otherwise
|
|
555
|
+
"""
        sql = """
            SELECT type, payload
            FROM events
            WHERE workflow_id = ?
              AND type = 'ACTIVITY_SCHEDULED'
              AND payload->>'$.name' = ?
            ORDER BY id ASC
            LIMIT 1
            OFFSET ?
        """
        row = await self.fetchone(sql, (workflow_id, activity_name, attempts - 1))
        if not row:
            return None

        return Event(
            type=row["type"],
            payload=json.loads(row["payload"]),
        )

    async def create_timer(self, workflow_id: str, fire_at: datetime) -> None:
        """Create a timer task for a workflow.

        Schedules a timer task to wake up the workflow at a specific time.

        Args:
            workflow_id: ID of the workflow to schedule the timer for
            fire_at: Datetime when the timer should trigger
        """
        timer_id = self._create_id()

        await self.create_event(
            workflow_id,
            "TIMER_SCHEDULED",
            {
                "timer_id": timer_id,
                "fire_at": fire_at.isoformat(),
            },
        )

        await self.execute(
            """
            INSERT INTO tasks (
                id, workflow_id, kind, target, run_at, status
            )
            VALUES (?, ?, 'TIMER', '__timer__', ?, 'PENDING')
            """,
            (timer_id, workflow_id, fire_at),
        )

    async def release_task(self, task_id: str) -> None:
        """Release a claimed task back to PENDING.

        Used when the task cannot be executed yet (e.g. TIMER not due).

        Args:
            task_id: Unique identifier of the task to release
        """
        await self.execute(
            """
            UPDATE tasks
            SET status = 'PENDING',
                updated_at = CURRENT_TIMESTAMP
            WHERE id = ?
              AND status = 'RUNNING'
            """,
            (task_id,),
        )

    async def rotate_workflow_driver(self, workflow_id: str) -> None:
        """Retire the currently running workflow driver and enqueue a new one.

        Called when an unblock event (activity/timer/signal) occurs.
        """
        id = self._create_id()

        workflow = await self.get_workflow_info(workflow_id)

        # 1. Complete old driver
        await self.execute(
            """
            UPDATE tasks
            SET status = 'COMPLETED',
                updated_at = CURRENT_TIMESTAMP
            WHERE workflow_id = ?
              AND kind = 'STEP'
              AND status = 'RUNNING'
            """,
            (workflow_id,),
        )

        # 2. Enqueue new driver
        await self.execute(
            """
            INSERT INTO tasks (
                id, workflow_id, kind, target, status, run_at
            )
            VALUES (?, ?, 'STEP', ?, 'PENDING', CURRENT_TIMESTAMP)
            """,
            (id, workflow_id, workflow["name"]),
        )

    async def complete_running_step(self, workflow_id: str):
        await self.execute(
            """
            UPDATE tasks
            SET status = 'COMPLETED',
                updated_at = CURRENT_TIMESTAMP
            WHERE workflow_id = ?
              AND kind = 'STEP'
              AND status = 'RUNNING'
            """,
            (workflow_id,),
        )

    async def workflow_is_completed(self, workflow_id: str) -> bool:
        row = await self.fetchone(
            """
            SELECT 1
            FROM events
            WHERE workflow_id = ?
              AND type = 'WORKFLOW_COMPLETED'
            LIMIT 1
            """,
            (workflow_id,),
        )
        return row is not None

    async def create_log(self, workflow_id: str, level: str, message: str) -> None:
        """Create a log entry for a workflow.

        Args:
            workflow_id: ID of the workflow to associate the log with
            level: Log level (e.g., 'INFO', 'ERROR')
            message: Log message content
        """
        await self.execute(
            """
            INSERT INTO logs (workflow_id, level, message, created_at)
            VALUES (?, ?, ?, CURRENT_TIMESTAMP)
            """,
            (workflow_id, level, message),
        )

    # === Context Manager Methods ===

    async def __aenter__(self) -> "Database[InputT, StateT]":
        """Async context manager entry point."""
        await self._init_db()
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        """Async context manager exit point."""
        pass

    # === Utility Methods ===

    def _create_id(self) -> str:
        """Generate a unique identifier using UUID4.

        Returns:
            Hexadecimal string representation of a UUID4
        """
        return uuid4().hex
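
For orientation, here is a minimal usage sketch; it is not part of the package. It assumes WorkflowInput is a mapping with the name, description, version, status, and module keys that create_workflow reads, that a status of 'RUNNING' is what create_signal_event expects, and that Event exposes type and payload as attributes; the schemas under src/schemas/ are the authoritative definitions.

import asyncio

from src.database.db import Database


async def main() -> None:
    # Database is an async context manager; __aenter__ creates the data
    # directory and applies the upgrade migrations on first use.
    async with Database() as db:
        workflow_id = await db.create_workflow(
            {
                "name": "greet",             # also used as the first STEP task's target
                "description": "Demo workflow",
                "version": "1.0.0",
                "status": "RUNNING",         # create_signal_event rejects non-RUNNING workflows
                "module": "examples.greet",  # hypothetical module path
            },
            {"who": "world"},                # workflow input, stored as JSON
        )

        # Deliver external input to the running workflow.
        await db.create_signal_event(workflow_id, "resume", {"approved": True})

        # Replay the event history in insertion order.
        for event in await db.get_workflow_events(workflow_id):
            print(event.type, event.payload)


asyncio.run(main())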
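Likewise, a hypothetical sketch of the polling loop a worker process might run against claim_task; it assumes Task exposes its columns (id, kind, target, attempts, max_attempts) as attributes. The loop that actually ships with the package lives in src/core/worker.py.

import asyncio
from datetime import datetime, timedelta, timezone

from src.database.db import Database


async def worker_loop(db: Database) -> None:
    while True:
        # claim_task atomically flips the oldest due PENDING task to RUNNING.
        task = await db.claim_task()
        if task is None:
            await asyncio.sleep(0.5)  # nothing due; poll again shortly
            continue
        try:
            ...  # dispatch on task.kind / task.target here
        except Exception as exc:
            if task.attempts < task.max_attempts:
                # Back off exponentially; schedule_retry flips the task back to PENDING.
                retry_at = datetime.now(timezone.utc) + timedelta(seconds=2**task.attempts)
                await db.schedule_retry(task.id, retry_at, str(exc))
            else:
                await db.task_failed(task.id, str(exc))
        else:
            await db.task_completed(task.id)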