ralphx 0.3.5__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ralphx/__init__.py +1 -1
- ralphx/adapters/base.py +10 -2
- ralphx/adapters/claude_cli.py +222 -82
- ralphx/api/routes/auth.py +105 -32
- ralphx/api/routes/planning.py +865 -16
- ralphx/api/routes/resources.py +528 -6
- ralphx/api/routes/stream.py +58 -56
- ralphx/api/routes/workflows.py +257 -25
- ralphx/core/auth.py +32 -7
- ralphx/core/executor.py +170 -16
- ralphx/core/loop_templates.py +26 -0
- ralphx/core/planning_iteration_executor.py +633 -0
- ralphx/core/planning_service.py +10 -3
- ralphx/core/project_db.py +770 -79
- ralphx/core/resources.py +28 -2
- ralphx/core/workflow_executor.py +32 -3
- ralphx/mcp/tools/diagnostics.py +1 -1
- ralphx/mcp/tools/monitoring.py +10 -16
- ralphx/mcp/tools/workflows.py +3 -3
- ralphx/static/assets/index-BuLI7ffn.css +1 -0
- ralphx/static/assets/index-DWvlqOTb.js +264 -0
- ralphx/static/assets/index-DWvlqOTb.js.map +1 -0
- ralphx/static/index.html +2 -2
- ralphx/templates/loop_templates/consumer.md +2 -2
- {ralphx-0.3.5.dist-info → ralphx-0.4.0.dist-info}/METADATA +1 -1
- {ralphx-0.3.5.dist-info → ralphx-0.4.0.dist-info}/RECORD +28 -27
- ralphx/static/assets/index-0ovNnfOq.css +0 -1
- ralphx/static/assets/index-CY9s08ZB.js +0 -251
- ralphx/static/assets/index-CY9s08ZB.js.map +0 -1
- {ralphx-0.3.5.dist-info → ralphx-0.4.0.dist-info}/WHEEL +0 -0
- {ralphx-0.3.5.dist-info → ralphx-0.4.0.dist-info}/entry_points.txt +0 -0
ralphx/core/project_db.py
CHANGED
|
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
|
|
|
27
27
|
|
|
28
28
|
|
|
29
29
|
# Schema version for project DB
|
|
30
|
-
PROJECT_SCHEMA_VERSION =
|
|
30
|
+
PROJECT_SCHEMA_VERSION = 20
|
|
31
31
|
|
|
32
32
|
# Project database schema - all project-specific data
|
|
33
33
|
PROJECT_SCHEMA_SQL = """
|
|
@@ -306,19 +306,65 @@ CREATE TABLE IF NOT EXISTS workflow_steps (
|
|
|
306
306
|
);
|
|
307
307
|
|
|
308
308
|
-- Planning sessions (for interactive steps)
|
|
309
|
+
-- New iteration-based paradigm: users provide a prompt + iteration count, system runs N iterations
|
|
309
310
|
CREATE TABLE IF NOT EXISTS planning_sessions (
|
|
310
311
|
id TEXT PRIMARY KEY,
|
|
311
312
|
workflow_id TEXT NOT NULL,
|
|
312
313
|
step_id INTEGER NOT NULL,
|
|
313
|
-
messages JSON NOT NULL DEFAULT '[]', -- Conversation history
|
|
314
|
+
messages JSON NOT NULL DEFAULT '[]', -- Conversation history (legacy chat-based sessions)
|
|
314
315
|
artifacts JSON, -- Generated design doc, guardrails
|
|
315
316
|
status TEXT DEFAULT 'active', -- active, completed
|
|
317
|
+
-- New iteration-based fields (v17)
|
|
318
|
+
prompt TEXT, -- User's guidance for this session
|
|
319
|
+
iterations_requested INTEGER DEFAULT 1, -- Number of iterations requested
|
|
320
|
+
iterations_completed INTEGER DEFAULT 0, -- Number of iterations completed
|
|
321
|
+
current_iteration INTEGER DEFAULT 0, -- Current iteration number (0 = not started)
|
|
322
|
+
run_status TEXT DEFAULT 'pending', -- pending, running, completed, cancelled, error
|
|
323
|
+
is_legacy BOOLEAN DEFAULT FALSE, -- TRUE for old chat-based sessions
|
|
324
|
+
error_message TEXT, -- Error message if run_status='error'
|
|
316
325
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
317
326
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
318
327
|
FOREIGN KEY (workflow_id) REFERENCES workflows(id) ON DELETE CASCADE,
|
|
319
328
|
FOREIGN KEY (step_id) REFERENCES workflow_steps(id) ON DELETE CASCADE
|
|
320
329
|
);
|
|
321
330
|
|
|
331
|
+
-- Planning iterations (per-iteration tracking for iteration-based sessions)
|
|
332
|
+
CREATE TABLE IF NOT EXISTS planning_iterations (
|
|
333
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
334
|
+
session_id TEXT NOT NULL,
|
|
335
|
+
iteration_number INTEGER NOT NULL,
|
|
336
|
+
started_at TIMESTAMP,
|
|
337
|
+
completed_at TIMESTAMP,
|
|
338
|
+
status TEXT DEFAULT 'pending', -- pending, running, completed, failed, skipped
|
|
339
|
+
chars_added INTEGER DEFAULT 0,
|
|
340
|
+
chars_removed INTEGER DEFAULT 0,
|
|
341
|
+
tool_calls JSON DEFAULT '[]', -- [{tool, input_preview, duration_ms}] - truncated
|
|
342
|
+
summary TEXT, -- Brief summary of what changed
|
|
343
|
+
diff_text TEXT, -- Unified diff of changes
|
|
344
|
+
doc_before TEXT, -- Document content before iteration
|
|
345
|
+
doc_after TEXT, -- Document content after iteration
|
|
346
|
+
error_message TEXT,
|
|
347
|
+
FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
|
|
348
|
+
);
|
|
349
|
+
|
|
350
|
+
-- Planning iteration events (persistent event log for streaming/reconnection)
|
|
351
|
+
CREATE TABLE IF NOT EXISTS planning_iteration_events (
|
|
352
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
353
|
+
session_id TEXT NOT NULL,
|
|
354
|
+
iteration_number INTEGER,
|
|
355
|
+
event_type TEXT NOT NULL,
|
|
356
|
+
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
357
|
+
content TEXT,
|
|
358
|
+
tool_name TEXT,
|
|
359
|
+
tool_input TEXT,
|
|
360
|
+
tool_result TEXT,
|
|
361
|
+
event_data TEXT,
|
|
362
|
+
FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
|
|
363
|
+
);
|
|
364
|
+
|
|
365
|
+
CREATE INDEX IF NOT EXISTS idx_pie_session ON planning_iteration_events(session_id);
|
|
366
|
+
CREATE INDEX IF NOT EXISTS idx_pie_session_id ON planning_iteration_events(session_id, id);
|
|
367
|
+
|
|
322
368
|
-- Workflow-scoped resources (design docs, guardrails, input files)
|
|
323
369
|
CREATE TABLE IF NOT EXISTS workflow_resources (
|
|
324
370
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
@@ -452,6 +498,8 @@ CREATE INDEX IF NOT EXISTS idx_workflow_steps_workflow ON workflow_steps(workflo
|
|
|
452
498
|
CREATE INDEX IF NOT EXISTS idx_workflow_steps_status ON workflow_steps(status);
|
|
453
499
|
CREATE INDEX IF NOT EXISTS idx_planning_sessions_workflow ON planning_sessions(workflow_id);
|
|
454
500
|
CREATE INDEX IF NOT EXISTS idx_planning_sessions_status ON planning_sessions(status);
|
|
501
|
+
CREATE INDEX IF NOT EXISTS idx_planning_sessions_run_status ON planning_sessions(run_status);
|
|
502
|
+
CREATE INDEX IF NOT EXISTS idx_planning_iterations_session ON planning_iterations(session_id);
|
|
455
503
|
|
|
456
504
|
-- Workflow resources indexes
|
|
457
505
|
CREATE INDEX IF NOT EXISTS idx_workflow_resources_workflow ON workflow_resources(workflow_id, resource_type);
|
|
@@ -567,9 +615,8 @@ class ProjectDatabase:
|
|
|
567
615
|
"Please delete your .ralphx/ralphx.db file and start fresh."
|
|
568
616
|
)
|
|
569
617
|
|
|
570
|
-
#
|
|
618
|
+
# Create schema tables (indexes created after migrations)
|
|
571
619
|
conn.executescript(PROJECT_SCHEMA_SQL)
|
|
572
|
-
conn.executescript(PROJECT_INDEXES_SQL)
|
|
573
620
|
|
|
574
621
|
if current_version == 0:
|
|
575
622
|
# Fresh database
|
|
@@ -583,6 +630,9 @@ class ProjectDatabase:
|
|
|
583
630
|
# Run migrations (for future versions > 6)
|
|
584
631
|
self._run_migrations(conn, current_version)
|
|
585
632
|
|
|
633
|
+
# Create indexes AFTER migrations so all columns exist
|
|
634
|
+
conn.executescript(PROJECT_INDEXES_SQL)
|
|
635
|
+
|
|
586
636
|
def _backup_before_migration(self, from_version: int) -> None:
|
|
587
637
|
"""Create a backup of the database before running migrations.
|
|
588
638
|
|
|
@@ -663,6 +713,26 @@ class ProjectDatabase:
|
|
|
663
713
|
# Migration from v15 to v16: Remove namespace from workflows table
|
|
664
714
|
if from_version == 15:
|
|
665
715
|
self._migrate_v15_to_v16(conn)
|
|
716
|
+
from_version = 16 # Continue to next migration
|
|
717
|
+
|
|
718
|
+
# Migration from v16 to v17: Add iteration-based planning fields
|
|
719
|
+
if from_version == 16:
|
|
720
|
+
self._migrate_v16_to_v17(conn)
|
|
721
|
+
from_version = 17 # Continue to next migration
|
|
722
|
+
|
|
723
|
+
# Migration from v17 to v18: Add planning_iteration_events table
|
|
724
|
+
if from_version == 17:
|
|
725
|
+
self._migrate_v17_to_v18(conn)
|
|
726
|
+
from_version = 18 # Continue to next migration
|
|
727
|
+
|
|
728
|
+
# Migration from v18 to v19: Add diff_text column to planning_iterations
|
|
729
|
+
if from_version == 18:
|
|
730
|
+
self._migrate_v18_to_v19(conn)
|
|
731
|
+
from_version = 19 # Continue to next migration
|
|
732
|
+
|
|
733
|
+
# Migration from v19 to v20: Add doc_before/doc_after columns
|
|
734
|
+
if from_version == 19:
|
|
735
|
+
self._migrate_v19_to_v20(conn)
|
|
666
736
|
|
|
667
737
|
# Seed workflow templates for fresh databases
|
|
668
738
|
self._seed_workflow_templates(conn)
|
|
@@ -978,41 +1048,155 @@ class ProjectDatabase:
|
|
|
978
1048
|
otherwise the ON DELETE CASCADE on workflow_steps will delete all steps!
|
|
979
1049
|
"""
|
|
980
1050
|
# 0. Disable foreign keys to prevent CASCADE deletes during table swap
|
|
1051
|
+
# IMPORTANT: PRAGMA foreign_keys is NOT transactional, so we must
|
|
1052
|
+
# re-enable in a finally block to prevent silent FK violations
|
|
981
1053
|
conn.execute("PRAGMA foreign_keys=OFF")
|
|
1054
|
+
try:
|
|
1055
|
+
# 1. Create new table without namespace
|
|
1056
|
+
conn.execute("""
|
|
1057
|
+
CREATE TABLE workflows_new (
|
|
1058
|
+
id TEXT PRIMARY KEY,
|
|
1059
|
+
template_id TEXT,
|
|
1060
|
+
name TEXT NOT NULL,
|
|
1061
|
+
status TEXT DEFAULT 'draft',
|
|
1062
|
+
current_step INTEGER DEFAULT 1,
|
|
1063
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
1064
|
+
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
1065
|
+
archived_at TIMESTAMP
|
|
1066
|
+
)
|
|
1067
|
+
""")
|
|
982
1068
|
|
|
983
|
-
|
|
1069
|
+
# 2. Copy data (excluding namespace)
|
|
1070
|
+
conn.execute("""
|
|
1071
|
+
INSERT INTO workflows_new (id, template_id, name, status, current_step, created_at, updated_at, archived_at)
|
|
1072
|
+
SELECT id, template_id, name, status, current_step, created_at, updated_at, archived_at
|
|
1073
|
+
FROM workflows
|
|
1074
|
+
""")
|
|
1075
|
+
|
|
1076
|
+
# 3. Drop old table and index
|
|
1077
|
+
conn.execute("DROP INDEX IF EXISTS idx_workflows_namespace")
|
|
1078
|
+
conn.execute("DROP TABLE workflows")
|
|
1079
|
+
|
|
1080
|
+
# 4. Rename new table
|
|
1081
|
+
conn.execute("ALTER TABLE workflows_new RENAME TO workflows")
|
|
1082
|
+
|
|
1083
|
+
# 5. Recreate the status index on the new table
|
|
1084
|
+
conn.execute("CREATE INDEX IF NOT EXISTS idx_workflows_status ON workflows(status)")
|
|
1085
|
+
finally:
|
|
1086
|
+
# 6. Re-enable foreign keys (must happen even on failure)
|
|
1087
|
+
conn.execute("PRAGMA foreign_keys=ON")
|
|
1088
|
+
|
|
1089
|
+
def _migrate_v16_to_v17(self, conn: sqlite3.Connection) -> None:
|
|
1090
|
+
"""Migrate from schema v16 to v17.
|
|
1091
|
+
|
|
1092
|
+
Adds:
|
|
1093
|
+
- New columns to planning_sessions for iteration-based paradigm
|
|
1094
|
+
- New planning_iterations table for per-iteration tracking
|
|
1095
|
+
- Marks existing chat-based sessions as legacy
|
|
1096
|
+
"""
|
|
1097
|
+
# 1. Add new columns to planning_sessions
|
|
1098
|
+
conn.execute("ALTER TABLE planning_sessions ADD COLUMN prompt TEXT")
|
|
1099
|
+
conn.execute(
|
|
1100
|
+
"ALTER TABLE planning_sessions ADD COLUMN iterations_requested INTEGER DEFAULT 1"
|
|
1101
|
+
)
|
|
1102
|
+
conn.execute(
|
|
1103
|
+
"ALTER TABLE planning_sessions ADD COLUMN iterations_completed INTEGER DEFAULT 0"
|
|
1104
|
+
)
|
|
1105
|
+
conn.execute(
|
|
1106
|
+
"ALTER TABLE planning_sessions ADD COLUMN current_iteration INTEGER DEFAULT 0"
|
|
1107
|
+
)
|
|
1108
|
+
conn.execute(
|
|
1109
|
+
"ALTER TABLE planning_sessions ADD COLUMN run_status TEXT DEFAULT 'pending'"
|
|
1110
|
+
)
|
|
1111
|
+
conn.execute(
|
|
1112
|
+
"ALTER TABLE planning_sessions ADD COLUMN is_legacy BOOLEAN DEFAULT FALSE"
|
|
1113
|
+
)
|
|
1114
|
+
conn.execute("ALTER TABLE planning_sessions ADD COLUMN error_message TEXT")
|
|
1115
|
+
|
|
1116
|
+
# 2. Mark existing sessions with messages as legacy
|
|
984
1117
|
conn.execute("""
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
1118
|
+
UPDATE planning_sessions
|
|
1119
|
+
SET is_legacy = TRUE, run_status = 'completed'
|
|
1120
|
+
WHERE json_array_length(messages) > 0 AND prompt IS NULL
|
|
1121
|
+
""")
|
|
1122
|
+
|
|
1123
|
+
# 3. Create planning_iterations table
|
|
1124
|
+
conn.execute("""
|
|
1125
|
+
CREATE TABLE IF NOT EXISTS planning_iterations (
|
|
1126
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
1127
|
+
session_id TEXT NOT NULL,
|
|
1128
|
+
iteration_number INTEGER NOT NULL,
|
|
1129
|
+
started_at TIMESTAMP,
|
|
1130
|
+
completed_at TIMESTAMP,
|
|
1131
|
+
status TEXT DEFAULT 'pending',
|
|
1132
|
+
chars_added INTEGER DEFAULT 0,
|
|
1133
|
+
chars_removed INTEGER DEFAULT 0,
|
|
1134
|
+
tool_calls JSON DEFAULT '[]',
|
|
1135
|
+
summary TEXT,
|
|
1136
|
+
error_message TEXT,
|
|
1137
|
+
FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
|
|
994
1138
|
)
|
|
995
1139
|
""")
|
|
996
1140
|
|
|
997
|
-
#
|
|
1141
|
+
# 4. Add indexes
|
|
1142
|
+
conn.execute(
|
|
1143
|
+
"CREATE INDEX IF NOT EXISTS idx_planning_sessions_run_status ON planning_sessions(run_status)"
|
|
1144
|
+
)
|
|
1145
|
+
conn.execute(
|
|
1146
|
+
"CREATE INDEX IF NOT EXISTS idx_planning_iterations_session ON planning_iterations(session_id)"
|
|
1147
|
+
)
|
|
1148
|
+
|
|
1149
|
+
def _migrate_v17_to_v18(self, conn: sqlite3.Connection) -> None:
|
|
1150
|
+
"""Migrate from schema v17 to v18.
|
|
1151
|
+
|
|
1152
|
+
Adds:
|
|
1153
|
+
- planning_iteration_events table for persistent event streaming/reconnection
|
|
1154
|
+
"""
|
|
998
1155
|
conn.execute("""
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1156
|
+
CREATE TABLE IF NOT EXISTS planning_iteration_events (
|
|
1157
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
1158
|
+
session_id TEXT NOT NULL,
|
|
1159
|
+
iteration_number INTEGER,
|
|
1160
|
+
event_type TEXT NOT NULL,
|
|
1161
|
+
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
1162
|
+
content TEXT,
|
|
1163
|
+
tool_name TEXT,
|
|
1164
|
+
tool_input TEXT,
|
|
1165
|
+
tool_result TEXT,
|
|
1166
|
+
event_data TEXT,
|
|
1167
|
+
FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
|
|
1168
|
+
)
|
|
1002
1169
|
""")
|
|
1170
|
+
conn.execute(
|
|
1171
|
+
"CREATE INDEX IF NOT EXISTS idx_pie_session ON planning_iteration_events(session_id)"
|
|
1172
|
+
)
|
|
1173
|
+
conn.execute(
|
|
1174
|
+
"CREATE INDEX IF NOT EXISTS idx_pie_session_id ON planning_iteration_events(session_id, id)"
|
|
1175
|
+
)
|
|
1003
1176
|
|
|
1004
|
-
|
|
1005
|
-
|
|
1006
|
-
conn.execute("DROP TABLE workflows")
|
|
1177
|
+
def _migrate_v18_to_v19(self, conn: sqlite3.Connection) -> None:
|
|
1178
|
+
"""Migrate from schema v18 to v19.
|
|
1007
1179
|
|
|
1008
|
-
|
|
1009
|
-
|
|
1180
|
+
Adds:
|
|
1181
|
+
- diff_text column to planning_iterations for storing unified diffs
|
|
1182
|
+
"""
|
|
1183
|
+
conn.execute(
|
|
1184
|
+
"ALTER TABLE planning_iterations ADD COLUMN diff_text TEXT"
|
|
1185
|
+
)
|
|
1010
1186
|
|
|
1011
|
-
|
|
1012
|
-
|
|
1187
|
+
def _migrate_v19_to_v20(self, conn: sqlite3.Connection) -> None:
|
|
1188
|
+
"""Migrate from schema v19 to v20.
|
|
1013
1189
|
|
|
1014
|
-
|
|
1015
|
-
|
|
1190
|
+
Adds:
|
|
1191
|
+
- doc_before column to planning_iterations for pre-iteration doc snapshot
|
|
1192
|
+
- doc_after column to planning_iterations for post-iteration doc snapshot
|
|
1193
|
+
"""
|
|
1194
|
+
conn.execute(
|
|
1195
|
+
"ALTER TABLE planning_iterations ADD COLUMN doc_before TEXT"
|
|
1196
|
+
)
|
|
1197
|
+
conn.execute(
|
|
1198
|
+
"ALTER TABLE planning_iterations ADD COLUMN doc_after TEXT"
|
|
1199
|
+
)
|
|
1016
1200
|
|
|
1017
1201
|
# ========== Loops ==========
|
|
1018
1202
|
|
|
@@ -1273,23 +1457,29 @@ class ProjectDatabase:
|
|
|
1273
1457
|
def list_sessions(
|
|
1274
1458
|
self,
|
|
1275
1459
|
run_id: Optional[str] = None,
|
|
1460
|
+
status: Optional[str] = None,
|
|
1276
1461
|
limit: int = 100,
|
|
1277
1462
|
) -> list[dict]:
|
|
1278
1463
|
"""List sessions with optional filters."""
|
|
1279
1464
|
with self._reader() as conn:
|
|
1465
|
+
conditions = ["1=1"]
|
|
1466
|
+
params: list[Any] = []
|
|
1467
|
+
|
|
1280
1468
|
if run_id:
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1469
|
+
conditions.append("run_id = ?")
|
|
1470
|
+
params.append(run_id)
|
|
1471
|
+
if status:
|
|
1472
|
+
conditions.append("status = ?")
|
|
1473
|
+
params.append(status)
|
|
1474
|
+
|
|
1475
|
+
cursor = conn.execute(
|
|
1476
|
+
f"""
|
|
1477
|
+
SELECT * FROM sessions
|
|
1478
|
+
WHERE {' AND '.join(conditions)}
|
|
1479
|
+
ORDER BY started_at DESC LIMIT ?
|
|
1480
|
+
""",
|
|
1481
|
+
params + [limit],
|
|
1482
|
+
)
|
|
1293
1483
|
return [dict(row) for row in cursor.fetchall()]
|
|
1294
1484
|
|
|
1295
1485
|
_SESSION_UPDATE_COLS = frozenset({
|
|
@@ -1371,6 +1561,7 @@ class ProjectDatabase:
|
|
|
1371
1561
|
self,
|
|
1372
1562
|
session_id: str,
|
|
1373
1563
|
after_id: Optional[int] = None,
|
|
1564
|
+
event_type: Optional[str] = None,
|
|
1374
1565
|
limit: int = 500,
|
|
1375
1566
|
) -> list[dict]:
|
|
1376
1567
|
"""Get events for a session.
|
|
@@ -1378,30 +1569,31 @@ class ProjectDatabase:
|
|
|
1378
1569
|
Args:
|
|
1379
1570
|
session_id: Session UUID.
|
|
1380
1571
|
after_id: Only return events with ID greater than this (for polling).
|
|
1572
|
+
event_type: Filter by event type (text, tool_call, tool_result, error).
|
|
1381
1573
|
limit: Maximum number of events to return.
|
|
1382
1574
|
|
|
1383
1575
|
Returns:
|
|
1384
1576
|
List of event dicts.
|
|
1385
1577
|
"""
|
|
1386
1578
|
with self._reader() as conn:
|
|
1579
|
+
conditions = ["session_id = ?"]
|
|
1580
|
+
params: list[Any] = [session_id]
|
|
1581
|
+
|
|
1387
1582
|
if after_id:
|
|
1388
|
-
|
|
1389
|
-
|
|
1390
|
-
|
|
1391
|
-
|
|
1392
|
-
|
|
1393
|
-
|
|
1394
|
-
|
|
1395
|
-
|
|
1396
|
-
|
|
1397
|
-
|
|
1398
|
-
|
|
1399
|
-
|
|
1400
|
-
|
|
1401
|
-
|
|
1402
|
-
""",
|
|
1403
|
-
(session_id, limit),
|
|
1404
|
-
)
|
|
1583
|
+
conditions.append("id > ?")
|
|
1584
|
+
params.append(after_id)
|
|
1585
|
+
if event_type:
|
|
1586
|
+
conditions.append("event_type = ?")
|
|
1587
|
+
params.append(event_type)
|
|
1588
|
+
|
|
1589
|
+
cursor = conn.execute(
|
|
1590
|
+
f"""
|
|
1591
|
+
SELECT * FROM session_events
|
|
1592
|
+
WHERE {' AND '.join(conditions)}
|
|
1593
|
+
ORDER BY id ASC LIMIT ?
|
|
1594
|
+
""",
|
|
1595
|
+
params + [limit],
|
|
1596
|
+
)
|
|
1405
1597
|
|
|
1406
1598
|
events = []
|
|
1407
1599
|
for row in cursor.fetchall():
|
|
@@ -1693,7 +1885,12 @@ class ProjectDatabase:
|
|
|
1693
1885
|
return cursor.rowcount > 0
|
|
1694
1886
|
|
|
1695
1887
|
def release_work_item(self, id: str) -> bool:
|
|
1696
|
-
"""Release a claimed work item back to
|
|
1888
|
+
"""Release a claimed work item back to 'completed' state.
|
|
1889
|
+
|
|
1890
|
+
Items generated by producer loops have status 'completed' before being
|
|
1891
|
+
claimed. We restore to 'completed' (not 'pending') so that consumer
|
|
1892
|
+
loops, which query for status='completed', can find and retry them.
|
|
1893
|
+
"""
|
|
1697
1894
|
with self._writer() as conn:
|
|
1698
1895
|
now = datetime.utcnow().isoformat()
|
|
1699
1896
|
cursor = conn.execute(
|
|
@@ -1701,7 +1898,7 @@ class ProjectDatabase:
|
|
|
1701
1898
|
UPDATE work_items
|
|
1702
1899
|
SET claimed_by = NULL,
|
|
1703
1900
|
claimed_at = NULL,
|
|
1704
|
-
status = '
|
|
1901
|
+
status = 'completed',
|
|
1705
1902
|
updated_at = ?
|
|
1706
1903
|
WHERE id = ? AND status = 'claimed'
|
|
1707
1904
|
""",
|
|
@@ -1831,6 +2028,9 @@ class ProjectDatabase:
|
|
|
1831
2028
|
def release_stale_claims(self, max_age_minutes: int = 30) -> int:
|
|
1832
2029
|
"""Release claims that have been held too long (likely crashed consumer).
|
|
1833
2030
|
|
|
2031
|
+
Released items are restored to 'completed' status so consumer loops
|
|
2032
|
+
(which query for status='completed') can find and retry them.
|
|
2033
|
+
|
|
1834
2034
|
Args:
|
|
1835
2035
|
max_age_minutes: Claims older than this are released.
|
|
1836
2036
|
|
|
@@ -1848,7 +2048,7 @@ class ProjectDatabase:
|
|
|
1848
2048
|
UPDATE work_items
|
|
1849
2049
|
SET claimed_by = NULL,
|
|
1850
2050
|
claimed_at = NULL,
|
|
1851
|
-
status = '
|
|
2051
|
+
status = 'completed',
|
|
1852
2052
|
updated_at = ?
|
|
1853
2053
|
WHERE claimed_at < ?
|
|
1854
2054
|
AND claimed_by IS NOT NULL
|
|
@@ -1862,8 +2062,8 @@ class ProjectDatabase:
|
|
|
1862
2062
|
"""Release all claims held by a specific loop.
|
|
1863
2063
|
|
|
1864
2064
|
Used when deleting a loop to prevent orphaned claims.
|
|
1865
|
-
Released items are restored to '
|
|
1866
|
-
picked up by other loops.
|
|
2065
|
+
Released items are restored to 'completed' status so they can be
|
|
2066
|
+
picked up by other consumer loops.
|
|
1867
2067
|
|
|
1868
2068
|
Args:
|
|
1869
2069
|
loop_name: Name of the loop whose claims should be released.
|
|
@@ -1879,7 +2079,7 @@ class ProjectDatabase:
|
|
|
1879
2079
|
UPDATE work_items
|
|
1880
2080
|
SET claimed_by = NULL,
|
|
1881
2081
|
claimed_at = NULL,
|
|
1882
|
-
status = '
|
|
2082
|
+
status = 'completed',
|
|
1883
2083
|
updated_at = ?
|
|
1884
2084
|
WHERE claimed_by = ? AND status = 'claimed'
|
|
1885
2085
|
""",
|
|
@@ -1893,8 +2093,8 @@ class ProjectDatabase:
|
|
|
1893
2093
|
This is an atomic operation that checks ownership and releases in one step
|
|
1894
2094
|
to prevent TOCTOU race conditions.
|
|
1895
2095
|
|
|
1896
|
-
Released items are restored to '
|
|
1897
|
-
|
|
2096
|
+
Released items are restored to 'completed' status so consumer loops
|
|
2097
|
+
(which query for status='completed') can find and retry them.
|
|
1898
2098
|
|
|
1899
2099
|
Args:
|
|
1900
2100
|
id: Work item ID.
|
|
@@ -1910,7 +2110,7 @@ class ProjectDatabase:
|
|
|
1910
2110
|
UPDATE work_items
|
|
1911
2111
|
SET claimed_by = NULL,
|
|
1912
2112
|
claimed_at = NULL,
|
|
1913
|
-
status = '
|
|
2113
|
+
status = 'completed',
|
|
1914
2114
|
updated_at = ?
|
|
1915
2115
|
WHERE id = ? AND claimed_by = ? AND status = 'claimed'
|
|
1916
2116
|
""",
|
|
@@ -2550,26 +2750,50 @@ class ProjectDatabase:
|
|
|
2550
2750
|
self,
|
|
2551
2751
|
run_id: Optional[str] = None,
|
|
2552
2752
|
level: Optional[str] = None,
|
|
2753
|
+
session_id: Optional[str] = None,
|
|
2754
|
+
search: Optional[str] = None,
|
|
2553
2755
|
limit: int = 100,
|
|
2554
2756
|
offset: int = 0,
|
|
2555
|
-
) -> list[dict]:
|
|
2556
|
-
"""Get logs with optional filters.
|
|
2757
|
+
) -> tuple[list[dict], int]:
|
|
2758
|
+
"""Get logs with optional filters.
|
|
2759
|
+
|
|
2760
|
+
Returns:
|
|
2761
|
+
Tuple of (logs list, total count).
|
|
2762
|
+
"""
|
|
2557
2763
|
with self._reader() as conn:
|
|
2558
2764
|
conditions = ["1=1"]
|
|
2559
2765
|
params: list[Any] = []
|
|
2560
2766
|
|
|
2561
2767
|
if run_id:
|
|
2562
|
-
conditions.append("run_id = ?")
|
|
2768
|
+
conditions.append("l.run_id = ?")
|
|
2563
2769
|
params.append(run_id)
|
|
2564
2770
|
if level:
|
|
2565
|
-
conditions.append("level = ?")
|
|
2771
|
+
conditions.append("l.level = ?")
|
|
2566
2772
|
params.append(level)
|
|
2773
|
+
if session_id:
|
|
2774
|
+
# Filter logs by session: join through runs → sessions
|
|
2775
|
+
conditions.append(
|
|
2776
|
+
"l.run_id IN (SELECT run_id FROM sessions WHERE session_id = ?)"
|
|
2777
|
+
)
|
|
2778
|
+
params.append(session_id)
|
|
2779
|
+
if search:
|
|
2780
|
+
conditions.append("l.message LIKE ?")
|
|
2781
|
+
params.append(f"%{search}%")
|
|
2782
|
+
|
|
2783
|
+
where_clause = " AND ".join(conditions)
|
|
2784
|
+
|
|
2785
|
+
# Get total count
|
|
2786
|
+
count_row = conn.execute(
|
|
2787
|
+
f"SELECT COUNT(*) FROM logs l WHERE {where_clause}",
|
|
2788
|
+
params,
|
|
2789
|
+
).fetchone()
|
|
2790
|
+
total = count_row[0] if count_row else 0
|
|
2567
2791
|
|
|
2568
2792
|
cursor = conn.execute(
|
|
2569
2793
|
f"""
|
|
2570
|
-
SELECT
|
|
2571
|
-
WHERE {
|
|
2572
|
-
ORDER BY timestamp DESC
|
|
2794
|
+
SELECT l.* FROM logs l
|
|
2795
|
+
WHERE {where_clause}
|
|
2796
|
+
ORDER BY l.timestamp DESC
|
|
2573
2797
|
LIMIT ? OFFSET ?
|
|
2574
2798
|
""",
|
|
2575
2799
|
params + [limit, offset],
|
|
@@ -2581,7 +2805,84 @@ class ProjectDatabase:
|
|
|
2581
2805
|
if result.get("metadata"):
|
|
2582
2806
|
result["metadata"] = json.loads(result["metadata"])
|
|
2583
2807
|
results.append(result)
|
|
2584
|
-
return results
|
|
2808
|
+
return results, total
|
|
2809
|
+
|
|
2810
|
+
def get_log_stats(
|
|
2811
|
+
self,
|
|
2812
|
+
run_id: Optional[str] = None,
|
|
2813
|
+
session_id: Optional[str] = None,
|
|
2814
|
+
) -> dict:
|
|
2815
|
+
"""Get log statistics (counts by level).
|
|
2816
|
+
|
|
2817
|
+
Returns:
|
|
2818
|
+
Dict with by_level counts and total.
|
|
2819
|
+
"""
|
|
2820
|
+
with self._reader() as conn:
|
|
2821
|
+
conditions = ["1=1"]
|
|
2822
|
+
params: list[Any] = []
|
|
2823
|
+
|
|
2824
|
+
if run_id:
|
|
2825
|
+
conditions.append("run_id = ?")
|
|
2826
|
+
params.append(run_id)
|
|
2827
|
+
if session_id:
|
|
2828
|
+
conditions.append(
|
|
2829
|
+
"run_id IN (SELECT run_id FROM sessions WHERE session_id = ?)"
|
|
2830
|
+
)
|
|
2831
|
+
params.append(session_id)
|
|
2832
|
+
|
|
2833
|
+
where_clause = " AND ".join(conditions)
|
|
2834
|
+
|
|
2835
|
+
# Count by level
|
|
2836
|
+
cursor = conn.execute(
|
|
2837
|
+
f"""
|
|
2838
|
+
SELECT level, COUNT(*) as count FROM logs
|
|
2839
|
+
WHERE {where_clause}
|
|
2840
|
+
GROUP BY level
|
|
2841
|
+
""",
|
|
2842
|
+
params,
|
|
2843
|
+
)
|
|
2844
|
+
by_level = {row["level"]: row["count"] for row in cursor.fetchall()}
|
|
2845
|
+
|
|
2846
|
+
total = sum(by_level.values())
|
|
2847
|
+
|
|
2848
|
+
return {
|
|
2849
|
+
"by_level": by_level,
|
|
2850
|
+
"by_category": {}, # No category column in schema
|
|
2851
|
+
"total": total,
|
|
2852
|
+
}
|
|
2853
|
+
|
|
2854
|
+
def cleanup_logs(
|
|
2855
|
+
self,
|
|
2856
|
+
days: int = 30,
|
|
2857
|
+
dry_run: bool = True,
|
|
2858
|
+
) -> dict:
|
|
2859
|
+
"""Delete logs older than specified days.
|
|
2860
|
+
|
|
2861
|
+
Args:
|
|
2862
|
+
days: Delete logs older than this many days.
|
|
2863
|
+
dry_run: If True, only report what would be deleted.
|
|
2864
|
+
|
|
2865
|
+
Returns:
|
|
2866
|
+
Dict with deleted_count.
|
|
2867
|
+
"""
|
|
2868
|
+
from datetime import datetime, timedelta
|
|
2869
|
+
|
|
2870
|
+
cutoff = (datetime.utcnow() - timedelta(days=days)).isoformat()
|
|
2871
|
+
|
|
2872
|
+
if dry_run:
|
|
2873
|
+
with self._reader() as conn:
|
|
2874
|
+
row = conn.execute(
|
|
2875
|
+
"SELECT COUNT(*) FROM logs WHERE timestamp < ?",
|
|
2876
|
+
(cutoff,),
|
|
2877
|
+
).fetchone()
|
|
2878
|
+
return {"deleted_count": row[0] if row else 0}
|
|
2879
|
+
else:
|
|
2880
|
+
with self._writer() as conn:
|
|
2881
|
+
cursor = conn.execute(
|
|
2882
|
+
"DELETE FROM logs WHERE timestamp < ?",
|
|
2883
|
+
(cutoff,),
|
|
2884
|
+
)
|
|
2885
|
+
return {"deleted_count": cursor.rowcount}
|
|
2585
2886
|
|
|
2586
2887
|
# ========== Checkpoints ==========
|
|
2587
2888
|
|
|
@@ -4823,6 +5124,52 @@ class ProjectDatabase:
|
|
|
4823
5124
|
|
|
4824
5125
|
return True
|
|
4825
5126
|
|
|
5127
|
+
def reopen_workflow_step_atomic(
|
|
5128
|
+
self,
|
|
5129
|
+
workflow_id: str,
|
|
5130
|
+
step_id: int,
|
|
5131
|
+
step_number: int,
|
|
5132
|
+
) -> bool:
|
|
5133
|
+
"""Atomically reopen a completed/skipped step.
|
|
5134
|
+
|
|
5135
|
+
Sets the target step back to 'active', resets all later steps
|
|
5136
|
+
to 'pending', and moves workflow.current_step back.
|
|
5137
|
+
|
|
5138
|
+
Args:
|
|
5139
|
+
workflow_id: The workflow ID.
|
|
5140
|
+
step_id: The ID of the step to reopen.
|
|
5141
|
+
step_number: The step_number of the step to reopen.
|
|
5142
|
+
|
|
5143
|
+
Returns:
|
|
5144
|
+
True if reopen succeeded, False otherwise.
|
|
5145
|
+
"""
|
|
5146
|
+
with self._writer() as conn:
|
|
5147
|
+
now = datetime.utcnow().isoformat()
|
|
5148
|
+
|
|
5149
|
+
# Set target step back to active, clear completed_at
|
|
5150
|
+
conn.execute(
|
|
5151
|
+
"UPDATE workflow_steps SET status = 'active', completed_at = NULL, updated_at = ? "
|
|
5152
|
+
"WHERE id = ?",
|
|
5153
|
+
(now, step_id),
|
|
5154
|
+
)
|
|
5155
|
+
|
|
5156
|
+
# Reset all later steps to pending, clear timestamps
|
|
5157
|
+
conn.execute(
|
|
5158
|
+
"UPDATE workflow_steps SET status = 'pending', started_at = NULL, "
|
|
5159
|
+
"completed_at = NULL, updated_at = ? "
|
|
5160
|
+
"WHERE workflow_id = ? AND step_number > ? AND archived_at IS NULL",
|
|
5161
|
+
(now, workflow_id, step_number),
|
|
5162
|
+
)
|
|
5163
|
+
|
|
5164
|
+
# Move workflow.current_step back and ensure workflow is active
|
|
5165
|
+
conn.execute(
|
|
5166
|
+
"UPDATE workflows SET current_step = ?, status = 'active', updated_at = ? "
|
|
5167
|
+
"WHERE id = ?",
|
|
5168
|
+
(step_number, now, workflow_id),
|
|
5169
|
+
)
|
|
5170
|
+
|
|
5171
|
+
return True
|
|
5172
|
+
|
|
4826
5173
|
# ========== Planning Sessions ==========
|
|
4827
5174
|
|
|
4828
5175
|
def create_planning_session(
|
|
@@ -4833,6 +5180,11 @@ class ProjectDatabase:
|
|
|
4833
5180
|
messages: Optional[list] = None,
|
|
4834
5181
|
artifacts: Optional[dict] = None,
|
|
4835
5182
|
status: str = "active",
|
|
5183
|
+
# New iteration-based fields
|
|
5184
|
+
prompt: Optional[str] = None,
|
|
5185
|
+
iterations_requested: int = 1,
|
|
5186
|
+
run_status: str = "pending",
|
|
5187
|
+
is_legacy: bool = False,
|
|
4836
5188
|
) -> dict:
|
|
4837
5189
|
"""Create a planning session for an interactive step.
|
|
4838
5190
|
|
|
@@ -4843,6 +5195,10 @@ class ProjectDatabase:
|
|
|
4843
5195
|
messages: Initial messages (default: empty list).
|
|
4844
5196
|
artifacts: Optional artifacts dict.
|
|
4845
5197
|
status: Session status (default: 'active').
|
|
5198
|
+
prompt: User's guidance prompt for iteration-based sessions.
|
|
5199
|
+
iterations_requested: Number of iterations requested (default: 1).
|
|
5200
|
+
run_status: Execution status for iterations (default: 'pending').
|
|
5201
|
+
is_legacy: Whether this is a legacy chat-based session.
|
|
4846
5202
|
|
|
4847
5203
|
Returns:
|
|
4848
5204
|
The created session dict.
|
|
@@ -4853,9 +5209,24 @@ class ProjectDatabase:
|
|
|
4853
5209
|
artifacts_json = json.dumps(artifacts) if artifacts else None
|
|
4854
5210
|
conn.execute(
|
|
4855
5211
|
"""INSERT INTO planning_sessions
|
|
4856
|
-
(id, workflow_id, step_id, messages, artifacts, status,
|
|
4857
|
-
|
|
4858
|
-
|
|
5212
|
+
(id, workflow_id, step_id, messages, artifacts, status,
|
|
5213
|
+
prompt, iterations_requested, iterations_completed, current_iteration,
|
|
5214
|
+
run_status, is_legacy, created_at, updated_at)
|
|
5215
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, 0, ?, ?, ?, ?)""",
|
|
5216
|
+
(
|
|
5217
|
+
id,
|
|
5218
|
+
workflow_id,
|
|
5219
|
+
step_id,
|
|
5220
|
+
messages_json,
|
|
5221
|
+
artifacts_json,
|
|
5222
|
+
status,
|
|
5223
|
+
prompt,
|
|
5224
|
+
iterations_requested,
|
|
5225
|
+
run_status,
|
|
5226
|
+
is_legacy,
|
|
5227
|
+
now,
|
|
5228
|
+
now,
|
|
5229
|
+
),
|
|
4859
5230
|
)
|
|
4860
5231
|
return self.get_planning_session(id)
|
|
4861
5232
|
|
|
@@ -4876,10 +5247,11 @@ class ProjectDatabase:
|
|
|
4876
5247
|
return None
|
|
4877
5248
|
|
|
4878
5249
|
def get_planning_session_by_step(self, step_id: int) -> Optional[dict]:
|
|
4879
|
-
"""Get planning session
|
|
5250
|
+
"""Get the most recent planning session for a step ID."""
|
|
4880
5251
|
with self._reader() as conn:
|
|
4881
5252
|
cursor = conn.execute(
|
|
4882
|
-
"SELECT * FROM planning_sessions WHERE step_id = ?",
|
|
5253
|
+
"SELECT * FROM planning_sessions WHERE step_id = ? ORDER BY created_at DESC LIMIT 1",
|
|
5254
|
+
(step_id,),
|
|
4883
5255
|
)
|
|
4884
5256
|
row = cursor.fetchone()
|
|
4885
5257
|
if row:
|
|
@@ -5000,6 +5372,11 @@ class ProjectDatabase:
|
|
|
5000
5372
|
id: str,
|
|
5001
5373
|
status: Optional[str] = None,
|
|
5002
5374
|
artifacts: Optional[dict] = None,
|
|
5375
|
+
# New iteration-based fields
|
|
5376
|
+
run_status: Optional[str] = None,
|
|
5377
|
+
current_iteration: Optional[int] = None,
|
|
5378
|
+
iterations_completed: Optional[int] = None,
|
|
5379
|
+
error_message: Optional[str] = None,
|
|
5003
5380
|
) -> bool:
|
|
5004
5381
|
"""Update planning session fields."""
|
|
5005
5382
|
updates = []
|
|
@@ -5011,6 +5388,18 @@ class ProjectDatabase:
|
|
|
5011
5388
|
if artifacts is not None:
|
|
5012
5389
|
updates.append("artifacts = ?")
|
|
5013
5390
|
params.append(json.dumps(artifacts))
|
|
5391
|
+
if run_status is not None:
|
|
5392
|
+
updates.append("run_status = ?")
|
|
5393
|
+
params.append(run_status)
|
|
5394
|
+
if current_iteration is not None:
|
|
5395
|
+
updates.append("current_iteration = ?")
|
|
5396
|
+
params.append(current_iteration)
|
|
5397
|
+
if iterations_completed is not None:
|
|
5398
|
+
updates.append("iterations_completed = ?")
|
|
5399
|
+
params.append(iterations_completed)
|
|
5400
|
+
if error_message is not None:
|
|
5401
|
+
updates.append("error_message = ?")
|
|
5402
|
+
params.append(error_message)
|
|
5014
5403
|
|
|
5015
5404
|
if not updates:
|
|
5016
5405
|
return False
|
|
@@ -5030,7 +5419,309 @@ class ProjectDatabase:
|
|
|
5030
5419
|
self, id: str, artifacts: Optional[dict] = None
|
|
5031
5420
|
) -> bool:
|
|
5032
5421
|
"""Mark a planning session as completed."""
|
|
5033
|
-
return self.update_planning_session(
|
|
5422
|
+
return self.update_planning_session(
|
|
5423
|
+
id, status="completed", run_status="completed", artifacts=artifacts
|
|
5424
|
+
)
|
|
5425
|
+
|
|
5426
|
+
def get_running_planning_session(self, workflow_id: str) -> Optional[dict]:
    """Return the currently running planning session for a workflow, if any.

    Used to prevent multiple concurrent sessions.

    Args:
        workflow_id: The workflow ID to check.

    Returns:
        The running session dict if found, None otherwise.
    """
    with self._reader() as conn:
        row = conn.execute(
            """SELECT * FROM planning_sessions
                WHERE workflow_id = ? AND run_status = 'running'
                ORDER BY created_at DESC LIMIT 1""",
            (workflow_id,),
        ).fetchone()
    if row is None:
        return None
    session = dict(row)
    # messages/artifacts are persisted as JSON text; decode them when present.
    for column in ("messages", "artifacts"):
        if session.get(column):
            session[column] = json.loads(session[column])
    return session
|
|
5453
|
+
|
|
5454
|
+
def cancel_planning_session(self, id: str) -> bool:
    """Cancel a running planning session.

    Args:
        id: The session ID to cancel.

    Returns:
        True if session was cancelled, False otherwise.
    """
    # Only a session still in 'running' state may transition to 'cancelled'.
    now = datetime.utcnow().isoformat()
    with self._writer() as conn:
        affected = conn.execute(
            """UPDATE planning_sessions
                SET run_status = 'cancelled', updated_at = ?
                WHERE id = ? AND run_status = 'running'""",
            (now, id),
        ).rowcount
    return affected > 0
|
|
5471
|
+
|
|
5472
|
+
# ========== Planning Iterations ==========
|
|
5473
|
+
|
|
5474
|
+
def create_planning_iteration(
    self,
    session_id: str,
    iteration_number: int,
    status: str = "pending",
) -> Optional[dict]:
    """Create a new planning iteration record.

    Args:
        session_id: Parent session ID.
        iteration_number: The iteration number (1-indexed).
        status: Initial status (default: 'pending').

    Returns:
        The created iteration dict.
    """
    with self._writer() as conn:
        new_id = conn.execute(
            """INSERT INTO planning_iterations
                (session_id, iteration_number, status)
                VALUES (?, ?, ?)""",
            (session_id, iteration_number, status),
        ).lastrowid
        # Re-read through the standard getter so JSON columns are decoded.
        return self.get_planning_iteration(new_id)
|
|
5498
|
+
|
|
5499
|
+
def get_planning_iteration(self, iteration_id: int) -> Optional[dict]:
    """Fetch a single planning iteration row by its primary key."""
    with self._reader() as conn:
        row = conn.execute(
            "SELECT * FROM planning_iterations WHERE id = ?", (iteration_id,)
        ).fetchone()
    if row is None:
        return None
    iteration = dict(row)
    # tool_calls is persisted as a JSON string; decode it for callers.
    if iteration.get("tool_calls"):
        iteration["tool_calls"] = json.loads(iteration["tool_calls"])
    return iteration
|
|
5512
|
+
|
|
5513
|
+
def list_planning_iterations(self, session_id: str) -> list[dict]:
    """List all iterations for a planning session.

    Args:
        session_id: The session ID.

    Returns:
        List of iteration dicts ordered by iteration_number.
    """

    def _decode(row) -> dict:
        # tool_calls is stored as JSON text; decode it when present.
        item = dict(row)
        if item.get("tool_calls"):
            item["tool_calls"] = json.loads(item["tool_calls"])
        return item

    with self._reader() as conn:
        rows = conn.execute(
            """SELECT * FROM planning_iterations
                WHERE session_id = ?
                ORDER BY iteration_number ASC""",
            (session_id,),
        ).fetchall()
    return [_decode(row) for row in rows]
|
|
5536
|
+
|
|
5537
|
+
def update_planning_iteration(
    self,
    iteration_id: int,
    status: Optional[str] = None,
    started_at: Optional[str] = None,
    completed_at: Optional[str] = None,
    chars_added: Optional[int] = None,
    chars_removed: Optional[int] = None,
    tool_calls: Optional[list] = None,
    summary: Optional[str] = None,
    error_message: Optional[str] = None,
    diff_text: Optional[str] = None,
    doc_before: Optional[str] = None,
    doc_after: Optional[str] = None,
) -> bool:
    """Update a planning iteration.

    Args:
        iteration_id: The iteration ID.
        status: New status.
        started_at: Start timestamp.
        completed_at: Completion timestamp.
        chars_added: Characters added to design doc.
        chars_removed: Characters removed from design doc.
        tool_calls: List of tool call records.
        summary: Brief summary of changes.
        error_message: Error message if failed.
        diff_text: Unified diff of changes.
        doc_before: Document content before iteration.
        doc_after: Document content after iteration.

    Returns:
        True if updated, False otherwise.
    """
    # Column -> value mapping; None means "leave unchanged". tool_calls is
    # serialized to JSON before storage. Insertion order matches the
    # parameter order so the generated SET clause is deterministic.
    candidates = {
        "status": status,
        "started_at": started_at,
        "completed_at": completed_at,
        "chars_added": chars_added,
        "chars_removed": chars_removed,
        "tool_calls": json.dumps(tool_calls) if tool_calls is not None else None,
        "summary": summary,
        "error_message": error_message,
        "diff_text": diff_text,
        "doc_before": doc_before,
        "doc_after": doc_after,
    }
    changed = {col: val for col, val in candidates.items() if val is not None}
    if not changed:
        return False

    assignments = ", ".join(f"{col} = ?" for col in changed)
    params: list[Any] = [*changed.values(), iteration_id]

    with self._writer() as conn:
        cursor = conn.execute(
            f"UPDATE planning_iterations SET {assignments} WHERE id = ?",
            params,
        )
        return cursor.rowcount > 0
|
|
5619
|
+
|
|
5620
|
+
def start_planning_iteration(self, iteration_id: int) -> bool:
    """Mark an iteration as started (status 'running', start time stamped now)."""
    started = datetime.utcnow().isoformat()
    return self.update_planning_iteration(
        iteration_id, status="running", started_at=started
    )
|
|
5627
|
+
|
|
5628
|
+
def complete_planning_iteration(
    self,
    iteration_id: int,
    chars_added: int = 0,
    chars_removed: int = 0,
    tool_calls: Optional[list] = None,
    summary: Optional[str] = None,
    diff_text: Optional[str] = None,
    doc_before: Optional[str] = None,
    doc_after: Optional[str] = None,
) -> bool:
    """Mark an iteration as completed with results."""
    # Bundle the terminal state and delegate persistence to the generic updater.
    outcome = {
        "status": "completed",
        "completed_at": datetime.utcnow().isoformat(),
        "chars_added": chars_added,
        "chars_removed": chars_removed,
        "tool_calls": tool_calls,
        "summary": summary,
        "diff_text": diff_text,
        "doc_before": doc_before,
        "doc_after": doc_after,
    }
    return self.update_planning_iteration(iteration_id, **outcome)
|
|
5652
|
+
|
|
5653
|
+
def fail_planning_iteration(
    self, iteration_id: int, error_message: str
) -> bool:
    """Mark an iteration as failed, recording the error and completion time."""
    finished = datetime.utcnow().isoformat()
    return self.update_planning_iteration(
        iteration_id,
        status="failed",
        completed_at=finished,
        error_message=error_message,
    )
|
|
5663
|
+
|
|
5664
|
+
# ========== Planning Iteration Events ==========
|
|
5665
|
+
|
|
5666
|
+
def add_planning_iteration_event(
    self,
    session_id: str,
    event_type: str,
    iteration_number: Optional[int] = None,
    content: Optional[str] = None,
    tool_name: Optional[str] = None,
    tool_input: Optional[str] = None,
    tool_result: Optional[str] = None,
    event_data: Optional[str] = None,
) -> int:
    """Add a planning iteration event to the persistent log.

    Returns:
        The event ID.
    """
    # Column order must match the INSERT column list below.
    values = (
        session_id,
        iteration_number,
        event_type,
        content,
        tool_name,
        tool_input,
        tool_result,
        event_data,
    )
    with self._writer() as conn:
        cursor = conn.execute(
            """INSERT INTO planning_iteration_events
                (session_id, iteration_number, event_type, content, tool_name, tool_input, tool_result, event_data)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
            values,
        )
        return cursor.lastrowid
|
|
5690
|
+
|
|
5691
|
+
def get_planning_iteration_events(
    self,
    session_id: str,
    after_id: int = 0,
    limit: int = 500,
) -> list[dict]:
    """Get planning iteration events for a session.

    Args:
        session_id: The session ID.
        after_id: Only return events with id > after_id (for pagination/polling).
        limit: Maximum events to return.

    Returns:
        List of event dicts ordered by id ASC.
    """
    query = """SELECT * FROM planning_iteration_events
                WHERE session_id = ? AND id > ?
                ORDER BY id ASC
                LIMIT ?"""
    with self._reader() as conn:
        rows = conn.execute(query, (session_id, after_id, limit)).fetchall()
    return [dict(row) for row in rows]
|
|
5716
|
+
|
|
5717
|
+
def get_latest_event_timestamp(self, session_id: str) -> Optional[str]:
    """Return the timestamp of the most recent event for a planning session."""
    with self._reader() as conn:
        latest = conn.execute(
            "SELECT MAX(timestamp) FROM planning_iteration_events WHERE session_id = ?",
            (session_id,),
        ).fetchone()
    # MAX() yields a one-column row whose value is NULL when no events exist.
    if latest and latest[0]:
        return latest[0]
    return None
|
|
5034
5725
|
|
|
5035
5726
|
# ========== Utilities ==========
|
|
5036
5727
|
|