ralphx 0.3.5__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ralphx/__init__.py +1 -1
- ralphx/adapters/base.py +18 -2
- ralphx/adapters/claude_cli.py +415 -350
- ralphx/api/routes/auth.py +105 -32
- ralphx/api/routes/items.py +4 -0
- ralphx/api/routes/loops.py +101 -15
- ralphx/api/routes/planning.py +866 -17
- ralphx/api/routes/resources.py +528 -6
- ralphx/api/routes/stream.py +161 -114
- ralphx/api/routes/templates.py +1 -0
- ralphx/api/routes/workflows.py +257 -25
- ralphx/core/auth.py +32 -7
- ralphx/core/checkpoint.py +118 -0
- ralphx/core/executor.py +292 -85
- ralphx/core/loop_templates.py +59 -14
- ralphx/core/planning_iteration_executor.py +633 -0
- ralphx/core/planning_service.py +11 -4
- ralphx/core/project_db.py +835 -85
- ralphx/core/resources.py +28 -2
- ralphx/core/session.py +62 -10
- ralphx/core/templates.py +74 -87
- ralphx/core/workflow_executor.py +35 -3
- ralphx/mcp/tools/diagnostics.py +1 -1
- ralphx/mcp/tools/monitoring.py +10 -16
- ralphx/mcp/tools/workflows.py +5 -5
- ralphx/models/loop.py +1 -1
- ralphx/models/session.py +5 -0
- ralphx/static/assets/index-DnihHetG.js +265 -0
- ralphx/static/assets/index-DnihHetG.js.map +1 -0
- ralphx/static/assets/index-nIDWmtzm.css +1 -0
- ralphx/static/index.html +2 -2
- ralphx/templates/loop_templates/consumer.md +2 -2
- {ralphx-0.3.5.dist-info → ralphx-0.4.1.dist-info}/METADATA +1 -1
- {ralphx-0.3.5.dist-info → ralphx-0.4.1.dist-info}/RECORD +36 -35
- ralphx/static/assets/index-0ovNnfOq.css +0 -1
- ralphx/static/assets/index-CY9s08ZB.js +0 -251
- ralphx/static/assets/index-CY9s08ZB.js.map +0 -1
- {ralphx-0.3.5.dist-info → ralphx-0.4.1.dist-info}/WHEEL +0 -0
- {ralphx-0.3.5.dist-info → ralphx-0.4.1.dist-info}/entry_points.txt +0 -0
ralphx/core/project_db.py
CHANGED
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
 
 
 # Schema version for project DB
-PROJECT_SCHEMA_VERSION = 16
+PROJECT_SCHEMA_VERSION = 21
 
 # Project database schema - all project-specific data
 PROJECT_SCHEMA_SQL = """
@@ -67,7 +67,8 @@ CREATE TABLE IF NOT EXISTS sessions (
     started_at TIMESTAMP,
     duration_seconds REAL,
     status TEXT,
-    items_added TEXT
+    items_added TEXT,
+    account_email TEXT
 );
 
 -- Session events table (stores parsed events for history and streaming)
@@ -306,19 +307,65 @@ CREATE TABLE IF NOT EXISTS workflow_steps (
 );
 
 -- Planning sessions (for interactive steps)
+-- New iteration-based paradigm: users provide a prompt + iteration count, system runs N iterations
 CREATE TABLE IF NOT EXISTS planning_sessions (
     id TEXT PRIMARY KEY,
     workflow_id TEXT NOT NULL,
     step_id INTEGER NOT NULL,
-    messages JSON NOT NULL DEFAULT '[]', -- Conversation history
+    messages JSON NOT NULL DEFAULT '[]', -- Conversation history (legacy chat-based sessions)
     artifacts JSON, -- Generated design doc, guardrails
     status TEXT DEFAULT 'active', -- active, completed
+    -- New iteration-based fields (v17)
+    prompt TEXT, -- User's guidance for this session
+    iterations_requested INTEGER DEFAULT 1, -- Number of iterations requested
+    iterations_completed INTEGER DEFAULT 0, -- Number of iterations completed
+    current_iteration INTEGER DEFAULT 0, -- Current iteration number (0 = not started)
+    run_status TEXT DEFAULT 'pending', -- pending, running, completed, cancelled, error
+    is_legacy BOOLEAN DEFAULT FALSE, -- TRUE for old chat-based sessions
+    error_message TEXT, -- Error message if run_status='error'
     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (workflow_id) REFERENCES workflows(id) ON DELETE CASCADE,
     FOREIGN KEY (step_id) REFERENCES workflow_steps(id) ON DELETE CASCADE
 );
 
+-- Planning iterations (per-iteration tracking for iteration-based sessions)
+CREATE TABLE IF NOT EXISTS planning_iterations (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    session_id TEXT NOT NULL,
+    iteration_number INTEGER NOT NULL,
+    started_at TIMESTAMP,
+    completed_at TIMESTAMP,
+    status TEXT DEFAULT 'pending', -- pending, running, completed, failed, skipped
+    chars_added INTEGER DEFAULT 0,
+    chars_removed INTEGER DEFAULT 0,
+    tool_calls JSON DEFAULT '[]', -- [{tool, input_preview, duration_ms}] - truncated
+    summary TEXT, -- Brief summary of what changed
+    diff_text TEXT, -- Unified diff of changes
+    doc_before TEXT, -- Document content before iteration
+    doc_after TEXT, -- Document content after iteration
+    error_message TEXT,
+    FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+);
+
+-- Planning iteration events (persistent event log for streaming/reconnection)
+CREATE TABLE IF NOT EXISTS planning_iteration_events (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    session_id TEXT NOT NULL,
+    iteration_number INTEGER,
+    event_type TEXT NOT NULL,
+    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    content TEXT,
+    tool_name TEXT,
+    tool_input TEXT,
+    tool_result TEXT,
+    event_data TEXT,
+    FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS idx_pie_session ON planning_iteration_events(session_id);
+CREATE INDEX IF NOT EXISTS idx_pie_session_id ON planning_iteration_events(session_id, id);
+
 -- Workflow-scoped resources (design docs, guardrails, input files)
 CREATE TABLE IF NOT EXISTS workflow_resources (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -452,6 +499,8 @@ CREATE INDEX IF NOT EXISTS idx_workflow_steps_workflow ON workflow_steps(workflow_id);
 CREATE INDEX IF NOT EXISTS idx_workflow_steps_status ON workflow_steps(status);
 CREATE INDEX IF NOT EXISTS idx_planning_sessions_workflow ON planning_sessions(workflow_id);
 CREATE INDEX IF NOT EXISTS idx_planning_sessions_status ON planning_sessions(status);
+CREATE INDEX IF NOT EXISTS idx_planning_sessions_run_status ON planning_sessions(run_status);
+CREATE INDEX IF NOT EXISTS idx_planning_iterations_session ON planning_iterations(session_id);
 
 -- Workflow resources indexes
 CREATE INDEX IF NOT EXISTS idx_workflow_resources_workflow ON workflow_resources(workflow_id, resource_type);
@@ -567,9 +616,8 @@ class ProjectDatabase:
                 "Please delete your .ralphx/ralphx.db file and start fresh."
             )
 
-        #
+        # Create schema tables (indexes created after migrations)
        conn.executescript(PROJECT_SCHEMA_SQL)
-        conn.executescript(PROJECT_INDEXES_SQL)
 
         if current_version == 0:
             # Fresh database
@@ -583,6 +631,26 @@ class ProjectDatabase:
         # Run migrations (for future versions > 6)
         self._run_migrations(conn, current_version)
 
+        # Self-heal: verify critical columns exist even if migrations
+        # partially applied (e.g., version bumped but ALTER TABLE lost)
+        self._verify_schema_columns(conn)
+
+        # Create indexes AFTER migrations so all columns exist
+        conn.executescript(PROJECT_INDEXES_SQL)
+
+    def _verify_schema_columns(self, conn: sqlite3.Connection) -> None:
+        """Verify expected columns exist and repair if missing.
+
+        Handles edge cases where migrations bumped the schema version
+        but the ALTER TABLE didn't persist (e.g., due to executescript()
+        transaction semantics).
+        """
+        cursor = conn.execute("PRAGMA table_info(sessions)")
+        columns = {row[1] for row in cursor.fetchall()}
+        if "account_email" not in columns:
+            conn.execute("ALTER TABLE sessions ADD COLUMN account_email TEXT")
+            logger.warning("Repaired missing account_email column in sessions table")
+
     def _backup_before_migration(self, from_version: int) -> None:
         """Create a backup of the database before running migrations.
 
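The self-heal step guards against a real SQLite footgun: `executescript()` commits any open transaction, so a migration can bump the stored schema version while its `ALTER TABLE` is lost. A minimal sketch of the same verify-and-repair idea, generalized to a column map (the table/column entries beyond `sessions.account_email` are illustrative, not part of the package):

```python
import logging
import sqlite3

logger = logging.getLogger(__name__)

# Expected columns per table that we know how to re-add if missing.
EXPECTED_COLUMNS = {
    "sessions": {"account_email": "TEXT"},
}

def verify_schema_columns(conn: sqlite3.Connection) -> None:
    """Re-add columns that a partially applied migration may have lost."""
    for table, expected in EXPECTED_COLUMNS.items():
        rows = conn.execute(f"PRAGMA table_info({table})").fetchall()
        present = {row[1] for row in rows}  # row[1] is the column name
        for column, sql_type in expected.items():
            if column not in present:
                conn.execute(f"ALTER TABLE {table} ADD COLUMN {column} {sql_type}")
                logger.warning("Repaired missing column %s.%s", table, column)
```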
@@ -663,6 +731,31 @@ class ProjectDatabase:
         # Migration from v15 to v16: Remove namespace from workflows table
         if from_version == 15:
             self._migrate_v15_to_v16(conn)
+            from_version = 16  # Continue to next migration
+
+        # Migration from v16 to v17: Add iteration-based planning fields
+        if from_version == 16:
+            self._migrate_v16_to_v17(conn)
+            from_version = 17  # Continue to next migration
+
+        # Migration from v17 to v18: Add planning_iteration_events table
+        if from_version == 17:
+            self._migrate_v17_to_v18(conn)
+            from_version = 18  # Continue to next migration
+
+        # Migration from v18 to v19: Add diff_text column to planning_iterations
+        if from_version == 18:
+            self._migrate_v18_to_v19(conn)
+            from_version = 19  # Continue to next migration
+
+        # Migration from v19 to v20: Add doc_before/doc_after columns
+        if from_version == 19:
+            self._migrate_v19_to_v20(conn)
+            from_version = 20  # Continue to next migration
+
+        # Migration from v20 to v21: Add account_email to sessions
+        if from_version == 20:
+            self._migrate_v20_to_v21(conn)
 
         # Seed workflow templates for fresh databases
         self._seed_workflow_templates(conn)
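Each branch stamps `from_version` forward, so a database at any older version walks the whole chain in one pass. The same dispatch reduced to a table-driven sketch (the lambda bodies are abridged stand-ins for the real `_migrate_*` methods):

```python
import sqlite3
from typing import Callable

# version -> single-step upgrade; each entry raises the schema by exactly one.
MIGRATIONS: dict[int, Callable[[sqlite3.Connection], None]] = {
    16: lambda conn: conn.execute(
        "ALTER TABLE planning_sessions ADD COLUMN prompt TEXT"),
    17: lambda conn: conn.execute(
        "CREATE TABLE IF NOT EXISTS planning_iteration_events (id INTEGER PRIMARY KEY)"),
}
TARGET_VERSION = 18  # illustrative; the package targets 21

def run_migrations(conn: sqlite3.Connection, from_version: int) -> int:
    """Chain one-version steps; equivalent to the explicit from_version stamps."""
    while from_version < TARGET_VERSION:
        MIGRATIONS[from_version](conn)
        from_version += 1
    return from_version
```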
@@ -978,41 +1071,169 @@ class ProjectDatabase:
         otherwise the ON DELETE CASCADE on workflow_steps will delete all steps!
         """
         # 0. Disable foreign keys to prevent CASCADE deletes during table swap
+        # IMPORTANT: PRAGMA foreign_keys is NOT transactional, so we must
+        # re-enable in a finally block to prevent silent FK violations
         conn.execute("PRAGMA foreign_keys=OFF")
+        try:
+            # 1. Create new table without namespace
+            conn.execute("""
+                CREATE TABLE workflows_new (
+                    id TEXT PRIMARY KEY,
+                    template_id TEXT,
+                    name TEXT NOT NULL,
+                    status TEXT DEFAULT 'draft',
+                    current_step INTEGER DEFAULT 1,
+                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    archived_at TIMESTAMP
+                )
+            """)
+
+            # 2. Copy data (excluding namespace)
+            conn.execute("""
+                INSERT INTO workflows_new (id, template_id, name, status, current_step, created_at, updated_at, archived_at)
+                SELECT id, template_id, name, status, current_step, created_at, updated_at, archived_at
+                FROM workflows
+            """)
+
+            # 3. Drop old table and index
+            conn.execute("DROP INDEX IF EXISTS idx_workflows_namespace")
+            conn.execute("DROP TABLE workflows")
+
+            # 4. Rename new table
+            conn.execute("ALTER TABLE workflows_new RENAME TO workflows")
+
+            # 5. Recreate the status index on the new table
+            conn.execute("CREATE INDEX IF NOT EXISTS idx_workflows_status ON workflows(status)")
+        finally:
+            # 6. Re-enable foreign keys (must happen even on failure)
+            conn.execute("PRAGMA foreign_keys=ON")
+
+    def _migrate_v16_to_v17(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v16 to v17.
+
+        Adds:
+        - New columns to planning_sessions for iteration-based paradigm
+        - New planning_iterations table for per-iteration tracking
+        - Marks existing chat-based sessions as legacy
+        """
+        # 1. Add new columns to planning_sessions
+        conn.execute("ALTER TABLE planning_sessions ADD COLUMN prompt TEXT")
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN iterations_requested INTEGER DEFAULT 1"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN iterations_completed INTEGER DEFAULT 0"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN current_iteration INTEGER DEFAULT 0"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN run_status TEXT DEFAULT 'pending'"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN is_legacy BOOLEAN DEFAULT FALSE"
+        )
+        conn.execute("ALTER TABLE planning_sessions ADD COLUMN error_message TEXT")
 
-        #
+        # 2. Mark existing sessions with messages as legacy
         conn.execute("""
+            UPDATE planning_sessions
+            SET is_legacy = TRUE, run_status = 'completed'
+            WHERE json_array_length(messages) > 0 AND prompt IS NULL
+        """)
+
+        # 3. Create planning_iterations table
+        conn.execute("""
+            CREATE TABLE IF NOT EXISTS planning_iterations (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                session_id TEXT NOT NULL,
+                iteration_number INTEGER NOT NULL,
+                started_at TIMESTAMP,
+                completed_at TIMESTAMP,
+                status TEXT DEFAULT 'pending',
+                chars_added INTEGER DEFAULT 0,
+                chars_removed INTEGER DEFAULT 0,
+                tool_calls JSON DEFAULT '[]',
+                summary TEXT,
+                error_message TEXT,
+                FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
             )
         """)
 
-        #
+        # 4. Add indexes
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_planning_sessions_run_status ON planning_sessions(run_status)"
+        )
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_planning_iterations_session ON planning_iterations(session_id)"
+        )
+
+    def _migrate_v17_to_v18(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v17 to v18.
+
+        Adds:
+        - planning_iteration_events table for persistent event streaming/reconnection
+        """
         conn.execute("""
+            CREATE TABLE IF NOT EXISTS planning_iteration_events (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                session_id TEXT NOT NULL,
+                iteration_number INTEGER,
+                event_type TEXT NOT NULL,
+                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                content TEXT,
+                tool_name TEXT,
+                tool_input TEXT,
+                tool_result TEXT,
+                event_data TEXT,
+                FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+            )
         """)
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_pie_session ON planning_iteration_events(session_id)"
+        )
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_pie_session_id ON planning_iteration_events(session_id, id)"
+        )
 
-        conn.execute("DROP TABLE workflows")
+    def _migrate_v18_to_v19(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v18 to v19.
 
+        Adds:
+        - diff_text column to planning_iterations for storing unified diffs
+        """
+        conn.execute(
+            "ALTER TABLE planning_iterations ADD COLUMN diff_text TEXT"
+        )
 
+    def _migrate_v19_to_v20(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v19 to v20.
 
+        Adds:
+        - doc_before column to planning_iterations for pre-iteration doc snapshot
+        - doc_after column to planning_iterations for post-iteration doc snapshot
+        """
+        conn.execute(
+            "ALTER TABLE planning_iterations ADD COLUMN doc_before TEXT"
+        )
+        conn.execute(
+            "ALTER TABLE planning_iterations ADD COLUMN doc_after TEXT"
+        )
+
+    def _migrate_v20_to_v21(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v20 to v21.
+
+        Adds:
+        - account_email column to sessions for tracking which Claude account was used
+        """
+        # Idempotent: check if column already exists before adding
+        cursor = conn.execute("PRAGMA table_info(sessions)")
+        columns = {row[1] for row in cursor.fetchall()}
+        if "account_email" not in columns:
+            conn.execute(
+                "ALTER TABLE sessions ADD COLUMN account_email TEXT"
+            )
 
     # ========== Loops ==========
 
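The try/finally matters because `PRAGMA foreign_keys` is a connection setting outside transaction control: if the swap raised after switching it OFF, every later write on that connection would silently skip FK enforcement. The guard in isolation, with generic table names for illustration:

```python
import sqlite3

def swap_table(conn: sqlite3.Connection) -> None:
    """Rebuild a table in place without firing ON DELETE CASCADE."""
    conn.execute("PRAGMA foreign_keys=OFF")  # not transactional!
    try:
        conn.execute("CREATE TABLE t_new (id TEXT PRIMARY KEY, name TEXT)")
        conn.execute("INSERT INTO t_new (id, name) SELECT id, name FROM t")
        conn.execute("DROP TABLE t")
        conn.execute("ALTER TABLE t_new RENAME TO t")
    finally:
        # Runs even if the swap fails, so FK enforcement is never left off.
        conn.execute("PRAGMA foreign_keys=ON")
```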
@@ -1222,6 +1443,7 @@ class ProjectDatabase:
         iteration: int,
         mode: Optional[str] = None,
         status: str = "running",
+        account_email: Optional[str] = None,
     ) -> dict:
         """Create a new session.
 
@@ -1231,15 +1453,16 @@ class ProjectDatabase:
             iteration: Iteration number.
             mode: Mode name for this session.
             status: Session status (running, completed, error).
+            account_email: Email of the Claude account used for this session.
         """
         with self._writer() as conn:
             now = datetime.utcnow().isoformat()
             conn.execute(
                 """
-                INSERT INTO sessions (session_id, run_id, iteration, mode, started_at, status)
-                VALUES (?, ?, ?, ?, ?, ?)
+                INSERT OR IGNORE INTO sessions (session_id, run_id, iteration, mode, started_at, status, account_email)
+                VALUES (?, ?, ?, ?, ?, ?, ?)
                 """,
-                (session_id, run_id, iteration, mode, now, status),
+                (session_id, run_id, iteration, mode, now, status, account_email),
             )
             return self.get_session(session_id)
 
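Switching to `INSERT OR IGNORE` makes session creation idempotent: replaying the same session_id (say, on an executor retry) becomes a no-op instead of an IntegrityError. A hypothetical call sequence, assuming `session_id` is the table's primary key and that the signature starts with `session_id, run_id` as the INSERT column order suggests:

```python
# First call inserts; the replay is silently skipped and the same row returned.
first = db.create_session(
    session_id="sess-1", run_id="run-1", iteration=1,
    mode="consumer", account_email="user@example.com",  # illustrative values
)
again = db.create_session(session_id="sess-1", run_id="run-1", iteration=1)
assert again["session_id"] == first["session_id"]
```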
@@ -1273,27 +1496,33 @@ class ProjectDatabase:
     def list_sessions(
         self,
         run_id: Optional[str] = None,
+        status: Optional[str] = None,
         limit: int = 100,
     ) -> list[dict]:
         """List sessions with optional filters."""
         with self._reader() as conn:
+            conditions = ["1=1"]
+            params: list[Any] = []
+
             if run_id:
+                conditions.append("run_id = ?")
+                params.append(run_id)
+            if status:
+                conditions.append("status = ?")
+                params.append(status)
+
+            cursor = conn.execute(
+                f"""
+                SELECT * FROM sessions
+                WHERE {' AND '.join(conditions)}
+                ORDER BY started_at DESC LIMIT ?
+                """,
+                params + [limit],
+            )
             return [dict(row) for row in cursor.fetchall()]
 
     _SESSION_UPDATE_COLS = frozenset({
-        "duration_seconds", "status", "items_added"
+        "duration_seconds", "status", "items_added", "account_email"
     })
 
     def update_session(self, session_id: str, **kwargs) -> bool:
@@ -1371,6 +1600,7 @@ class ProjectDatabase:
         self,
         session_id: str,
         after_id: Optional[int] = None,
+        event_type: Optional[str] = None,
         limit: int = 500,
     ) -> list[dict]:
         """Get events for a session.
@@ -1378,30 +1608,31 @@ class ProjectDatabase:
         Args:
             session_id: Session UUID.
             after_id: Only return events with ID greater than this (for polling).
+            event_type: Filter by event type (text, tool_call, tool_result, error).
             limit: Maximum number of events to return.
 
         Returns:
             List of event dicts.
         """
         with self._reader() as conn:
+            conditions = ["session_id = ?"]
+            params: list[Any] = [session_id]
+
             if after_id:
-                """,
-                (session_id, limit),
-            )
+                conditions.append("id > ?")
+                params.append(after_id)
+            if event_type:
+                conditions.append("event_type = ?")
+                params.append(event_type)
+
+            cursor = conn.execute(
+                f"""
+                SELECT * FROM session_events
+                WHERE {' AND '.join(conditions)}
+                ORDER BY id ASC LIMIT ?
+                """,
+                params + [limit],
+            )
 
             events = []
             for row in cursor.fetchall():
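The `after_id` cursor turns `session_events` into a resumable stream: a client remembers the last row id it saw and asks only for newer rows. A rough tailing loop over this API (the `db` handle and poll interval are illustrative):

```python
import time

def follow_session_events(db, session_id: str) -> None:
    """Tail a session's events, resuming from the last seen row id."""
    last_id = 0
    while True:
        events = db.get_session_events(session_id, after_id=last_id, limit=500)
        for event in events:
            print(event["event_type"], event.get("content"))
            last_id = event["id"]  # monotonic AUTOINCREMENT id as cursor
        if not events:
            time.sleep(1.0)  # back off once caught up
```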
@@ -1513,6 +1744,11 @@ class ProjectDatabase:
                 return item
         return None
 
+    # Whitelist of columns allowed for sorting (prevents SQL injection)
+    _WORK_ITEM_SORT_COLUMNS = frozenset({
+        "created_at", "updated_at", "priority", "title", "status", "category", "id"
+    })
+
     def list_work_items(
         self,
         status: Optional[str] = None,
@@ -1523,6 +1759,8 @@ class ProjectDatabase:
         unclaimed_only: bool = False,
         limit: int = 100,
         offset: int = 0,
+        sort_by: str = "priority",
+        sort_order: str = "desc",
     ) -> tuple[list[dict], int]:
         """List work items with optional filters.
 
@@ -1535,6 +1773,8 @@ class ProjectDatabase:
             unclaimed_only: If True, only return items not claimed by any loop.
             limit: Maximum items to return.
             offset: Pagination offset.
+            sort_by: Column to sort by (default: priority).
+            sort_order: Sort order, "asc" or "desc" (default: desc).
 
         Returns:
             Tuple of (items list, total count).
@@ -1571,10 +1811,21 @@ class ProjectDatabase:
             )
             total = cursor.fetchone()[0]
 
+            # Validate and build ORDER BY clause
+            if sort_by not in self._WORK_ITEM_SORT_COLUMNS:
+                sort_by = "priority"
+            order_dir = "ASC" if (sort_order or "desc").lower() == "asc" else "DESC"
+
+            # Build ORDER BY: primary sort + secondary sort by priority
+            if sort_by == "priority":
+                order_clause = f"ORDER BY priority {order_dir} NULLS LAST"
+            else:
+                order_clause = f"ORDER BY {sort_by} {order_dir}, priority ASC NULLS LAST"
+
             # Get items
             query = f"""
                 SELECT * FROM work_items WHERE {where_clause}
+                {order_clause}
                 LIMIT ? OFFSET ?
             """
             cursor = conn.execute(query, params + [limit, offset])
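SQLite placeholders cannot bind identifiers, so the sort column has to be interpolated into the SQL string; the frozenset whitelist plus direction normalization is what keeps that interpolation injection-safe. The pattern in isolation (column names illustrative):

```python
ALLOWED_SORT = frozenset({"created_at", "priority", "title"})

def build_order_clause(sort_by: str, sort_order: str) -> str:
    """Return a safe ORDER BY fragment from untrusted inputs."""
    if sort_by not in ALLOWED_SORT:  # unknown column -> safe fallback
        sort_by = "priority"
    direction = "ASC" if (sort_order or "desc").lower() == "asc" else "DESC"
    # Only whitelisted identifiers and two literal keywords reach the SQL.
    return f"ORDER BY {sort_by} {direction}"

# e.g. build_order_clause("title; DROP TABLE x", "asc") -> "ORDER BY priority ASC"
```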
@@ -1693,7 +1944,12 @@ class ProjectDatabase:
             return cursor.rowcount > 0
 
     def release_work_item(self, id: str) -> bool:
-        """Release a claimed work item back to 'pending' state."""
+        """Release a claimed work item back to 'completed' state.
+
+        Items generated by producer loops have status 'completed' before being
+        claimed. We restore to 'completed' (not 'pending') so that consumer
+        loops, which query for status='completed', can find and retry them.
+        """
         with self._writer() as conn:
             now = datetime.utcnow().isoformat()
             cursor = conn.execute(
@@ -1701,7 +1957,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status = 'pending',
+                    status = 'completed',
                     updated_at = ?
                 WHERE id = ? AND status = 'claimed'
                 """,
@@ -1831,6 +2087,9 @@ class ProjectDatabase:
     def release_stale_claims(self, max_age_minutes: int = 30) -> int:
         """Release claims that have been held too long (likely crashed consumer).
 
+        Released items are restored to 'completed' status so consumer loops
+        (which query for status='completed') can find and retry them.
+
         Args:
             max_age_minutes: Claims older than this are released.
 
@@ -1848,7 +2107,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status = 'pending',
+                    status = 'completed',
                     updated_at = ?
                 WHERE claimed_at < ?
                   AND claimed_by IS NOT NULL
@@ -1862,8 +2121,8 @@ class ProjectDatabase:
         """Release all claims held by a specific loop.
 
         Used when deleting a loop to prevent orphaned claims.
-        Released items are restored to 'pending' status so they can be
-        picked up by other loops.
+        Released items are restored to 'completed' status so they can be
+        picked up by other consumer loops.
 
         Args:
             loop_name: Name of the loop whose claims should be released.
@@ -1879,7 +2138,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status = 'pending',
+                    status = 'completed',
                     updated_at = ?
                 WHERE claimed_by = ? AND status = 'claimed'
                 """,
@@ -1893,8 +2152,8 @@ class ProjectDatabase:
         This is an atomic operation that checks ownership and releases in one step
         to prevent TOCTOU race conditions.
 
-        Released items are restored to 'pending' status so they can be
-        picked up by other loops.
+        Released items are restored to 'completed' status so consumer loops
+        (which query for status='completed') can find and retry them.
 
         Args:
             id: Work item ID.
@@ -1910,7 +2169,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status = 'pending',
+                    status = 'completed',
                     updated_at = ?
                 WHERE id = ? AND claimed_by = ? AND status = 'claimed'
                 """,
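Together, the release paths treat a claim as a lease: whether a consumer crashes, a loop is deleted, or a claim goes stale, the item lands back in the status consumers poll for. A sketch of a periodic reaper built on `release_stale_claims` (interval and wiring are illustrative, not part of the package):

```python
import logging
import threading

logger = logging.getLogger(__name__)

def start_claim_reaper(db, interval_seconds: float = 300.0) -> threading.Timer:
    """Periodically release claims held longer than 30 minutes."""
    def tick() -> None:
        released = db.release_stale_claims(max_age_minutes=30)
        if released:
            logger.info("Released %d stale work-item claims", released)
        start_claim_reaper(db, interval_seconds)  # reschedule next run

    timer = threading.Timer(interval_seconds, tick)
    timer.daemon = True  # don't block interpreter shutdown
    timer.start()
    return timer
```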
@@ -2550,26 +2809,50 @@ class ProjectDatabase:
         self,
         run_id: Optional[str] = None,
         level: Optional[str] = None,
+        session_id: Optional[str] = None,
+        search: Optional[str] = None,
         limit: int = 100,
         offset: int = 0,
-    ) -> list[dict]:
-        """Get logs with optional filters.
+    ) -> tuple[list[dict], int]:
+        """Get logs with optional filters.
+
+        Returns:
+            Tuple of (logs list, total count).
+        """
         with self._reader() as conn:
             conditions = ["1=1"]
             params: list[Any] = []
 
             if run_id:
-                conditions.append("run_id = ?")
+                conditions.append("l.run_id = ?")
                 params.append(run_id)
             if level:
-                conditions.append("level = ?")
+                conditions.append("l.level = ?")
                 params.append(level)
+            if session_id:
+                # Filter logs by session: join through runs → sessions
+                conditions.append(
+                    "l.run_id IN (SELECT run_id FROM sessions WHERE session_id = ?)"
+                )
+                params.append(session_id)
+            if search:
+                conditions.append("l.message LIKE ?")
+                params.append(f"%{search}%")
+
+            where_clause = " AND ".join(conditions)
+
+            # Get total count
+            count_row = conn.execute(
+                f"SELECT COUNT(*) FROM logs l WHERE {where_clause}",
+                params,
+            ).fetchone()
+            total = count_row[0] if count_row else 0
 
             cursor = conn.execute(
                 f"""
-                SELECT
-                WHERE {
-                ORDER BY timestamp DESC
+                SELECT l.* FROM logs l
+                WHERE {where_clause}
+                ORDER BY l.timestamp DESC
                 LIMIT ? OFFSET ?
                 """,
                 params + [limit, offset],
@@ -2581,7 +2864,84 @@ class ProjectDatabase:
                 if result.get("metadata"):
                     result["metadata"] = json.loads(result["metadata"])
                 results.append(result)
-            return results
+            return results, total
+
+    def get_log_stats(
+        self,
+        run_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+    ) -> dict:
+        """Get log statistics (counts by level).
+
+        Returns:
+            Dict with by_level counts and total.
+        """
+        with self._reader() as conn:
+            conditions = ["1=1"]
+            params: list[Any] = []
+
+            if run_id:
+                conditions.append("run_id = ?")
+                params.append(run_id)
+            if session_id:
+                conditions.append(
+                    "run_id IN (SELECT run_id FROM sessions WHERE session_id = ?)"
+                )
+                params.append(session_id)
+
+            where_clause = " AND ".join(conditions)
+
+            # Count by level
+            cursor = conn.execute(
+                f"""
+                SELECT level, COUNT(*) as count FROM logs
+                WHERE {where_clause}
+                GROUP BY level
+                """,
+                params,
+            )
+            by_level = {row["level"]: row["count"] for row in cursor.fetchall()}
+
+            total = sum(by_level.values())
+
+            return {
+                "by_level": by_level,
+                "by_category": {},  # No category column in schema
+                "total": total,
+            }
+
+    def cleanup_logs(
+        self,
+        days: int = 30,
+        dry_run: bool = True,
+    ) -> dict:
+        """Delete logs older than specified days.
+
+        Args:
+            days: Delete logs older than this many days.
+            dry_run: If True, only report what would be deleted.
+
+        Returns:
+            Dict with deleted_count.
+        """
+        from datetime import datetime, timedelta
+
+        cutoff = (datetime.utcnow() - timedelta(days=days)).isoformat()
+
+        if dry_run:
+            with self._reader() as conn:
+                row = conn.execute(
+                    "SELECT COUNT(*) FROM logs WHERE timestamp < ?",
+                    (cutoff,),
+                ).fetchone()
+                return {"deleted_count": row[0] if row else 0}
+        else:
+            with self._writer() as conn:
+                cursor = conn.execute(
+                    "DELETE FROM logs WHERE timestamp < ?",
+                    (cutoff,),
+                )
+                return {"deleted_count": cursor.rowcount}
 
     # ========== Checkpoints ==========
 
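The `dry_run` default keeps the destructive path opt-in: the same call shape answers "what would be deleted" and then deletes. Hypothetical usage, with an illustrative safety threshold:

```python
# Preview first, then delete only if the count looks sane.
preview = db.cleanup_logs(days=30, dry_run=True)
if preview["deleted_count"] < 100_000:
    result = db.cleanup_logs(days=30, dry_run=False)
    print(f"Deleted {result['deleted_count']} log rows")
```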
@@ -4823,6 +5183,52 @@ class ProjectDatabase:
 
         return True
 
+    def reopen_workflow_step_atomic(
+        self,
+        workflow_id: str,
+        step_id: int,
+        step_number: int,
+    ) -> bool:
+        """Atomically reopen a completed/skipped step.
+
+        Sets the target step back to 'active', resets all later steps
+        to 'pending', and moves workflow.current_step back.
+
+        Args:
+            workflow_id: The workflow ID.
+            step_id: The ID of the step to reopen.
+            step_number: The step_number of the step to reopen.
+
+        Returns:
+            True if reopen succeeded, False otherwise.
+        """
+        with self._writer() as conn:
+            now = datetime.utcnow().isoformat()
+
+            # Set target step back to active, clear completed_at
+            conn.execute(
+                "UPDATE workflow_steps SET status = 'active', completed_at = NULL, updated_at = ? "
+                "WHERE id = ?",
+                (now, step_id),
+            )
+
+            # Reset all later steps to pending, clear timestamps
+            conn.execute(
+                "UPDATE workflow_steps SET status = 'pending', started_at = NULL, "
+                "completed_at = NULL, updated_at = ? "
+                "WHERE workflow_id = ? AND step_number > ? AND archived_at IS NULL",
+                (now, workflow_id, step_number),
+            )
+
+            # Move workflow.current_step back and ensure workflow is active
+            conn.execute(
+                "UPDATE workflows SET current_step = ?, status = 'active', updated_at = ? "
+                "WHERE id = ?",
+                (step_number, now, workflow_id),
+            )
+
+            return True
+
     # ========== Planning Sessions ==========
 
     def create_planning_session(
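All three UPDATEs run inside a single `_writer()` transaction, so a crash midway cannot leave `current_step` pointing at a step that was never reset. A hypothetical call reopening step 3 of a workflow (ids illustrative):

```python
# step_id is the workflow_steps row id; step_number is the ordinal position.
ok = db.reopen_workflow_step_atomic(workflow_id="wf-42", step_id=7, step_number=3)
```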
@@ -4833,6 +5239,11 @@ class ProjectDatabase:
         messages: Optional[list] = None,
         artifacts: Optional[dict] = None,
         status: str = "active",
+        # New iteration-based fields
+        prompt: Optional[str] = None,
+        iterations_requested: int = 1,
+        run_status: str = "pending",
+        is_legacy: bool = False,
     ) -> dict:
         """Create a planning session for an interactive step.
 
@@ -4843,6 +5254,10 @@ class ProjectDatabase:
             messages: Initial messages (default: empty list).
             artifacts: Optional artifacts dict.
             status: Session status (default: 'active').
+            prompt: User's guidance prompt for iteration-based sessions.
+            iterations_requested: Number of iterations requested (default: 1).
+            run_status: Execution status for iterations (default: 'pending').
+            is_legacy: Whether this is a legacy chat-based session.
 
         Returns:
             The created session dict.
@@ -4853,9 +5268,24 @@ class ProjectDatabase:
             artifacts_json = json.dumps(artifacts) if artifacts else None
             conn.execute(
                 """INSERT INTO planning_sessions
-                (id, workflow_id, step_id, messages, artifacts, status,
+                (id, workflow_id, step_id, messages, artifacts, status,
+                 prompt, iterations_requested, iterations_completed, current_iteration,
+                 run_status, is_legacy, created_at, updated_at)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, 0, ?, ?, ?, ?)""",
+                (
+                    id,
+                    workflow_id,
+                    step_id,
+                    messages_json,
+                    artifacts_json,
+                    status,
+                    prompt,
+                    iterations_requested,
+                    run_status,
+                    is_legacy,
+                    now,
+                    now,
+                ),
             )
             return self.get_planning_session(id)
 
@@ -4876,10 +5306,11 @@ class ProjectDatabase:
             return None
 
     def get_planning_session_by_step(self, step_id: int) -> Optional[dict]:
-        """Get planning session for a step ID."""
+        """Get the most recent planning session for a step ID."""
         with self._reader() as conn:
             cursor = conn.execute(
-                "SELECT * FROM planning_sessions WHERE step_id = ?",
+                "SELECT * FROM planning_sessions WHERE step_id = ? ORDER BY created_at DESC LIMIT 1",
+                (step_id,),
             )
             row = cursor.fetchone()
             if row:
@@ -5000,6 +5431,11 @@ class ProjectDatabase:
         id: str,
         status: Optional[str] = None,
         artifacts: Optional[dict] = None,
+        # New iteration-based fields
+        run_status: Optional[str] = None,
+        current_iteration: Optional[int] = None,
+        iterations_completed: Optional[int] = None,
+        error_message: Optional[str] = None,
     ) -> bool:
         """Update planning session fields."""
         updates = []
@@ -5011,6 +5447,18 @@ class ProjectDatabase:
         if artifacts is not None:
             updates.append("artifacts = ?")
             params.append(json.dumps(artifacts))
+        if run_status is not None:
+            updates.append("run_status = ?")
+            params.append(run_status)
+        if current_iteration is not None:
+            updates.append("current_iteration = ?")
+            params.append(current_iteration)
+        if iterations_completed is not None:
+            updates.append("iterations_completed = ?")
+            params.append(iterations_completed)
+        if error_message is not None:
+            updates.append("error_message = ?")
+            params.append(error_message)
 
         if not updates:
             return False
@@ -5030,7 +5478,309 @@ class ProjectDatabase:
         self, id: str, artifacts: Optional[dict] = None
     ) -> bool:
         """Mark a planning session as completed."""
-        return self.update_planning_session(
+        return self.update_planning_session(
+            id, status="completed", run_status="completed", artifacts=artifacts
+        )
+
+    def get_running_planning_session(self, workflow_id: str) -> Optional[dict]:
+        """Get any currently running planning session for a workflow.
+
+        Used to prevent multiple concurrent sessions.
+
+        Args:
+            workflow_id: The workflow ID to check.
+
+        Returns:
+            The running session dict if found, None otherwise.
+        """
+        with self._reader() as conn:
+            cursor = conn.execute(
+                """SELECT * FROM planning_sessions
+                WHERE workflow_id = ? AND run_status = 'running'
+                ORDER BY created_at DESC LIMIT 1""",
+                (workflow_id,),
+            )
+            row = cursor.fetchone()
+            if row:
+                result = dict(row)
+                if result.get("messages"):
+                    result["messages"] = json.loads(result["messages"])
+                if result.get("artifacts"):
+                    result["artifacts"] = json.loads(result["artifacts"])
+                return result
+            return None
+
+    def cancel_planning_session(self, id: str) -> bool:
+        """Cancel a running planning session.
+
+        Args:
+            id: The session ID to cancel.
+
+        Returns:
+            True if session was cancelled, False otherwise.
+        """
+        with self._writer() as conn:
+            cursor = conn.execute(
+                """UPDATE planning_sessions
+                SET run_status = 'cancelled', updated_at = ?
+                WHERE id = ? AND run_status = 'running'""",
+                (datetime.utcnow().isoformat(), id),
+            )
+            return cursor.rowcount > 0
+
+    # ========== Planning Iterations ==========
+
+    def create_planning_iteration(
+        self,
+        session_id: str,
+        iteration_number: int,
+        status: str = "pending",
+    ) -> Optional[dict]:
+        """Create a new planning iteration record.
+
+        Args:
+            session_id: Parent session ID.
+            iteration_number: The iteration number (1-indexed).
+            status: Initial status (default: 'pending').
+
+        Returns:
+            The created iteration dict.
+        """
+        with self._writer() as conn:
+            cursor = conn.execute(
+                """INSERT INTO planning_iterations
+                (session_id, iteration_number, status)
+                VALUES (?, ?, ?)""",
+                (session_id, iteration_number, status),
+            )
+            return self.get_planning_iteration(cursor.lastrowid)
+
+    def get_planning_iteration(self, iteration_id: int) -> Optional[dict]:
+        """Get a planning iteration by ID."""
+        with self._reader() as conn:
+            cursor = conn.execute(
+                "SELECT * FROM planning_iterations WHERE id = ?", (iteration_id,)
+            )
+            row = cursor.fetchone()
+            if row:
+                result = dict(row)
+                if result.get("tool_calls"):
+                    result["tool_calls"] = json.loads(result["tool_calls"])
+                return result
+            return None
+
+    def list_planning_iterations(self, session_id: str) -> list[dict]:
+        """List all iterations for a planning session.
+
+        Args:
+            session_id: The session ID.
+
+        Returns:
+            List of iteration dicts ordered by iteration_number.
+        """
+        with self._reader() as conn:
+            cursor = conn.execute(
+                """SELECT * FROM planning_iterations
+                WHERE session_id = ?
+                ORDER BY iteration_number ASC""",
+                (session_id,),
+            )
+            results = []
+            for row in cursor.fetchall():
+                result = dict(row)
+                if result.get("tool_calls"):
+                    result["tool_calls"] = json.loads(result["tool_calls"])
+                results.append(result)
+            return results
+
+    def update_planning_iteration(
+        self,
+        iteration_id: int,
+        status: Optional[str] = None,
+        started_at: Optional[str] = None,
+        completed_at: Optional[str] = None,
+        chars_added: Optional[int] = None,
+        chars_removed: Optional[int] = None,
+        tool_calls: Optional[list] = None,
+        summary: Optional[str] = None,
+        error_message: Optional[str] = None,
+        diff_text: Optional[str] = None,
+        doc_before: Optional[str] = None,
+        doc_after: Optional[str] = None,
+    ) -> bool:
+        """Update a planning iteration.
+
+        Args:
+            iteration_id: The iteration ID.
+            status: New status.
+            started_at: Start timestamp.
+            completed_at: Completion timestamp.
+            chars_added: Characters added to design doc.
+            chars_removed: Characters removed from design doc.
+            tool_calls: List of tool call records.
+            summary: Brief summary of changes.
+            error_message: Error message if failed.
+            diff_text: Unified diff of changes.
+            doc_before: Document content before iteration.
+            doc_after: Document content after iteration.
+
+        Returns:
+            True if updated, False otherwise.
+        """
+        updates = []
+        params: list[Any] = []
+
+        if status is not None:
+            updates.append("status = ?")
+            params.append(status)
+        if started_at is not None:
+            updates.append("started_at = ?")
+            params.append(started_at)
+        if completed_at is not None:
+            updates.append("completed_at = ?")
+            params.append(completed_at)
+        if chars_added is not None:
+            updates.append("chars_added = ?")
+            params.append(chars_added)
+        if chars_removed is not None:
+            updates.append("chars_removed = ?")
+            params.append(chars_removed)
+        if tool_calls is not None:
+            updates.append("tool_calls = ?")
+            params.append(json.dumps(tool_calls))
+        if summary is not None:
+            updates.append("summary = ?")
+            params.append(summary)
+        if error_message is not None:
+            updates.append("error_message = ?")
+            params.append(error_message)
+        if diff_text is not None:
+            updates.append("diff_text = ?")
+            params.append(diff_text)
+        if doc_before is not None:
+            updates.append("doc_before = ?")
+            params.append(doc_before)
+        if doc_after is not None:
+            updates.append("doc_after = ?")
+            params.append(doc_after)
+
+        if not updates:
+            return False
+
+        params.append(iteration_id)
+
+        with self._writer() as conn:
+            cursor = conn.execute(
+                f"UPDATE planning_iterations SET {', '.join(updates)} WHERE id = ?",
+                params,
+            )
+            return cursor.rowcount > 0
+
+    def start_planning_iteration(self, iteration_id: int) -> bool:
+        """Mark an iteration as started."""
+        return self.update_planning_iteration(
+            iteration_id,
+            status="running",
+            started_at=datetime.utcnow().isoformat(),
+        )
+
+    def complete_planning_iteration(
+        self,
+        iteration_id: int,
+        chars_added: int = 0,
+        chars_removed: int = 0,
+        tool_calls: Optional[list] = None,
+        summary: Optional[str] = None,
+        diff_text: Optional[str] = None,
+        doc_before: Optional[str] = None,
+        doc_after: Optional[str] = None,
+    ) -> bool:
+        """Mark an iteration as completed with results."""
+        return self.update_planning_iteration(
+            iteration_id,
+            status="completed",
+            completed_at=datetime.utcnow().isoformat(),
+            chars_added=chars_added,
+            chars_removed=chars_removed,
+            tool_calls=tool_calls,
+            summary=summary,
+            diff_text=diff_text,
+            doc_before=doc_before,
+            doc_after=doc_after,
+        )
+
+    def fail_planning_iteration(
+        self, iteration_id: int, error_message: str
+    ) -> bool:
+        """Mark an iteration as failed."""
+        return self.update_planning_iteration(
+            iteration_id,
+            status="failed",
+            completed_at=datetime.utcnow().isoformat(),
+            error_message=error_message,
+        )
+
+    # ========== Planning Iteration Events ==========
+
+    def add_planning_iteration_event(
+        self,
+        session_id: str,
+        event_type: str,
+        iteration_number: Optional[int] = None,
+        content: Optional[str] = None,
+        tool_name: Optional[str] = None,
+        tool_input: Optional[str] = None,
+        tool_result: Optional[str] = None,
+        event_data: Optional[str] = None,
+    ) -> int:
+        """Add a planning iteration event to the persistent log.
+
+        Returns:
+            The event ID.
+        """
+        with self._writer() as conn:
+            cursor = conn.execute(
+                """INSERT INTO planning_iteration_events
+                (session_id, iteration_number, event_type, content, tool_name, tool_input, tool_result, event_data)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
+                (session_id, iteration_number, event_type, content, tool_name, tool_input, tool_result, event_data),
+            )
+            return cursor.lastrowid
+
+    def get_planning_iteration_events(
+        self,
+        session_id: str,
+        after_id: int = 0,
+        limit: int = 500,
+    ) -> list[dict]:
+        """Get planning iteration events for a session.
+
+        Args:
+            session_id: The session ID.
+            after_id: Only return events with id > after_id (for pagination/polling).
+            limit: Maximum events to return.
+
+        Returns:
+            List of event dicts ordered by id ASC.
+        """
+        with self._reader() as conn:
+            cursor = conn.execute(
+                """SELECT * FROM planning_iteration_events
+                WHERE session_id = ? AND id > ?
+                ORDER BY id ASC
+                LIMIT ?""",
+                (session_id, after_id, limit),
+            )
+            return [dict(row) for row in cursor.fetchall()]
+
+    def get_latest_event_timestamp(self, session_id: str) -> Optional[str]:
+        """Get the timestamp of the most recent event for a planning session."""
+        with self._reader() as conn:
+            row = conn.execute(
+                "SELECT MAX(timestamp) FROM planning_iteration_events WHERE session_id = ?",
+                (session_id,),
+            ).fetchone()
+            return row[0] if row and row[0] else None
 
     # ========== Utilities ==========
 
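These methods give the iteration executor a simple per-iteration lifecycle: create as pending, start, then complete or fail, advancing the session counters alongside and mirroring events into the persistent log so a disconnected UI can replay them. A sketch of a driver over this API (`run_one_iteration` is a hypothetical stand-in for the real logic in `planning_iteration_executor.py`):

```python
def run_planning_session(db, session: dict, run_one_iteration) -> None:
    """Drive one iteration-based planning session to completion."""
    sid = session["id"]
    db.update_planning_session(sid, run_status="running")
    for n in range(1, session["iterations_requested"] + 1):
        iteration = db.create_planning_iteration(sid, n)
        db.start_planning_iteration(iteration["id"])
        db.update_planning_session(sid, current_iteration=n)
        try:
            # Returns a dict of metric kwargs: chars_added, summary, diff_text, ...
            result = run_one_iteration(sid, n)
        except Exception as exc:
            db.fail_planning_iteration(iteration["id"], str(exc))
            db.update_planning_session(sid, run_status="error", error_message=str(exc))
            return
        db.complete_planning_iteration(iteration["id"], **result)
        db.update_planning_session(sid, iterations_completed=n)
    db.update_planning_session(sid, run_status="completed")
```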