ralphx 0.3.4__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ralphx/__init__.py +1 -1
- ralphx/adapters/base.py +10 -2
- ralphx/adapters/claude_cli.py +222 -82
- ralphx/api/routes/auth.py +780 -98
- ralphx/api/routes/config.py +3 -56
- ralphx/api/routes/export_import.py +6 -9
- ralphx/api/routes/loops.py +4 -4
- ralphx/api/routes/planning.py +882 -19
- ralphx/api/routes/resources.py +528 -6
- ralphx/api/routes/stream.py +58 -56
- ralphx/api/routes/templates.py +2 -2
- ralphx/api/routes/workflows.py +258 -47
- ralphx/cli.py +4 -1
- ralphx/core/auth.py +372 -172
- ralphx/core/database.py +588 -164
- ralphx/core/executor.py +170 -19
- ralphx/core/loop.py +15 -2
- ralphx/core/loop_templates.py +29 -3
- ralphx/core/planning_iteration_executor.py +633 -0
- ralphx/core/planning_service.py +119 -24
- ralphx/core/preview.py +9 -25
- ralphx/core/project_db.py +864 -121
- ralphx/core/project_export.py +1 -5
- ralphx/core/project_import.py +14 -29
- ralphx/core/resources.py +28 -2
- ralphx/core/sample_project.py +1 -5
- ralphx/core/templates.py +9 -9
- ralphx/core/workflow_executor.py +32 -3
- ralphx/core/workflow_export.py +4 -7
- ralphx/core/workflow_import.py +3 -27
- ralphx/mcp/__init__.py +6 -2
- ralphx/mcp/registry.py +3 -3
- ralphx/mcp/tools/diagnostics.py +1 -1
- ralphx/mcp/tools/monitoring.py +10 -16
- ralphx/mcp/tools/workflows.py +115 -33
- ralphx/mcp_server.py +6 -2
- ralphx/static/assets/index-BuLI7ffn.css +1 -0
- ralphx/static/assets/index-DWvlqOTb.js +264 -0
- ralphx/static/assets/index-DWvlqOTb.js.map +1 -0
- ralphx/static/index.html +2 -2
- ralphx/templates/loop_templates/consumer.md +2 -2
- {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/METADATA +33 -12
- {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/RECORD +45 -44
- ralphx/static/assets/index-CcRDyY3b.css +0 -1
- ralphx/static/assets/index-CcxfTosc.js +0 -251
- ralphx/static/assets/index-CcxfTosc.js.map +0 -1
- {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/WHEEL +0 -0
- {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/entry_points.txt +0 -0
ralphx/core/project_db.py
CHANGED
@@ -14,7 +14,8 @@ This makes projects portable - clone a repo with .ralphx/ and all data comes wit
 """

 import json
-import re
+import logging
+import shutil
 import sqlite3
 import threading
 from contextlib import contextmanager
@@ -22,30 +23,11 @@ from datetime import datetime
 from pathlib import Path
 from typing import Any, Iterator, Optional

+logger = logging.getLogger(__name__)

-# Schema version for project DB
-PROJECT_SCHEMA_VERSION = 15
-
-# Namespace validation pattern: lowercase, alphanumeric, underscores, dashes, max 64 chars
-# Must start with a letter
-NAMESPACE_PATTERN = re.compile(r'^[a-z][a-z0-9_-]{0,63}$')
-
-
-def validate_namespace(namespace: str) -> bool:
-    """Validate namespace format.
-
-    Namespaces must:
-    - Start with a lowercase letter
-    - Contain only lowercase letters, digits, underscores, and dashes
-    - Be 1-64 characters long
-
-    Args:
-        namespace: The namespace string to validate.

-
-
-    """
-    return bool(NAMESPACE_PATTERN.match(namespace))
+# Schema version for project DB
+PROJECT_SCHEMA_VERSION = 20

 # Project database schema - all project-specific data
 PROJECT_SCHEMA_SQL = """
@@ -297,7 +279,6 @@ CREATE TABLE IF NOT EXISTS workflows (
     id TEXT PRIMARY KEY,
     template_id TEXT,  -- Optional reference to template
     name TEXT NOT NULL,
-    namespace TEXT NOT NULL,  -- Workflow identifier
     status TEXT DEFAULT 'draft',  -- draft, active, paused, completed
     current_step INTEGER DEFAULT 1,
     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
@@ -325,19 +306,65 @@ CREATE TABLE IF NOT EXISTS workflow_steps (
 );

 -- Planning sessions (for interactive steps)
+-- New iteration-based paradigm: users provide a prompt + iteration count, system runs N iterations
 CREATE TABLE IF NOT EXISTS planning_sessions (
     id TEXT PRIMARY KEY,
     workflow_id TEXT NOT NULL,
     step_id INTEGER NOT NULL,
-    messages JSON NOT NULL DEFAULT '[]',  -- Conversation history
+    messages JSON NOT NULL DEFAULT '[]',  -- Conversation history (legacy chat-based sessions)
     artifacts JSON,  -- Generated design doc, guardrails
     status TEXT DEFAULT 'active',  -- active, completed
+    -- New iteration-based fields (v17)
+    prompt TEXT,  -- User's guidance for this session
+    iterations_requested INTEGER DEFAULT 1,  -- Number of iterations requested
+    iterations_completed INTEGER DEFAULT 0,  -- Number of iterations completed
+    current_iteration INTEGER DEFAULT 0,  -- Current iteration number (0 = not started)
+    run_status TEXT DEFAULT 'pending',  -- pending, running, completed, cancelled, error
+    is_legacy BOOLEAN DEFAULT FALSE,  -- TRUE for old chat-based sessions
+    error_message TEXT,  -- Error message if run_status='error'
     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (workflow_id) REFERENCES workflows(id) ON DELETE CASCADE,
     FOREIGN KEY (step_id) REFERENCES workflow_steps(id) ON DELETE CASCADE
 );

+-- Planning iterations (per-iteration tracking for iteration-based sessions)
+CREATE TABLE IF NOT EXISTS planning_iterations (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    session_id TEXT NOT NULL,
+    iteration_number INTEGER NOT NULL,
+    started_at TIMESTAMP,
+    completed_at TIMESTAMP,
+    status TEXT DEFAULT 'pending',  -- pending, running, completed, failed, skipped
+    chars_added INTEGER DEFAULT 0,
+    chars_removed INTEGER DEFAULT 0,
+    tool_calls JSON DEFAULT '[]',  -- [{tool, input_preview, duration_ms}] - truncated
+    summary TEXT,  -- Brief summary of what changed
+    diff_text TEXT,  -- Unified diff of changes
+    doc_before TEXT,  -- Document content before iteration
+    doc_after TEXT,  -- Document content after iteration
+    error_message TEXT,
+    FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+);
+
+-- Planning iteration events (persistent event log for streaming/reconnection)
+CREATE TABLE IF NOT EXISTS planning_iteration_events (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    session_id TEXT NOT NULL,
+    iteration_number INTEGER,
+    event_type TEXT NOT NULL,
+    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    content TEXT,
+    tool_name TEXT,
+    tool_input TEXT,
+    tool_result TEXT,
+    event_data TEXT,
+    FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS idx_pie_session ON planning_iteration_events(session_id);
+CREATE INDEX IF NOT EXISTS idx_pie_session_id ON planning_iteration_events(session_id, id);
+
 -- Workflow-scoped resources (design docs, guardrails, input files)
 CREATE TABLE IF NOT EXISTS workflow_resources (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
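A note on the new event log: planning_iteration_events is append-only with a monotonically increasing id, which is what makes stream reconnection cheap - a client remembers the last event id it processed and asks only for newer rows, and the idx_pie_session_id composite index covers exactly that query. A minimal sketch of the access pattern against a throwaway SQLite database (abbreviated DDL, made-up data):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    # Abbreviated version of the planning_iteration_events DDL above
    conn.execute("""
        CREATE TABLE planning_iteration_events (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            session_id TEXT NOT NULL,
            event_type TEXT NOT NULL,
            content TEXT
        )
    """)
    conn.executemany(
        "INSERT INTO planning_iteration_events (session_id, event_type, content) VALUES (?, ?, ?)",
        [("s1", "text", "hello"), ("s1", "tool_call", "Edit"), ("s1", "text", "done")],
    )

    # A reconnecting client resumes from the last event id it saw
    last_seen = 1
    rows = conn.execute(
        "SELECT * FROM planning_iteration_events WHERE session_id = ? AND id > ? ORDER BY id ASC",
        ("s1", last_seen),
    ).fetchall()
    for row in rows:
        print(row["id"], row["event_type"])  # prints rows 2 and 3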
@@ -467,11 +494,12 @@ CREATE INDEX IF NOT EXISTS idx_loop_resources_type ON loop_resources(resource_ty

 -- Workflow indexes
 CREATE INDEX IF NOT EXISTS idx_workflows_status ON workflows(status);
-CREATE INDEX IF NOT EXISTS idx_workflows_namespace ON workflows(namespace);
 CREATE INDEX IF NOT EXISTS idx_workflow_steps_workflow ON workflow_steps(workflow_id, step_number);
 CREATE INDEX IF NOT EXISTS idx_workflow_steps_status ON workflow_steps(status);
 CREATE INDEX IF NOT EXISTS idx_planning_sessions_workflow ON planning_sessions(workflow_id);
 CREATE INDEX IF NOT EXISTS idx_planning_sessions_status ON planning_sessions(status);
+CREATE INDEX IF NOT EXISTS idx_planning_sessions_run_status ON planning_sessions(run_status);
+CREATE INDEX IF NOT EXISTS idx_planning_iterations_session ON planning_iterations(session_id);

 -- Workflow resources indexes
 CREATE INDEX IF NOT EXISTS idx_workflow_resources_workflow ON workflow_resources(workflow_id, resource_type);
@@ -587,9 +615,8 @@ class ProjectDatabase:
                 "Please delete your .ralphx/ralphx.db file and start fresh."
             )

-        #
+        # Create schema tables (indexes created after migrations)
         conn.executescript(PROJECT_SCHEMA_SQL)
-        conn.executescript(PROJECT_INDEXES_SQL)

         if current_version == 0:
             # Fresh database
@@ -598,9 +625,32 @@ class ProjectDatabase:
                 (PROJECT_SCHEMA_VERSION,),
             )
         elif current_version < PROJECT_SCHEMA_VERSION:
+            # Create backup before running migrations
+            self._backup_before_migration(current_version)
             # Run migrations (for future versions > 6)
             self._run_migrations(conn, current_version)

+        # Create indexes AFTER migrations so all columns exist
+        conn.executescript(PROJECT_INDEXES_SQL)
+
+    def _backup_before_migration(self, from_version: int) -> None:
+        """Create a backup of the database before running migrations.
+
+        Creates a timestamped backup file in the same directory as the database.
+        This allows recovery if a migration fails or causes data loss.
+
+        Args:
+            from_version: Current schema version before migration.
+        """
+        try:
+            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+            backup_path = self.db_path.with_suffix(f".v{from_version}.{timestamp}.bak")
+            shutil.copy2(self.db_path, backup_path)
+            logger.info(f"Created database backup before migration: {backup_path}")
+        except Exception as e:
+            logger.warning(f"Failed to create backup before migration: {e}")
+            # Don't fail the migration if backup fails - just warn
+
     def _run_migrations(self, conn: sqlite3.Connection, from_version: int) -> None:
         """Run schema migrations from a version to the latest.

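Worth noting on _backup_before_migration: Path.with_suffix swaps only the final extension, so a database named ralphx.db gets a sibling backup such as ralphx.v15.20250101_093000.bak, and a failed copy is logged but never blocks the migration. A quick sketch of the naming (the path and version are illustrative):

    from datetime import datetime
    from pathlib import Path

    db_path = Path(".ralphx/ralphx.db")  # illustrative location
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    # ".db" is replaced by the whole multi-dot suffix
    backup_path = db_path.with_suffix(f".v15.{timestamp}.bak")
    print(backup_path)  # .ralphx/ralphx.v15.<timestamp>.bak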
@@ -658,6 +708,31 @@ class ProjectDatabase:
         # Migration from v14 to v15: Add workflow_resource_versions table
         if from_version == 14:
             self._migrate_v14_to_v15(conn)
+            from_version = 15  # Continue to next migration
+
+        # Migration from v15 to v16: Remove namespace from workflows table
+        if from_version == 15:
+            self._migrate_v15_to_v16(conn)
+            from_version = 16  # Continue to next migration
+
+        # Migration from v16 to v17: Add iteration-based planning fields
+        if from_version == 16:
+            self._migrate_v16_to_v17(conn)
+            from_version = 17  # Continue to next migration
+
+        # Migration from v17 to v18: Add planning_iteration_events table
+        if from_version == 17:
+            self._migrate_v17_to_v18(conn)
+            from_version = 18  # Continue to next migration
+
+        # Migration from v18 to v19: Add diff_text column to planning_iterations
+        if from_version == 18:
+            self._migrate_v18_to_v19(conn)
+            from_version = 19  # Continue to next migration
+
+        # Migration from v19 to v20: Add doc_before/doc_after columns
+        if from_version == 19:
+            self._migrate_v19_to_v20(conn)

         # Seed workflow templates for fresh databases
         self._seed_workflow_templates(conn)
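The chain above is the classic fall-through pattern for linear migrations: each if applies one step and then advances from_version, so a v15 database entering _run_migrations walks v15 -> 16 -> 17 -> 18 -> 19 -> 20 in a single call. The same idea in a generic, runnable form (toy table and migration bodies, not the real schema):

    import sqlite3

    # Hypothetical linear migration registry mirroring the if-chain above
    MIGRATIONS = {
        15: lambda conn: conn.execute("ALTER TABLE t ADD COLUMN a TEXT"),
        16: lambda conn: conn.execute("ALTER TABLE t ADD COLUMN b TEXT"),
    }
    LATEST = 17

    def run_migrations(conn: sqlite3.Connection, from_version: int) -> None:
        # Apply exactly one step per pass until the schema is current
        while from_version < LATEST:
            MIGRATIONS[from_version](conn)
            from_version += 1

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE t (id INTEGER PRIMARY KEY)")
    run_migrations(conn, 15)
    print([row[1] for row in conn.execute("PRAGMA table_info(t)")])  # ['id', 'a', 'b']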
@@ -960,6 +1035,169 @@ class ProjectDatabase:
             ON workflow_resource_versions(workflow_resource_id, version_number DESC)
         """)

+    def _migrate_v15_to_v16(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v15 to v16.
+
+        Removes:
+        - namespace column from workflows table (deprecated, replaced by workflow_id)
+        - idx_workflows_namespace index
+
+        SQLite doesn't support DROP COLUMN directly, so we recreate the table.
+
+        IMPORTANT: We must disable foreign keys before dropping the old table,
+        otherwise the ON DELETE CASCADE on workflow_steps will delete all steps!
+        """
+        # 0. Disable foreign keys to prevent CASCADE deletes during table swap
+        # IMPORTANT: PRAGMA foreign_keys is NOT transactional, so we must
+        # re-enable in a finally block to prevent silent FK violations
+        conn.execute("PRAGMA foreign_keys=OFF")
+        try:
+            # 1. Create new table without namespace
+            conn.execute("""
+                CREATE TABLE workflows_new (
+                    id TEXT PRIMARY KEY,
+                    template_id TEXT,
+                    name TEXT NOT NULL,
+                    status TEXT DEFAULT 'draft',
+                    current_step INTEGER DEFAULT 1,
+                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    archived_at TIMESTAMP
+                )
+            """)
+
+            # 2. Copy data (excluding namespace)
+            conn.execute("""
+                INSERT INTO workflows_new (id, template_id, name, status, current_step, created_at, updated_at, archived_at)
+                SELECT id, template_id, name, status, current_step, created_at, updated_at, archived_at
+                FROM workflows
+            """)
+
+            # 3. Drop old table and index
+            conn.execute("DROP INDEX IF EXISTS idx_workflows_namespace")
+            conn.execute("DROP TABLE workflows")
+
+            # 4. Rename new table
+            conn.execute("ALTER TABLE workflows_new RENAME TO workflows")
+
+            # 5. Recreate the status index on the new table
+            conn.execute("CREATE INDEX IF NOT EXISTS idx_workflows_status ON workflows(status)")
+        finally:
+            # 6. Re-enable foreign keys (must happen even on failure)
+            conn.execute("PRAGMA foreign_keys=ON")
+
+    def _migrate_v16_to_v17(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v16 to v17.
+
+        Adds:
+        - New columns to planning_sessions for iteration-based paradigm
+        - New planning_iterations table for per-iteration tracking
+        - Marks existing chat-based sessions as legacy
+        """
+        # 1. Add new columns to planning_sessions
+        conn.execute("ALTER TABLE planning_sessions ADD COLUMN prompt TEXT")
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN iterations_requested INTEGER DEFAULT 1"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN iterations_completed INTEGER DEFAULT 0"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN current_iteration INTEGER DEFAULT 0"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN run_status TEXT DEFAULT 'pending'"
+        )
+        conn.execute(
+            "ALTER TABLE planning_sessions ADD COLUMN is_legacy BOOLEAN DEFAULT FALSE"
+        )
+        conn.execute("ALTER TABLE planning_sessions ADD COLUMN error_message TEXT")
+
+        # 2. Mark existing sessions with messages as legacy
+        conn.execute("""
+            UPDATE planning_sessions
+            SET is_legacy = TRUE, run_status = 'completed'
+            WHERE json_array_length(messages) > 0 AND prompt IS NULL
+        """)
+
+        # 3. Create planning_iterations table
+        conn.execute("""
+            CREATE TABLE IF NOT EXISTS planning_iterations (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                session_id TEXT NOT NULL,
+                iteration_number INTEGER NOT NULL,
+                started_at TIMESTAMP,
+                completed_at TIMESTAMP,
+                status TEXT DEFAULT 'pending',
+                chars_added INTEGER DEFAULT 0,
+                chars_removed INTEGER DEFAULT 0,
+                tool_calls JSON DEFAULT '[]',
+                summary TEXT,
+                error_message TEXT,
+                FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+            )
+        """)
+
+        # 4. Add indexes
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_planning_sessions_run_status ON planning_sessions(run_status)"
+        )
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_planning_iterations_session ON planning_iterations(session_id)"
+        )
+
+    def _migrate_v17_to_v18(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v17 to v18.
+
+        Adds:
+        - planning_iteration_events table for persistent event streaming/reconnection
+        """
+        conn.execute("""
+            CREATE TABLE IF NOT EXISTS planning_iteration_events (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                session_id TEXT NOT NULL,
+                iteration_number INTEGER,
+                event_type TEXT NOT NULL,
+                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                content TEXT,
+                tool_name TEXT,
+                tool_input TEXT,
+                tool_result TEXT,
+                event_data TEXT,
+                FOREIGN KEY (session_id) REFERENCES planning_sessions(id) ON DELETE CASCADE
+            )
+        """)
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_pie_session ON planning_iteration_events(session_id)"
+        )
+        conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_pie_session_id ON planning_iteration_events(session_id, id)"
+        )
+
+    def _migrate_v18_to_v19(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v18 to v19.
+
+        Adds:
+        - diff_text column to planning_iterations for storing unified diffs
+        """
+        conn.execute(
+            "ALTER TABLE planning_iterations ADD COLUMN diff_text TEXT"
+        )
+
+    def _migrate_v19_to_v20(self, conn: sqlite3.Connection) -> None:
+        """Migrate from schema v19 to v20.
+
+        Adds:
+        - doc_before column to planning_iterations for pre-iteration doc snapshot
+        - doc_after column to planning_iterations for post-iteration doc snapshot
+        """
+        conn.execute(
+            "ALTER TABLE planning_iterations ADD COLUMN doc_before TEXT"
+        )
+        conn.execute(
+            "ALTER TABLE planning_iterations ADD COLUMN doc_after TEXT"
+        )
+
     # ========== Loops ==========

     def create_loop(
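The finally in _migrate_v15_to_v16 is load-bearing: PRAGMA foreign_keys is connection-wide rather than transactional, and SQLite ignores it while a transaction is open, so it must be restored even when the rebuild fails. A self-contained demonstration of the rebuild-without-cascade pattern on toy tables (isolation_level=None keeps the connection in autocommit mode so the pragma actually takes effect):

    import sqlite3

    # Autocommit mode: PRAGMA foreign_keys is a no-op inside an open transaction
    conn = sqlite3.connect(":memory:", isolation_level=None)
    conn.execute("PRAGMA foreign_keys=ON")
    conn.execute("CREATE TABLE parent (id TEXT PRIMARY KEY, extra TEXT)")
    conn.execute(
        "CREATE TABLE child (id INTEGER PRIMARY KEY, "
        "parent_id TEXT REFERENCES parent(id) ON DELETE CASCADE)"
    )
    conn.execute("INSERT INTO parent VALUES ('p1', 'drop-me')")
    conn.execute("INSERT INTO child (parent_id) VALUES ('p1')")

    # Rebuild parent without the 'extra' column; with foreign keys ON,
    # DROP TABLE runs an implicit DELETE that would cascade into child.
    conn.execute("PRAGMA foreign_keys=OFF")
    try:
        conn.execute("CREATE TABLE parent_new (id TEXT PRIMARY KEY)")
        conn.execute("INSERT INTO parent_new SELECT id FROM parent")
        conn.execute("DROP TABLE parent")
        conn.execute("ALTER TABLE parent_new RENAME TO parent")
    finally:
        conn.execute("PRAGMA foreign_keys=ON")

    print(conn.execute("SELECT COUNT(*) FROM child").fetchone()[0])  # 1 - rows survived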
@@ -1219,23 +1457,29 @@ class ProjectDatabase:
     def list_sessions(
         self,
         run_id: Optional[str] = None,
+        status: Optional[str] = None,
         limit: int = 100,
     ) -> list[dict]:
         """List sessions with optional filters."""
         with self._reader() as conn:
+            conditions = ["1=1"]
+            params: list[Any] = []
+
             if run_id:
-
-
-
-
-
-
-
-
-
-
-
-
+                conditions.append("run_id = ?")
+                params.append(run_id)
+            if status:
+                conditions.append("status = ?")
+                params.append(status)
+
+            cursor = conn.execute(
+                f"""
+                SELECT * FROM sessions
+                WHERE {' AND '.join(conditions)}
+                ORDER BY started_at DESC LIMIT ?
+                """,
+                params + [limit],
+            )
             return [dict(row) for row in cursor.fetchall()]

     _SESSION_UPDATE_COLS = frozenset({
@@ -1317,6 +1561,7 @@ class ProjectDatabase:
         self,
         session_id: str,
         after_id: Optional[int] = None,
+        event_type: Optional[str] = None,
         limit: int = 500,
     ) -> list[dict]:
         """Get events for a session.
@@ -1324,30 +1569,31 @@ class ProjectDatabase:
         Args:
             session_id: Session UUID.
             after_id: Only return events with ID greater than this (for polling).
+            event_type: Filter by event type (text, tool_call, tool_result, error).
             limit: Maximum number of events to return.

         Returns:
             List of event dicts.
         """
         with self._reader() as conn:
+            conditions = ["session_id = ?"]
+            params: list[Any] = [session_id]
+
             if after_id:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                """,
-                (session_id, limit),
-            )
+                conditions.append("id > ?")
+                params.append(after_id)
+            if event_type:
+                conditions.append("event_type = ?")
+                params.append(event_type)
+
+            cursor = conn.execute(
+                f"""
+                SELECT * FROM session_events
+                WHERE {' AND '.join(conditions)}
+                ORDER BY id ASC LIMIT ?
+                """,
+                params + [limit],
+            )

             events = []
             for row in cursor.fetchall():
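list_sessions, get_session_events, and get_logs now all share one query-building idiom: a conditions list seeded with a base predicate, a parallel params list, and a single join into the WHERE clause. Only fixed column fragments are interpolated into the f-string; every user-supplied value stays behind a ? placeholder. The pattern in isolation (hypothetical filter names):

    from typing import Any, Optional

    def build_where(
        run_id: Optional[str] = None, status: Optional[str] = None
    ) -> tuple[str, list[Any]]:
        # SQL fragments and their bound values grow in lockstep
        conditions = ["1=1"]
        params: list[Any] = []
        if run_id:
            conditions.append("run_id = ?")
            params.append(run_id)
        if status:
            conditions.append("status = ?")
            params.append(status)
        return " AND ".join(conditions), params

    where, params = build_where(status="active")
    print(where)   # 1=1 AND status = ?
    print(params)  # ['active']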
@@ -1639,7 +1885,12 @@ class ProjectDatabase:
             return cursor.rowcount > 0

     def release_work_item(self, id: str) -> bool:
-        """Release a claimed work item back to
+        """Release a claimed work item back to 'completed' state.
+
+        Items generated by producer loops have status 'completed' before being
+        claimed. We restore to 'completed' (not 'pending') so that consumer
+        loops, which query for status='completed', can find and retry them.
+        """
         with self._writer() as conn:
             now = datetime.utcnow().isoformat()
             cursor = conn.execute(
@@ -1647,7 +1898,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status = '
+                    status = 'completed',
                     updated_at = ?
                 WHERE id = ? AND status = 'claimed'
                 """,
@@ -1777,6 +2028,9 @@ class ProjectDatabase:
     def release_stale_claims(self, max_age_minutes: int = 30) -> int:
         """Release claims that have been held too long (likely crashed consumer).

+        Released items are restored to 'completed' status so consumer loops
+        (which query for status='completed') can find and retry them.
+
         Args:
             max_age_minutes: Claims older than this are released.

@@ -1794,7 +2048,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status = '
+                    status = 'completed',
                     updated_at = ?
                 WHERE claimed_at < ?
                   AND claimed_by IS NOT NULL
@@ -1808,9 +2062,8 @@ class ProjectDatabase:
         """Release all claims held by a specific loop.

        Used when deleting a loop to prevent orphaned claims.
-
-
-        Items without a namespace are restored to 'pending'.
+        Released items are restored to 'completed' status so they can be
+        picked up by other consumer loops.

         Args:
             loop_name: Name of the loop whose claims should be released.
@@ -1826,7 +2079,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status =
+                    status = 'completed',
                     updated_at = ?
                 WHERE claimed_by = ? AND status = 'claimed'
                 """,
@@ -1840,9 +2093,8 @@ class ProjectDatabase:
         This is an atomic operation that checks ownership and releases in one step
         to prevent TOCTOU race conditions.

-
-
-        Items without a namespace are restored to 'pending'.
+        Released items are restored to 'completed' status so consumer loops
+        (which query for status='completed') can find and retry them.

         Args:
             id: Work item ID.
@@ -1858,7 +2110,7 @@ class ProjectDatabase:
                 UPDATE work_items
                 SET claimed_by = NULL,
                     claimed_at = NULL,
-                    status =
+                    status = 'completed',
                     updated_at = ?
                 WHERE id = ? AND claimed_by = ? AND status = 'claimed'
                 """,
@@ -2498,26 +2750,50 @@ class ProjectDatabase:
         self,
         run_id: Optional[str] = None,
         level: Optional[str] = None,
+        session_id: Optional[str] = None,
+        search: Optional[str] = None,
         limit: int = 100,
         offset: int = 0,
-    ) -> list[dict]:
-        """Get logs with optional filters.
+    ) -> tuple[list[dict], int]:
+        """Get logs with optional filters.
+
+        Returns:
+            Tuple of (logs list, total count).
+        """
         with self._reader() as conn:
             conditions = ["1=1"]
             params: list[Any] = []

             if run_id:
-                conditions.append("run_id = ?")
+                conditions.append("l.run_id = ?")
                 params.append(run_id)
             if level:
-                conditions.append("level = ?")
+                conditions.append("l.level = ?")
                 params.append(level)
+            if session_id:
+                # Filter logs by session: join through runs → sessions
+                conditions.append(
+                    "l.run_id IN (SELECT run_id FROM sessions WHERE session_id = ?)"
+                )
+                params.append(session_id)
+            if search:
+                conditions.append("l.message LIKE ?")
+                params.append(f"%{search}%")
+
+            where_clause = " AND ".join(conditions)
+
+            # Get total count
+            count_row = conn.execute(
+                f"SELECT COUNT(*) FROM logs l WHERE {where_clause}",
+                params,
+            ).fetchone()
+            total = count_row[0] if count_row else 0

             cursor = conn.execute(
                 f"""
-                SELECT
-                WHERE {
-                ORDER BY timestamp DESC
+                SELECT l.* FROM logs l
+                WHERE {where_clause}
+                ORDER BY l.timestamp DESC
                 LIMIT ? OFFSET ?
                 """,
                 params + [limit, offset],
@@ -2529,7 +2805,84 @@ class ProjectDatabase:
                 if result.get("metadata"):
                     result["metadata"] = json.loads(result["metadata"])
                 results.append(result)
-            return results
+            return results, total
+
+    def get_log_stats(
+        self,
+        run_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+    ) -> dict:
+        """Get log statistics (counts by level).
+
+        Returns:
+            Dict with by_level counts and total.
+        """
+        with self._reader() as conn:
+            conditions = ["1=1"]
+            params: list[Any] = []
+
+            if run_id:
+                conditions.append("run_id = ?")
+                params.append(run_id)
+            if session_id:
+                conditions.append(
+                    "run_id IN (SELECT run_id FROM sessions WHERE session_id = ?)"
+                )
+                params.append(session_id)
+
+            where_clause = " AND ".join(conditions)
+
+            # Count by level
+            cursor = conn.execute(
+                f"""
+                SELECT level, COUNT(*) as count FROM logs
+                WHERE {where_clause}
+                GROUP BY level
+                """,
+                params,
+            )
+            by_level = {row["level"]: row["count"] for row in cursor.fetchall()}
+
+            total = sum(by_level.values())
+
+            return {
+                "by_level": by_level,
+                "by_category": {},  # No category column in schema
+                "total": total,
+            }
+
+    def cleanup_logs(
+        self,
+        days: int = 30,
+        dry_run: bool = True,
+    ) -> dict:
+        """Delete logs older than specified days.
+
+        Args:
+            days: Delete logs older than this many days.
+            dry_run: If True, only report what would be deleted.
+
+        Returns:
+            Dict with deleted_count.
+        """
+        from datetime import datetime, timedelta
+
+        cutoff = (datetime.utcnow() - timedelta(days=days)).isoformat()
+
+        if dry_run:
+            with self._reader() as conn:
+                row = conn.execute(
+                    "SELECT COUNT(*) FROM logs WHERE timestamp < ?",
+                    (cutoff,),
+                ).fetchone()
+                return {"deleted_count": row[0] if row else 0}
+        else:
+            with self._writer() as conn:
+                cursor = conn.execute(
+                    "DELETE FROM logs WHERE timestamp < ?",
+                    (cutoff,),
+                )
+                return {"deleted_count": cursor.rowcount}

     # ========== Checkpoints ==========

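Note that get_logs changing from list[dict] to tuple[list[dict], int] is a breaking change for any caller that iterated the old return value directly; call sites now unpack logs, total. The count-then-page shape it implements, reduced to a runnable sketch (toy logs table):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE logs (id INTEGER PRIMARY KEY, level TEXT, message TEXT)")
    conn.executemany(
        "INSERT INTO logs (level, message) VALUES (?, ?)",
        [("error", "timeout"), ("info", "ok"), ("error", "timeout again")],
    )

    # Same shape as get_logs: one COUNT(*) for the total, one LIMIT/OFFSET
    # query for the page, with an identical WHERE clause for both.
    where, params = "level = ?", ["error"]
    total = conn.execute(f"SELECT COUNT(*) FROM logs WHERE {where}", params).fetchone()[0]
    page = conn.execute(
        f"SELECT * FROM logs WHERE {where} ORDER BY id LIMIT ? OFFSET ?", params + [10, 0]
    ).fetchall()
    print(total, len(page))  # 2 2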
@@ -3093,11 +3446,12 @@ class ProjectDatabase:
         now = datetime.utcnow().isoformat()

         # Build Product workflow template
+        # Uses processing_type to reference PROCESSING_TYPES in mcp/tools/workflows.py
         build_product_phases = json.dumps([
             {
                 "number": 1,
-                "name": "
-                "
+                "name": "Design Document",
+                "processing_type": "design_doc",
                 "description": "Describe what you want to build. Claude will help create a design document.",
                 "outputs": ["design_doc", "guardrails"],
                 "skippable": True,
@@ -3105,10 +3459,9 @@ class ProjectDatabase:
             },
             {
                 "number": 2,
-                "name": "Story Generation",
-                "
-                "
-                "description": "Claude generates detailed user stories from the design document.",
+                "name": "Story Generation (Extract)",
+                "processing_type": "extractgen_requirements",
+                "description": "Claude extracts user stories from the design document.",
                 "inputs": ["design_doc", "guardrails"],
                 "outputs": ["stories"],
                 "skippable": True,
@@ -3116,9 +3469,18 @@ class ProjectDatabase:
             },
             {
                 "number": 3,
+                "name": "Story Generation (Web)",
+                "processing_type": "webgen_requirements",
+                "description": "Claude discovers additional requirements via web research.",
+                "inputs": ["design_doc", "guardrails", "stories"],
+                "outputs": ["stories"],
+                "skippable": True,
+                "skipCondition": "Skip web research"
+            },
+            {
+                "number": 4,
                 "name": "Implementation",
-                "
-                "loopType": "consumer",
+                "processing_type": "implementation",
                 "description": "Claude implements each story, committing code to git.",
                 "inputs": ["stories", "design_doc", "guardrails"],
                 "outputs": ["code"],
@@ -3138,23 +3500,31 @@ class ProjectDatabase:
             ),
         )

-        # From Design Doc workflow - skips
+        # From Design Doc workflow - skips design doc, starts with story generation
         from_design_doc_phases = json.dumps([
             {
                 "number": 1,
-                "name": "Story Generation",
-                "
-                "
-                "description": "Claude generates detailed user stories from your design document.",
+                "name": "Story Generation (Extract)",
+                "processing_type": "extractgen_requirements",
+                "description": "Claude extracts user stories from your design document.",
                 "inputs": ["design_doc"],
                 "outputs": ["stories"],
                 "skippable": False
             },
             {
                 "number": 2,
+                "name": "Story Generation (Web)",
+                "processing_type": "webgen_requirements",
+                "description": "Claude discovers additional requirements via web research.",
+                "inputs": ["design_doc", "stories"],
+                "outputs": ["stories"],
+                "skippable": True,
+                "skipCondition": "Skip web research"
+            },
+            {
+                "number": 3,
                 "name": "Implementation",
-                "
-                "loopType": "consumer",
+                "processing_type": "implementation",
                 "description": "Claude implements each story, committing code to git.",
                 "inputs": ["stories", "design_doc"],
                 "outputs": ["code"],
@@ -3179,8 +3549,7 @@ class ProjectDatabase:
             {
                 "number": 1,
                 "name": "Implementation",
-                "
-                "loopType": "consumer",
+                "processing_type": "implementation",
                 "description": "Claude implements each story, committing code to git.",
                 "inputs": ["stories"],
                 "outputs": ["code"],
@@ -3200,12 +3569,12 @@ class ProjectDatabase:
             ),
         )

-        #
+        # Design Doc Only workflow - just the interactive design doc step
         planning_only_phases = json.dumps([
             {
                 "number": 1,
-                "name": "
-                "
+                "name": "Design Document",
+                "processing_type": "design_doc",
                 "description": "Collaborate with Claude to create a comprehensive design document.",
                 "outputs": ["design_doc", "guardrails"],
                 "skippable": False
@@ -3273,7 +3642,6 @@ class ProjectDatabase:
         self,
         id: str,
         name: str,
-        namespace: str,
         template_id: Optional[str] = None,
         status: str = "draft",
     ) -> dict:
@@ -3282,29 +3650,19 @@ class ProjectDatabase:
         Args:
             id: Unique workflow identifier.
             name: User-facing workflow name.
-            namespace: Namespace to link all phases.
             template_id: Optional template ID this workflow is based on.
             status: Initial status (default: draft).

         Returns:
             The created workflow dict.
-
-        Raises:
-            ValueError: If namespace is invalid.
         """
-        if not validate_namespace(namespace):
-            raise ValueError(
-                f"Invalid namespace '{namespace}'. Must match pattern: "
-                "lowercase letter followed by up to 63 lowercase letters, digits, underscores, or dashes."
-            )
-
         with self._writer() as conn:
             now = datetime.utcnow().isoformat()
             conn.execute(
                 """INSERT INTO workflows
-                (id, template_id, name,
-                VALUES (?, ?, ?, ?,
-                (id, template_id, name,
+                (id, template_id, name, status, current_step, created_at, updated_at)
+                VALUES (?, ?, ?, ?, 1, ?, ?)""",
+                (id, template_id, name, status, now, now),
             )
             return self.get_workflow(id)

@@ -3318,7 +3676,6 @@ class ProjectDatabase:
     def list_workflows(
         self,
         status: Optional[str] = None,
-        namespace: Optional[str] = None,
         include_archived: bool = False,
         archived_only: bool = False,
     ) -> list[dict]:
@@ -3326,7 +3683,6 @@ class ProjectDatabase:

         Args:
             status: Filter by workflow status.
-            namespace: Filter by namespace.
             include_archived: If True, include archived workflows.
             archived_only: If True, only return archived workflows.
         """
@@ -3337,9 +3693,6 @@ class ProjectDatabase:
             if status:
                 conditions.append("status = ?")
                 params.append(status)
-            if namespace:
-                conditions.append("namespace = ?")
-                params.append(namespace)

             # Handle archived filtering
             if archived_only:
@@ -4771,6 +5124,52 @@ class ProjectDatabase:

         return True

+    def reopen_workflow_step_atomic(
+        self,
+        workflow_id: str,
+        step_id: int,
+        step_number: int,
+    ) -> bool:
+        """Atomically reopen a completed/skipped step.
+
+        Sets the target step back to 'active', resets all later steps
+        to 'pending', and moves workflow.current_step back.
+
+        Args:
+            workflow_id: The workflow ID.
+            step_id: The ID of the step to reopen.
+            step_number: The step_number of the step to reopen.
+
+        Returns:
+            True if reopen succeeded, False otherwise.
+        """
+        with self._writer() as conn:
+            now = datetime.utcnow().isoformat()
+
+            # Set target step back to active, clear completed_at
+            conn.execute(
+                "UPDATE workflow_steps SET status = 'active', completed_at = NULL, updated_at = ? "
+                "WHERE id = ?",
+                (now, step_id),
+            )
+
+            # Reset all later steps to pending, clear timestamps
+            conn.execute(
+                "UPDATE workflow_steps SET status = 'pending', started_at = NULL, "
+                "completed_at = NULL, updated_at = ? "
+                "WHERE workflow_id = ? AND step_number > ? AND archived_at IS NULL",
+                (now, workflow_id, step_number),
+            )
+
+            # Move workflow.current_step back and ensure workflow is active
+            conn.execute(
+                "UPDATE workflows SET current_step = ?, status = 'active', updated_at = ? "
+                "WHERE id = ?",
+                (step_number, now, workflow_id),
+            )
+
+            return True
+
     # ========== Planning Sessions ==========

     def create_planning_session(
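reopen_workflow_step_atomic issues its three UPDATEs inside a single _writer() context, so they commit or roll back as one unit, which is presumably what the _atomic suffix promises. The same shape with plain sqlite3, where "with conn:" plays the role of the writer context (toy table):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE steps (n INTEGER PRIMARY KEY, status TEXT)")
    conn.executemany(
        "INSERT INTO steps VALUES (?, ?)",
        [(1, "completed"), (2, "completed"), (3, "pending")],
    )

    # "with conn:" commits on success and rolls back if any statement raises,
    # so the reopen and the reset of later steps behave as one atomic unit.
    with conn:
        conn.execute("UPDATE steps SET status = 'active' WHERE n = ?", (1,))
        conn.execute("UPDATE steps SET status = 'pending' WHERE n > ?", (1,))

    print(conn.execute("SELECT n, status FROM steps ORDER BY n").fetchall())
    # [(1, 'active'), (2, 'pending'), (3, 'pending')]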
@@ -4781,6 +5180,11 @@ class ProjectDatabase:
         messages: Optional[list] = None,
         artifacts: Optional[dict] = None,
         status: str = "active",
+        # New iteration-based fields
+        prompt: Optional[str] = None,
+        iterations_requested: int = 1,
+        run_status: str = "pending",
+        is_legacy: bool = False,
     ) -> dict:
         """Create a planning session for an interactive step.

@@ -4791,6 +5195,10 @@ class ProjectDatabase:
             messages: Initial messages (default: empty list).
             artifacts: Optional artifacts dict.
             status: Session status (default: 'active').
+            prompt: User's guidance prompt for iteration-based sessions.
+            iterations_requested: Number of iterations requested (default: 1).
+            run_status: Execution status for iterations (default: 'pending').
+            is_legacy: Whether this is a legacy chat-based session.

         Returns:
             The created session dict.
@@ -4801,9 +5209,24 @@ class ProjectDatabase:
             artifacts_json = json.dumps(artifacts) if artifacts else None
             conn.execute(
                 """INSERT INTO planning_sessions
-                (id, workflow_id, step_id, messages, artifacts, status,
-
-
+                (id, workflow_id, step_id, messages, artifacts, status,
+                prompt, iterations_requested, iterations_completed, current_iteration,
+                run_status, is_legacy, created_at, updated_at)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, 0, ?, ?, ?, ?)""",
+                (
+                    id,
+                    workflow_id,
+                    step_id,
+                    messages_json,
+                    artifacts_json,
+                    status,
+                    prompt,
+                    iterations_requested,
+                    run_status,
+                    is_legacy,
+                    now,
+                    now,
+                ),
             )
             return self.get_planning_session(id)

@@ -4824,10 +5247,11 @@ class ProjectDatabase:
             return None

     def get_planning_session_by_step(self, step_id: int) -> Optional[dict]:
-        """Get planning session
+        """Get the most recent planning session for a step ID."""
         with self._reader() as conn:
             cursor = conn.execute(
-                "SELECT * FROM planning_sessions WHERE step_id = ?",
+                "SELECT * FROM planning_sessions WHERE step_id = ? ORDER BY created_at DESC LIMIT 1",
+                (step_id,),
             )
             row = cursor.fetchone()
             if row:
@@ -4948,6 +5372,11 @@ class ProjectDatabase:
         id: str,
         status: Optional[str] = None,
         artifacts: Optional[dict] = None,
+        # New iteration-based fields
+        run_status: Optional[str] = None,
+        current_iteration: Optional[int] = None,
+        iterations_completed: Optional[int] = None,
+        error_message: Optional[str] = None,
     ) -> bool:
         """Update planning session fields."""
         updates = []
@@ -4959,6 +5388,18 @@ class ProjectDatabase:
         if artifacts is not None:
             updates.append("artifacts = ?")
             params.append(json.dumps(artifacts))
+        if run_status is not None:
+            updates.append("run_status = ?")
+            params.append(run_status)
+        if current_iteration is not None:
+            updates.append("current_iteration = ?")
+            params.append(current_iteration)
+        if iterations_completed is not None:
+            updates.append("iterations_completed = ?")
+            params.append(iterations_completed)
+        if error_message is not None:
+            updates.append("error_message = ?")
+            params.append(error_message)

         if not updates:
             return False
@@ -4978,7 +5419,309 @@ class ProjectDatabase:
         self, id: str, artifacts: Optional[dict] = None
     ) -> bool:
         """Mark a planning session as completed."""
-        return self.update_planning_session(
+        return self.update_planning_session(
+            id, status="completed", run_status="completed", artifacts=artifacts
+        )
+
+    def get_running_planning_session(self, workflow_id: str) -> Optional[dict]:
+        """Get any currently running planning session for a workflow.
+
+        Used to prevent multiple concurrent sessions.
+
+        Args:
+            workflow_id: The workflow ID to check.
+
+        Returns:
+            The running session dict if found, None otherwise.
+        """
+        with self._reader() as conn:
+            cursor = conn.execute(
+                """SELECT * FROM planning_sessions
+                WHERE workflow_id = ? AND run_status = 'running'
+                ORDER BY created_at DESC LIMIT 1""",
+                (workflow_id,),
+            )
+            row = cursor.fetchone()
+            if row:
+                result = dict(row)
+                if result.get("messages"):
+                    result["messages"] = json.loads(result["messages"])
+                if result.get("artifacts"):
+                    result["artifacts"] = json.loads(result["artifacts"])
+                return result
+            return None
+
+    def cancel_planning_session(self, id: str) -> bool:
+        """Cancel a running planning session.
+
+        Args:
+            id: The session ID to cancel.
+
+        Returns:
+            True if session was cancelled, False otherwise.
+        """
+        with self._writer() as conn:
+            cursor = conn.execute(
+                """UPDATE planning_sessions
+                SET run_status = 'cancelled', updated_at = ?
+                WHERE id = ? AND run_status = 'running'""",
+                (datetime.utcnow().isoformat(), id),
+            )
+            return cursor.rowcount > 0
+
+    # ========== Planning Iterations ==========
+
+    def create_planning_iteration(
+        self,
+        session_id: str,
+        iteration_number: int,
+        status: str = "pending",
+    ) -> Optional[dict]:
+        """Create a new planning iteration record.
+
+        Args:
+            session_id: Parent session ID.
+            iteration_number: The iteration number (1-indexed).
+            status: Initial status (default: 'pending').
+
+        Returns:
+            The created iteration dict.
+        """
+        with self._writer() as conn:
+            cursor = conn.execute(
+                """INSERT INTO planning_iterations
+                (session_id, iteration_number, status)
+                VALUES (?, ?, ?)""",
+                (session_id, iteration_number, status),
+            )
+            return self.get_planning_iteration(cursor.lastrowid)
+
+    def get_planning_iteration(self, iteration_id: int) -> Optional[dict]:
+        """Get a planning iteration by ID."""
+        with self._reader() as conn:
+            cursor = conn.execute(
+                "SELECT * FROM planning_iterations WHERE id = ?", (iteration_id,)
+            )
+            row = cursor.fetchone()
+            if row:
+                result = dict(row)
+                if result.get("tool_calls"):
+                    result["tool_calls"] = json.loads(result["tool_calls"])
+                return result
+            return None
+
+    def list_planning_iterations(self, session_id: str) -> list[dict]:
+        """List all iterations for a planning session.
+
+        Args:
+            session_id: The session ID.
+
+        Returns:
+            List of iteration dicts ordered by iteration_number.
+        """
+        with self._reader() as conn:
+            cursor = conn.execute(
+                """SELECT * FROM planning_iterations
+                WHERE session_id = ?
+                ORDER BY iteration_number ASC""",
+                (session_id,),
+            )
+            results = []
+            for row in cursor.fetchall():
+                result = dict(row)
+                if result.get("tool_calls"):
+                    result["tool_calls"] = json.loads(result["tool_calls"])
+                results.append(result)
+            return results
+
+    def update_planning_iteration(
+        self,
+        iteration_id: int,
+        status: Optional[str] = None,
+        started_at: Optional[str] = None,
+        completed_at: Optional[str] = None,
+        chars_added: Optional[int] = None,
+        chars_removed: Optional[int] = None,
+        tool_calls: Optional[list] = None,
+        summary: Optional[str] = None,
+        error_message: Optional[str] = None,
+        diff_text: Optional[str] = None,
+        doc_before: Optional[str] = None,
+        doc_after: Optional[str] = None,
+    ) -> bool:
+        """Update a planning iteration.
+
+        Args:
+            iteration_id: The iteration ID.
+            status: New status.
+            started_at: Start timestamp.
+            completed_at: Completion timestamp.
+            chars_added: Characters added to design doc.
+            chars_removed: Characters removed from design doc.
+            tool_calls: List of tool call records.
+            summary: Brief summary of changes.
+            error_message: Error message if failed.
+            diff_text: Unified diff of changes.
+            doc_before: Document content before iteration.
+            doc_after: Document content after iteration.
+
+        Returns:
+            True if updated, False otherwise.
+        """
+        updates = []
+        params: list[Any] = []
+
+        if status is not None:
+            updates.append("status = ?")
+            params.append(status)
+        if started_at is not None:
+            updates.append("started_at = ?")
+            params.append(started_at)
+        if completed_at is not None:
+            updates.append("completed_at = ?")
+            params.append(completed_at)
+        if chars_added is not None:
+            updates.append("chars_added = ?")
+            params.append(chars_added)
+        if chars_removed is not None:
+            updates.append("chars_removed = ?")
+            params.append(chars_removed)
+        if tool_calls is not None:
+            updates.append("tool_calls = ?")
+            params.append(json.dumps(tool_calls))
+        if summary is not None:
+            updates.append("summary = ?")
+            params.append(summary)
+        if error_message is not None:
+            updates.append("error_message = ?")
+            params.append(error_message)
+        if diff_text is not None:
+            updates.append("diff_text = ?")
+            params.append(diff_text)
+        if doc_before is not None:
+            updates.append("doc_before = ?")
+            params.append(doc_before)
+        if doc_after is not None:
+            updates.append("doc_after = ?")
+            params.append(doc_after)
+
+        if not updates:
+            return False
+
+        params.append(iteration_id)
+
+        with self._writer() as conn:
+            cursor = conn.execute(
+                f"UPDATE planning_iterations SET {', '.join(updates)} WHERE id = ?",
+                params,
+            )
+            return cursor.rowcount > 0
+
+    def start_planning_iteration(self, iteration_id: int) -> bool:
+        """Mark an iteration as started."""
+        return self.update_planning_iteration(
+            iteration_id,
+            status="running",
+            started_at=datetime.utcnow().isoformat(),
+        )
+
+    def complete_planning_iteration(
+        self,
+        iteration_id: int,
+        chars_added: int = 0,
+        chars_removed: int = 0,
+        tool_calls: Optional[list] = None,
+        summary: Optional[str] = None,
+        diff_text: Optional[str] = None,
+        doc_before: Optional[str] = None,
+        doc_after: Optional[str] = None,
+    ) -> bool:
+        """Mark an iteration as completed with results."""
+        return self.update_planning_iteration(
+            iteration_id,
+            status="completed",
+            completed_at=datetime.utcnow().isoformat(),
+            chars_added=chars_added,
+            chars_removed=chars_removed,
+            tool_calls=tool_calls,
+            summary=summary,
+            diff_text=diff_text,
+            doc_before=doc_before,
+            doc_after=doc_after,
+        )
+
+    def fail_planning_iteration(
+        self, iteration_id: int, error_message: str
+    ) -> bool:
+        """Mark an iteration as failed."""
+        return self.update_planning_iteration(
+            iteration_id,
+            status="failed",
+            completed_at=datetime.utcnow().isoformat(),
+            error_message=error_message,
+        )
+
+    # ========== Planning Iteration Events ==========
+
+    def add_planning_iteration_event(
+        self,
+        session_id: str,
+        event_type: str,
+        iteration_number: Optional[int] = None,
+        content: Optional[str] = None,
+        tool_name: Optional[str] = None,
+        tool_input: Optional[str] = None,
+        tool_result: Optional[str] = None,
+        event_data: Optional[str] = None,
+    ) -> int:
+        """Add a planning iteration event to the persistent log.
+
+        Returns:
+            The event ID.
+        """
+        with self._writer() as conn:
+            cursor = conn.execute(
+                """INSERT INTO planning_iteration_events
+                (session_id, iteration_number, event_type, content, tool_name, tool_input, tool_result, event_data)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
+                (session_id, iteration_number, event_type, content, tool_name, tool_input, tool_result, event_data),
+            )
+            return cursor.lastrowid
+
+    def get_planning_iteration_events(
+        self,
+        session_id: str,
+        after_id: int = 0,
+        limit: int = 500,
+    ) -> list[dict]:
+        """Get planning iteration events for a session.
+
+        Args:
+            session_id: The session ID.
+            after_id: Only return events with id > after_id (for pagination/polling).
+            limit: Maximum events to return.
+
+        Returns:
+            List of event dicts ordered by id ASC.
+        """
+        with self._reader() as conn:
+            cursor = conn.execute(
+                """SELECT * FROM planning_iteration_events
+                WHERE session_id = ? AND id > ?
+                ORDER BY id ASC
+                LIMIT ?""",
+                (session_id, after_id, limit),
+            )
+            return [dict(row) for row in cursor.fetchall()]
+
+    def get_latest_event_timestamp(self, session_id: str) -> Optional[str]:
+        """Get the timestamp of the most recent event for a planning session."""
+        with self._reader() as conn:
+            row = conn.execute(
+                "SELECT MAX(timestamp) FROM planning_iteration_events WHERE session_id = ?",
+                (session_id,),
+            ).fetchone()
+            return row[0] if row and row[0] else None

     # ========== Utilities ==========

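Finally, get_planning_iteration_events with after_id supplies the client half of the reconnection story: poll with the last processed id as a cursor and stop once the session's run_status leaves 'running'. A hedged sketch of such a tail loop (db is assumed to be a ProjectDatabase instance; only methods shown in this diff are called, and the poll interval is illustrative):

    import time

    def follow_events(db, session_id: str) -> None:
        """Tail planning-iteration events until the session stops running."""
        cursor = 0
        while True:
            for event in db.get_planning_iteration_events(session_id, after_id=cursor):
                cursor = event["id"]  # advance the resume cursor
                print(event["event_type"], event.get("content"))
            session = db.get_planning_session(session_id)
            if session and session.get("run_status") != "running":
                break
            time.sleep(1.0)  # illustrative poll interval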