ralphx 0.3.4__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. ralphx/__init__.py +1 -1
  2. ralphx/adapters/base.py +10 -2
  3. ralphx/adapters/claude_cli.py +222 -82
  4. ralphx/api/routes/auth.py +780 -98
  5. ralphx/api/routes/config.py +3 -56
  6. ralphx/api/routes/export_import.py +6 -9
  7. ralphx/api/routes/loops.py +4 -4
  8. ralphx/api/routes/planning.py +882 -19
  9. ralphx/api/routes/resources.py +528 -6
  10. ralphx/api/routes/stream.py +58 -56
  11. ralphx/api/routes/templates.py +2 -2
  12. ralphx/api/routes/workflows.py +258 -47
  13. ralphx/cli.py +4 -1
  14. ralphx/core/auth.py +372 -172
  15. ralphx/core/database.py +588 -164
  16. ralphx/core/executor.py +170 -19
  17. ralphx/core/loop.py +15 -2
  18. ralphx/core/loop_templates.py +29 -3
  19. ralphx/core/planning_iteration_executor.py +633 -0
  20. ralphx/core/planning_service.py +119 -24
  21. ralphx/core/preview.py +9 -25
  22. ralphx/core/project_db.py +864 -121
  23. ralphx/core/project_export.py +1 -5
  24. ralphx/core/project_import.py +14 -29
  25. ralphx/core/resources.py +28 -2
  26. ralphx/core/sample_project.py +1 -5
  27. ralphx/core/templates.py +9 -9
  28. ralphx/core/workflow_executor.py +32 -3
  29. ralphx/core/workflow_export.py +4 -7
  30. ralphx/core/workflow_import.py +3 -27
  31. ralphx/mcp/__init__.py +6 -2
  32. ralphx/mcp/registry.py +3 -3
  33. ralphx/mcp/tools/diagnostics.py +1 -1
  34. ralphx/mcp/tools/monitoring.py +10 -16
  35. ralphx/mcp/tools/workflows.py +115 -33
  36. ralphx/mcp_server.py +6 -2
  37. ralphx/static/assets/index-BuLI7ffn.css +1 -0
  38. ralphx/static/assets/index-DWvlqOTb.js +264 -0
  39. ralphx/static/assets/index-DWvlqOTb.js.map +1 -0
  40. ralphx/static/index.html +2 -2
  41. ralphx/templates/loop_templates/consumer.md +2 -2
  42. {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/METADATA +33 -12
  43. {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/RECORD +45 -44
  44. ralphx/static/assets/index-CcRDyY3b.css +0 -1
  45. ralphx/static/assets/index-CcxfTosc.js +0 -251
  46. ralphx/static/assets/index-CcxfTosc.js.map +0 -1
  47. {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/WHEEL +0 -0
  48. {ralphx-0.3.4.dist-info → ralphx-0.4.0.dist-info}/entry_points.txt +0 -0
@@ -4,12 +4,15 @@ Planning sessions are interactive chat-based conversations with Claude
4
4
  for the planning step of workflows.
5
5
  """
6
6
 
7
+ import asyncio
8
+ import json
7
9
  import logging
8
10
  import sqlite3
9
11
  import uuid
12
+ from datetime import datetime, timedelta
10
13
  from typing import Any, Optional
11
14
 
12
- from fastapi import APIRouter, HTTPException, status
15
+ from fastapi import APIRouter, HTTPException, Query, status
13
16
  from fastapi.responses import StreamingResponse
14
17
  from pydantic import BaseModel, Field
15
18
 
@@ -48,6 +51,38 @@ class PlanningSessionResponse(BaseModel):
48
51
  updated_at: str
49
52
 
50
53
 
54
+ class PlanningSessionSummary(BaseModel):
55
+ """Summary of a planning session for list display."""
56
+
57
+ id: str
58
+ step_id: int
59
+ status: str # 'active', 'completed', 'interrupted'
60
+ message_count: int
61
+ first_user_message: Optional[str] = None # Truncated to 100 chars
62
+ created_at: str
63
+ updated_at: str
64
+ # Diff stats (if we have before/after snapshots)
65
+ chars_added: Optional[int] = None
66
+ chars_removed: Optional[int] = None
67
+ backup_created: Optional[str] = None # Backup filename if one was created
68
+
69
+
70
+ class PlanningSessionDetail(BaseModel):
71
+ """Full planning session with messages."""
72
+
73
+ id: str
74
+ workflow_id: str
75
+ step_id: int
76
+ status: str
77
+ messages: list[PlanningMessage]
78
+ artifacts: Optional[dict] = None
79
+ created_at: str
80
+ updated_at: str
81
+ # Snapshot info
82
+ initial_content_size: Optional[int] = None # Size when session started
83
+ final_content_size: Optional[int] = None # Size when session ended
84
+
85
+
51
86
  class SendMessageRequest(BaseModel):
52
87
  """Request model for sending a message to Claude."""
53
88
 
@@ -68,6 +103,95 @@ class ArtifactUpdate(BaseModel):
68
103
  guardrails: Optional[str] = None
69
104
 
70
105
 
106
+ # ============================================================================
107
+ # Iteration-Based Planning Models (v17)
108
+ # ============================================================================
109
+
110
+
111
+ class StartIterationRequest(BaseModel):
112
+ """Request model for starting an iteration session."""
113
+
114
+ prompt: str = Field(..., min_length=1, description="User's guidance for the iterations")
115
+ iterations: int = Field(default=3, ge=1, le=10, description="Number of iterations (1-10)")
116
+ model: str = Field(default="opus", description="Model to use")
117
+
118
+
119
+ class CancelIterationRequest(BaseModel):
120
+ """Request model for cancelling an iteration session."""
121
+
122
+ session_id: str = Field(..., description="Session ID to cancel")
123
+
124
+
125
+ class IterationResponse(BaseModel):
126
+ """Response model for iteration session."""
127
+
128
+ id: str
129
+ workflow_id: str
130
+ step_id: int
131
+ prompt: Optional[str] = None
132
+ iterations_requested: int
133
+ iterations_completed: int
134
+ current_iteration: int
135
+ run_status: str
136
+ is_legacy: bool
137
+ error_message: Optional[str] = None
138
+ artifacts: Optional[dict] = None
139
+ status: str
140
+ created_at: str
141
+ updated_at: str
142
+
143
+
144
+ class PlanningIterationSummary(BaseModel):
145
+ """Summary of a planning iteration."""
146
+
147
+ id: int
148
+ iteration_number: int
149
+ status: str
150
+ chars_added: int
151
+ chars_removed: int
152
+ summary: Optional[str] = None
153
+ started_at: Optional[str] = None
154
+ completed_at: Optional[str] = None
155
+
156
+
157
+ class DiffLine(BaseModel):
158
+ """A single line in a unified diff."""
159
+
160
+ line: str
161
+ type: str # 'add', 'remove', 'context', 'hunk'
162
+
163
+
164
+ class IterationDiffResponse(BaseModel):
165
+ """Response model for iteration diff."""
166
+
167
+ iteration_id: int
168
+ iteration_number: int
169
+ diff_text: Optional[str] = None
170
+ chars_added: int = 0
171
+ chars_removed: int = 0
172
+ diff_lines: list[DiffLine] = []
173
+
174
+
175
+ class IterationSessionSummary(BaseModel):
176
+ """Summary of an iteration session for list display."""
177
+
178
+ id: str
179
+ step_id: int
180
+ status: str
181
+ run_status: str
182
+ is_legacy: bool
183
+ prompt: Optional[str] = None # Full for iteration sessions, truncated for legacy
184
+ iterations_requested: int
185
+ iterations_completed: int
186
+ current_iteration: int = 0
187
+ created_at: str
188
+ updated_at: str
189
+ # Aggregate stats from iterations
190
+ total_chars_added: int = 0
191
+ total_chars_removed: int = 0
192
+ iterations: list[PlanningIterationSummary] = []
193
+
194
+
71
195
  # ============================================================================
72
196
  # Helper Functions
73
197
  # ============================================================================
@@ -195,11 +319,33 @@ async def get_planning_session(slug: str, workflow_id: str):
195
319
  session = pdb.get_planning_session_by_step(current_step["id"])
196
320
  if not session:
197
321
  session_id = f"ps-{uuid.uuid4().hex[:12]}"
322
+
323
+ # Check if step has a design_doc_path configured - load existing content
324
+ initial_artifacts = None
325
+ step_config = current_step.get("config") or {}
326
+ design_doc_path = step_config.get("design_doc_path")
327
+ if design_doc_path:
328
+ from pathlib import Path
329
+ doc_dir = Path(project["path"]) / ".ralphx" / "resources" / "design_doc"
330
+ doc_file = doc_dir / design_doc_path
331
+ # Security: verify path stays within design_doc directory
332
+ if (".." not in design_doc_path and "\0" not in design_doc_path
333
+ and doc_file.resolve().is_relative_to(doc_dir.resolve())
334
+ and doc_file.exists()):
335
+ try:
336
+ initial_artifacts = {"design_doc": doc_file.read_text()}
337
+ logger.info(f"Loaded existing design doc from {doc_file}")
338
+ except Exception as e:
339
+ logger.warning(f"Failed to load design doc {doc_file}: {e}")
340
+ elif ".." in design_doc_path or "\0" in design_doc_path:
341
+ logger.warning(f"Path traversal blocked in design_doc_path: {design_doc_path!r}")
342
+
198
343
  session = pdb.create_planning_session(
199
344
  id=session_id,
200
345
  workflow_id=workflow_id,
201
346
  step_id=current_step["id"],
202
347
  messages=[],
348
+ artifacts=initial_artifacts,
203
349
  )
204
350
 
205
351
  return _session_to_response(session)
@@ -269,9 +415,26 @@ async def stream_planning_response(slug: str, workflow_id: str):
269
415
  detail="Planning session is not active",
270
416
  )
271
417
 
272
- # Get workflow for context
418
+ # Get workflow and current step for context
273
419
  workflow = pdb.get_workflow(workflow_id)
274
420
 
421
+ # Get the step to access its config (tools, model, timeout)
422
+ step = pdb.get_workflow_step(session["step_id"])
423
+ step_config = step.get("config", {}) if step else {}
424
+
425
+ # Default tools for design_doc steps (matches PROCESSING_TYPES in workflows.py)
426
+ DEFAULT_DESIGN_DOC_TOOLS = ["WebSearch", "WebFetch", "Bash", "Read", "Glob", "Grep", "Edit", "Write"]
427
+
428
+ # Extract configuration from step, with defaults for design_doc
429
+ loop_type = step_config.get("loopType", "design_doc")
430
+ allowed_tools = step_config.get("allowedTools")
431
+ if allowed_tools is None and loop_type == "design_doc":
432
+ allowed_tools = DEFAULT_DESIGN_DOC_TOOLS
433
+ elif allowed_tools is None:
434
+ allowed_tools = []
435
+ model = step_config.get("model", "opus") # Default to opus for design docs
436
+ timeout = step_config.get("timeout", 180)
437
+
275
438
  async def generate_response():
276
439
  """Generate streaming response from Claude."""
277
440
  import json
@@ -289,40 +452,65 @@ async def stream_planning_response(slug: str, workflow_id: str):
289
452
  messages = session.get("messages", [])
290
453
  accumulated = ""
291
454
 
455
+ error_occurred = False
456
+ error_message = None
457
+
292
458
  try:
293
- async for event in service.stream_response(messages):
459
+ async for event in service.stream_response(
460
+ messages,
461
+ model=model,
462
+ tools=allowed_tools if allowed_tools else None,
463
+ timeout=timeout,
464
+ ):
294
465
  if event.type == AdapterEvent.TEXT:
295
466
  text = event.text or ""
296
467
  accumulated += text
297
468
  yield f"data: {json.dumps({'type': 'content', 'content': text})}\n\n"
469
+ elif event.type == AdapterEvent.TOOL_USE:
470
+ # Forward tool use events so frontend can show activity
471
+ yield f"data: {json.dumps({'type': 'tool_use', 'tool': event.tool_name, 'input': event.tool_input})}\n\n"
472
+ elif event.type == AdapterEvent.TOOL_RESULT:
473
+ # Forward tool result (truncated for display)
474
+ result_preview = str(event.tool_result or "")[:200]
475
+ if len(str(event.tool_result or "")) > 200:
476
+ result_preview += "..."
477
+ yield f"data: {json.dumps({'type': 'tool_result', 'tool': event.tool_name, 'result': result_preview})}\n\n"
298
478
  elif event.type == AdapterEvent.ERROR:
299
479
  logger.warning(f"Claude error: {event.error_message}")
300
- safe_message = _sanitize_error_message(event.error_message or "Claude error")
301
- yield f"data: {json.dumps({'type': 'error', 'message': safe_message})}\n\n"
302
- return
480
+ error_occurred = True
481
+ error_message = _sanitize_error_message(event.error_message or "Claude error")
482
+ # Don't return early - save accumulated content first
483
+ break
303
484
  elif event.type == AdapterEvent.COMPLETE:
304
485
  break
305
486
 
306
- # Add assistant message to session
307
- if accumulated:
487
+ except Exception as e:
488
+ # Log full error for debugging but sanitize for client
489
+ logger.warning(f"Error during streaming response: {e}", exc_info=True)
490
+ error_occurred = True
491
+ error_message = _sanitize_error_message(str(e))
492
+
493
+ # Always save accumulated content, even on error
494
+ # This preserves partial responses from Claude
495
+ if accumulated:
496
+ try:
308
497
  pdb.add_planning_message(
309
498
  session_id=session["id"],
310
499
  role="assistant",
311
500
  content=accumulated,
312
501
  )
502
+ except Exception as save_err:
503
+ logger.warning(f"Failed to save accumulated message: {save_err}")
313
504
 
314
- yield f"data: {json.dumps({'type': 'done'})}\n\n"
315
-
316
- except Exception as e:
317
- # Log full error for debugging but sanitize for client
318
- logger.warning(f"Error during streaming response: {e}", exc_info=True)
505
+ # Send error if one occurred (after saving content)
506
+ if error_occurred:
319
507
  try:
320
- # Sanitize error message to avoid leaking internal paths/details
321
- safe_message = _sanitize_error_message(str(e))
322
- yield f"data: {json.dumps({'type': 'error', 'message': safe_message})}\n\n"
508
+ yield f"data: {json.dumps({'type': 'error', 'message': error_message})}\n\n"
323
509
  except Exception:
324
510
  pass # Client disconnected
325
511
 
512
+ yield f"data: {json.dumps({'type': 'done'})}\n\n"
513
+
326
514
  return StreamingResponse(
327
515
  generate_response(),
328
516
  media_type="text/event-stream",
@@ -405,9 +593,9 @@ async def complete_planning_session(
405
593
 
406
594
  # Get workflow info
407
595
  workflow = pdb.get_workflow(workflow_id)
408
- namespace = workflow["namespace"]
409
596
 
410
597
  # Save artifacts as project resources
598
+ # Use workflow_id for unique filenames (namespace was removed in schema v16)
411
599
  from pathlib import Path
412
600
  from datetime import datetime
413
601
 
@@ -416,7 +604,7 @@ async def complete_planning_session(
416
604
  resource_path = Path(project["path"]) / ".ralphx" / "resources"
417
605
  resource_path.mkdir(parents=True, exist_ok=True)
418
606
 
419
- doc_filename = f"design-doc-{namespace}.md"
607
+ doc_filename = f"design-doc-{workflow_id}.md"
420
608
  doc_path = resource_path / doc_filename
421
609
  doc_path.write_text(artifacts["design_doc"])
422
610
 
@@ -442,7 +630,7 @@ async def complete_planning_session(
442
630
  resource_path = Path(project["path"]) / ".ralphx" / "resources"
443
631
  resource_path.mkdir(parents=True, exist_ok=True)
444
632
 
445
- guardrails_filename = f"guardrails-{namespace}.md"
633
+ guardrails_filename = f"guardrails-{workflow_id}.md"
446
634
  guardrails_path = resource_path / guardrails_filename
447
635
  guardrails_path.write_text(artifacts["guardrails"])
448
636
 
@@ -592,3 +780,678 @@ async def generate_artifacts(slug: str, workflow_id: str):
592
780
  "Connection": "keep-alive",
593
781
  },
594
782
  )
783
+
784
+
785
+ # ============================================================================
786
+ # Iteration-Based Planning Endpoints (v17)
787
+ # ============================================================================
788
+
789
+
790
+ def _session_to_iteration_response(session: dict) -> IterationResponse:
791
+ """Convert planning session to iteration response model."""
792
+ return IterationResponse(
793
+ id=session["id"],
794
+ workflow_id=session["workflow_id"],
795
+ step_id=session["step_id"],
796
+ prompt=session.get("prompt"),
797
+ iterations_requested=session.get("iterations_requested", 1),
798
+ iterations_completed=session.get("iterations_completed", 0),
799
+ current_iteration=session.get("current_iteration", 0),
800
+ run_status=session.get("run_status", "pending"),
801
+ is_legacy=session.get("is_legacy", False),
802
+ error_message=session.get("error_message"),
803
+ artifacts=session.get("artifacts"),
804
+ status=session["status"],
805
+ created_at=session["created_at"],
806
+ updated_at=session["updated_at"],
807
+ )
808
+
809
+
810
+ @router.post(
811
+ "/workflows/{workflow_id}/planning/iterate",
812
+ response_model=IterationResponse,
813
+ )
814
+ async def start_iteration_session(
815
+ slug: str, workflow_id: str, request: StartIterationRequest
816
+ ):
817
+ """Start a new iteration-based planning session.
818
+
819
+ Creates a new session and returns immediately. Use the stream endpoint
820
+ to receive progress events as iterations run.
821
+ """
822
+ pdb, project = _get_project_db(slug)
823
+
824
+ # Verify workflow exists
825
+ workflow = pdb.get_workflow(workflow_id)
826
+ if not workflow:
827
+ raise HTTPException(
828
+ status_code=status.HTTP_404_NOT_FOUND,
829
+ detail=f"Workflow '{workflow_id}' not found",
830
+ )
831
+
832
+ # Find the current interactive step
833
+ steps = pdb.list_workflow_steps(workflow_id)
834
+ current_step = None
835
+ for s in steps:
836
+ if s["step_number"] == workflow["current_step"]:
837
+ current_step = s
838
+ break
839
+
840
+ if not current_step:
841
+ raise HTTPException(
842
+ status_code=status.HTTP_400_BAD_REQUEST,
843
+ detail="No current step found",
844
+ )
845
+
846
+ if current_step["step_type"] != "interactive":
847
+ raise HTTPException(
848
+ status_code=status.HTTP_400_BAD_REQUEST,
849
+ detail=f"Current step '{current_step['name']}' is not interactive",
850
+ )
851
+
852
+ # Check for already running session (concurrency protection) with stale recovery
853
+ existing = pdb.get_running_planning_session(workflow_id)
854
+ if existing:
855
+ updated = datetime.fromisoformat(existing["updated_at"])
856
+ if datetime.utcnow() - updated > timedelta(minutes=10):
857
+ logger.warning(f"Auto-recovering stale session '{existing['id']}' (last updated: {existing['updated_at']})")
858
+ pdb.update_planning_session(
859
+ existing["id"], run_status="error", error_message="Session timed out (stale recovery)"
860
+ )
861
+ else:
862
+ raise HTTPException(
863
+ status_code=status.HTTP_409_CONFLICT,
864
+ detail=f"Session '{existing['id']}' is already running. Cancel it first.",
865
+ )
866
+
867
+ # Check if there's an existing active session to build upon
868
+ existing_session = pdb.get_planning_session_by_step(current_step["id"])
869
+ initial_artifacts = None
870
+
871
+ if existing_session:
872
+ # Preserve existing artifacts (design doc)
873
+ initial_artifacts = existing_session.get("artifacts")
874
+ # Mark old session as completed if it's active
875
+ if existing_session["status"] == "active":
876
+ pdb.update_planning_session(
877
+ existing_session["id"],
878
+ status="completed",
879
+ run_status="completed" if existing_session.get("run_status") == "running" else existing_session.get("run_status", "completed"),
880
+ )
881
+
882
+ # Also check step config for design_doc_path to load existing content
883
+ if not initial_artifacts:
884
+ step_config = current_step.get("config") or {}
885
+ design_doc_path = step_config.get("design_doc_path")
886
+ if design_doc_path:
887
+ from pathlib import Path
888
+ doc_dir = Path(project["path"]) / ".ralphx" / "resources" / "design_doc"
889
+ doc_file = doc_dir / design_doc_path
890
+ # Security: verify path stays within design_doc directory
891
+ if (".." not in design_doc_path and "\0" not in design_doc_path
892
+ and doc_file.resolve().is_relative_to(doc_dir.resolve())
893
+ and doc_file.exists()):
894
+ try:
895
+ initial_artifacts = {"design_doc": doc_file.read_text()}
896
+ logger.info(f"Loaded existing design doc from {doc_file}")
897
+ except Exception as e:
898
+ logger.warning(f"Failed to load design doc {doc_file}: {e}")
899
+ elif ".." in design_doc_path or "\0" in design_doc_path:
900
+ logger.warning(f"Path traversal blocked in design_doc_path: {design_doc_path!r}")
901
+
902
+ # Create new iteration session
903
+ session_id = f"ps-{uuid.uuid4().hex[:12]}"
904
+ session = pdb.create_planning_session(
905
+ id=session_id,
906
+ workflow_id=workflow_id,
907
+ step_id=current_step["id"],
908
+ messages=[], # Not used in iteration mode
909
+ artifacts=initial_artifacts,
910
+ status="active",
911
+ prompt=request.prompt,
912
+ iterations_requested=request.iterations,
913
+ run_status="pending",
914
+ is_legacy=False,
915
+ )
916
+
917
+ # Get step configuration for tools/model
918
+ step_config = current_step.get("config", {}) or {}
919
+ DEFAULT_DESIGN_DOC_TOOLS = ["WebSearch", "WebFetch", "Bash", "Read", "Glob", "Grep", "Edit", "Write"]
920
+ allowed_tools = step_config.get("allowedTools") or DEFAULT_DESIGN_DOC_TOOLS
921
+ model = step_config.get("model", "opus")
922
+
923
+ # Launch executor as background task
924
+ async def run_executor_background():
925
+ from ralphx.core.project import Project
926
+ from ralphx.core.planning_iteration_executor import PlanningIterationExecutor
927
+
928
+ project_obj = Project.from_dict(project)
929
+
930
+ # Resolve the design doc file path for file-based editing
931
+ from pathlib import Path as _Path
932
+ _doc_dir = _Path(project["path"]) / ".ralphx" / "resources"
933
+ _configured_path = step_config.get("design_doc_path")
934
+ if _configured_path:
935
+ _design_doc_dir = _doc_dir / "design_doc"
936
+ _doc_file = _design_doc_dir / _configured_path
937
+ # Security: verify path stays within design_doc directory
938
+ if (".." in _configured_path or "\0" in _configured_path
939
+ or not _doc_file.resolve().is_relative_to(_design_doc_dir.resolve())):
940
+ logger.warning(f"Path traversal blocked in design_doc_path for executor: {_configured_path!r}")
941
+ _doc_file = _doc_dir / f"design-doc-{workflow_id}.md"
942
+ else:
943
+ _doc_file = _doc_dir / f"design-doc-{workflow_id}.md"
944
+
945
+ executor = PlanningIterationExecutor(
946
+ project=project_obj,
947
+ pdb=pdb,
948
+ session_id=session_id,
949
+ project_id=project.get("id"),
950
+ design_doc_path=str(_doc_file),
951
+ )
952
+
953
+ try:
954
+ async for event in executor.run(
955
+ prompt=request.prompt,
956
+ iterations=request.iterations,
957
+ model=model,
958
+ tools=allowed_tools,
959
+ ):
960
+ # Persist every event to DB
961
+ pdb.add_planning_iteration_event(
962
+ session_id=session_id,
963
+ event_type=event.get("type", "unknown"),
964
+ iteration_number=event.get("iteration"),
965
+ content=event.get("text"),
966
+ tool_name=event.get("tool"),
967
+ tool_input=json.dumps(event.get("input"))[:1000] if event.get("input") else None,
968
+ tool_result=(event.get("result") or "")[:1000] if event.get("result") else None,
969
+ event_data=json.dumps(event),
970
+ )
971
+ except Exception as e:
972
+ logger.error(f"Background executor error for session {session_id}: {e}", exc_info=True)
973
+ try:
974
+ pdb.update_planning_session(session_id, run_status="error", error_message="Executor failed")
975
+ pdb.add_planning_iteration_event(
976
+ session_id=session_id,
977
+ event_type="error",
978
+ event_data=json.dumps({"type": "error", "message": "Execution failed unexpectedly", "fatal": True}),
979
+ )
980
+ except Exception:
981
+ pass
982
+
983
+ task = asyncio.create_task(run_executor_background(), name=f"planning-iteration-{session_id}")
984
+
985
+ def _on_task_done(t: asyncio.Task) -> None:
986
+ if t.cancelled():
987
+ logger.warning(f"Planning iteration task {session_id} was cancelled")
988
+ elif t.exception():
989
+ logger.error(f"Planning iteration task {session_id} failed: {t.exception()}")
990
+
991
+ task.add_done_callback(_on_task_done)
992
+
993
+ return _session_to_iteration_response(session)
994
+
995
+
996
+ @router.get("/workflows/{workflow_id}/planning/iterate/stream/{session_id}")
997
+ async def stream_iteration_progress(
998
+ slug: str,
999
+ workflow_id: str,
1000
+ session_id: str,
1001
+ after_event_id: int = Query(default=0, description="Resume from this event ID"),
1002
+ ):
1003
+ """Stream iteration progress via Server-Sent Events (DB-polling).
1004
+
1005
+ This endpoint polls the planning_iteration_events table for new events.
1006
+ Supports reconnection: pass after_event_id to resume from where you left off.
1007
+
1008
+ Events include:
1009
+ - iteration_start: {iteration: N, total: M}
1010
+ - tool_use: {tool: "WebSearch", input: {...}}
1011
+ - tool_result: {tool: "WebSearch", result: "..."}
1012
+ - content: {text: "..."} - Claude's response text
1013
+ - design_doc_updated: {chars_added: N, chars_removed: M}
1014
+ - heartbeat: {} - Periodic keepalive
1015
+ - iteration_complete: {iteration: N, summary: "..."}
1016
+ - error: {message: "..."}
1017
+ - cancelled: {iterations_completed: N}
1018
+ - done: {iterations_completed: N}
1019
+ """
1020
+ pdb, project = _get_project_db(slug)
1021
+
1022
+ # Verify session exists and belongs to this workflow
1023
+ session = pdb.get_planning_session(session_id)
1024
+ if not session:
1025
+ raise HTTPException(
1026
+ status_code=status.HTTP_404_NOT_FOUND,
1027
+ detail=f"Session '{session_id}' not found",
1028
+ )
1029
+
1030
+ if session["workflow_id"] != workflow_id:
1031
+ raise HTTPException(
1032
+ status_code=status.HTTP_404_NOT_FOUND,
1033
+ detail=f"Session '{session_id}' not found in workflow '{workflow_id}'",
1034
+ )
1035
+
1036
+ async def generate_stream():
1037
+ """Poll DB for events and stream as SSE."""
1038
+ last_id = after_event_id
1039
+
1040
+ # Immediate stale check before entering loop
1041
+ current = pdb.get_planning_session(session_id)
1042
+ if current and current.get("run_status") == "running":
1043
+ last_event_ts = pdb.get_latest_event_timestamp(session_id)
1044
+ # Fall back to created_at if no events exist yet
1045
+ check_ts = last_event_ts or current.get("created_at")
1046
+ if check_ts:
1047
+ elapsed = datetime.utcnow() - datetime.fromisoformat(check_ts)
1048
+ if elapsed > timedelta(minutes=7):
1049
+ pdb.update_planning_session(
1050
+ session_id, run_status="error",
1051
+ error_message="Session timed out (no activity)",
1052
+ )
1053
+ yield f"data: {json.dumps({'type': 'error', 'message': 'Session timed out (no activity)', 'fatal': True})}\n\n"
1054
+ return
1055
+
1056
+ while True:
1057
+ # Fetch new events from DB
1058
+ events = pdb.get_planning_iteration_events(session_id, after_id=last_id)
1059
+ for evt in events:
1060
+ last_id = evt["id"]
1061
+ if evt.get("event_data"):
1062
+ try:
1063
+ event_data = json.loads(evt["event_data"])
1064
+ except (json.JSONDecodeError, TypeError):
1065
+ event_data = {"type": evt["event_type"]}
1066
+ else:
1067
+ event_data = {"type": evt["event_type"]}
1068
+ event_data["_event_id"] = evt["id"]
1069
+ yield f"data: {json.dumps(event_data)}\n\n"
1070
+
1071
+ # Check if session reached terminal status
1072
+ current = pdb.get_planning_session(session_id)
1073
+
1074
+ # Check for stale session (no events for >7 min while supposedly running)
1075
+ if current and current.get("run_status") == "running":
1076
+ last_event_ts = pdb.get_latest_event_timestamp(session_id)
1077
+ check_ts = last_event_ts or (current.get("created_at") if current else None)
1078
+ if check_ts:
1079
+ elapsed = datetime.utcnow() - datetime.fromisoformat(check_ts)
1080
+ if elapsed > timedelta(minutes=7):
1081
+ pdb.update_planning_session(
1082
+ session_id, run_status="error",
1083
+ error_message="Session timed out (no activity)",
1084
+ )
1085
+ yield f"data: {json.dumps({'type': 'error', 'message': 'Session timed out (no activity)', 'fatal': True})}\n\n"
1086
+ break
1087
+
1088
+ if current and current.get("run_status") in ("completed", "error", "cancelled"):
1089
+ # Drain any remaining events
1090
+ final_events = pdb.get_planning_iteration_events(session_id, after_id=last_id)
1091
+ for evt in final_events:
1092
+ last_id = evt["id"]
1093
+ if evt.get("event_data"):
1094
+ try:
1095
+ event_data = json.loads(evt["event_data"])
1096
+ except (json.JSONDecodeError, TypeError):
1097
+ event_data = {"type": evt["event_type"]}
1098
+ else:
1099
+ event_data = {"type": evt["event_type"]}
1100
+ event_data["_event_id"] = evt["id"]
1101
+ yield f"data: {json.dumps(event_data)}\n\n"
1102
+ break
1103
+
1104
+ # Heartbeat to keep connection alive
1105
+ yield f"data: {json.dumps({'type': 'heartbeat'})}\n\n"
1106
+ await asyncio.sleep(0.5)
1107
+
1108
+ return StreamingResponse(
1109
+ generate_stream(),
1110
+ media_type="text/event-stream",
1111
+ headers={
1112
+ "Cache-Control": "no-cache",
1113
+ "Connection": "keep-alive",
1114
+ "X-Accel-Buffering": "no",
1115
+ },
1116
+ )
1117
+
1118
+
1119
+ @router.post(
1120
+ "/workflows/{workflow_id}/planning/iterate/cancel",
1121
+ response_model=IterationResponse,
1122
+ )
1123
+ async def cancel_iteration_session(
1124
+ slug: str, workflow_id: str, request: CancelIterationRequest
1125
+ ):
1126
+ """Cancel a running iteration session.
1127
+
1128
+ Marks the session as cancelled. The running iteration will complete
1129
+ but no further iterations will start.
1130
+ """
1131
+ pdb, project = _get_project_db(slug)
1132
+
1133
+ session = pdb.get_planning_session(request.session_id)
1134
+ if not session:
1135
+ raise HTTPException(
1136
+ status_code=status.HTTP_404_NOT_FOUND,
1137
+ detail=f"Session '{request.session_id}' not found",
1138
+ )
1139
+
1140
+ if session["workflow_id"] != workflow_id:
1141
+ raise HTTPException(
1142
+ status_code=status.HTTP_404_NOT_FOUND,
1143
+ detail=f"Session '{request.session_id}' not found in workflow '{workflow_id}'",
1144
+ )
1145
+
1146
+ if session.get("run_status") != "running":
1147
+ raise HTTPException(
1148
+ status_code=status.HTTP_400_BAD_REQUEST,
1149
+ detail="Session is not running",
1150
+ )
1151
+
1152
+ # Mark as cancelled (executor will pick this up)
1153
+ pdb.cancel_planning_session(request.session_id)
1154
+
1155
+ session = pdb.get_planning_session(request.session_id)
1156
+ return _session_to_iteration_response(session)
1157
+
1158
+
1159
+ @router.get(
1160
+ "/workflows/{workflow_id}/planning/iterate/{session_id}",
1161
+ response_model=IterationResponse,
1162
+ )
1163
+ async def get_iteration_session(slug: str, workflow_id: str, session_id: str):
1164
+ """Get details of an iteration session including progress."""
1165
+ pdb, project = _get_project_db(slug)
1166
+
1167
+ session = pdb.get_planning_session(session_id)
1168
+ if not session:
1169
+ raise HTTPException(
1170
+ status_code=status.HTTP_404_NOT_FOUND,
1171
+ detail=f"Session '{session_id}' not found",
1172
+ )
1173
+
1174
+ if session["workflow_id"] != workflow_id:
1175
+ raise HTTPException(
1176
+ status_code=status.HTTP_404_NOT_FOUND,
1177
+ detail=f"Session '{session_id}' not found in workflow '{workflow_id}'",
1178
+ )
1179
+
1180
+ return _session_to_iteration_response(session)
1181
+
1182
+
1183
+ @router.get("/workflows/{workflow_id}/planning/iterate/{session_id}/events")
1184
+ async def get_iteration_events(
1185
+ slug: str,
1186
+ workflow_id: str,
1187
+ session_id: str,
1188
+ after_id: int = Query(default=0, description="Only return events after this ID"),
1189
+ limit: int = Query(default=500, ge=1, le=1000),
1190
+ ):
1191
+ """Get persisted iteration events for a session.
1192
+
1193
+ Use after_id for pagination. Returns events ordered by ID ascending.
1194
+ """
1195
+ pdb, project = _get_project_db(slug)
1196
+
1197
+ session = pdb.get_planning_session(session_id)
1198
+ if not session:
1199
+ raise HTTPException(
1200
+ status_code=status.HTTP_404_NOT_FOUND,
1201
+ detail=f"Session '{session_id}' not found",
1202
+ )
1203
+
1204
+ if session["workflow_id"] != workflow_id:
1205
+ raise HTTPException(
1206
+ status_code=status.HTTP_404_NOT_FOUND,
1207
+ detail=f"Session '{session_id}' not found in workflow '{workflow_id}'",
1208
+ )
1209
+
1210
+ return pdb.get_planning_iteration_events(session_id, after_id=after_id, limit=limit)
1211
+
1212
+
1213
@router.get(
    "/workflows/{workflow_id}/planning/iterate/{session_id}/iterations",
    response_model=list[PlanningIterationSummary],
)
async def list_session_iterations(slug: str, workflow_id: str, session_id: str):
    """Return per-iteration stats for every iteration in a session."""
    pdb, _project = _get_project_db(slug)

    record = pdb.get_planning_session(session_id)
    if not record:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session '{session_id}' not found",
        )

    if record["workflow_id"] != workflow_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session '{session_id}' not found in workflow '{workflow_id}'",
        )

    summaries: list[PlanningIterationSummary] = []
    for row in pdb.list_planning_iterations(session_id):
        summaries.append(
            PlanningIterationSummary(
                id=row["id"],
                iteration_number=row["iteration_number"],
                status=row["status"],
                chars_added=row.get("chars_added", 0),
                chars_removed=row.get("chars_removed", 0),
                summary=row.get("summary"),
                started_at=row.get("started_at"),
                completed_at=row.get("completed_at"),
            )
        )
    return summaries
+
1250
+
1251
@router.get(
    "/workflows/{workflow_id}/planning/iterate/{session_id}/iterations/{iteration_id}/diff",
    response_model=IterationDiffResponse,
)
async def get_iteration_diff(
    slug: str, workflow_id: str, session_id: str, iteration_id: int
):
    """Return the unified diff for a single iteration.

    Falls back to recomputing the diff from the stored before/after
    document snapshots when no diff text was persisted.
    """
    pdb, _project = _get_project_db(slug)

    record = pdb.get_planning_session(session_id)
    if not record:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session '{session_id}' not found",
        )

    if record["workflow_id"] != workflow_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session '{session_id}' not found in workflow '{workflow_id}'",
        )

    # Fetch just this iteration so we don't load every diff in the session.
    iteration = pdb.get_planning_iteration(iteration_id)
    if not iteration or iteration.get("session_id") != session_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Iteration '{iteration_id}' not found in session '{session_id}'",
        )

    diff_text = iteration.get("diff_text")

    # Fallback: rebuild the diff from the doc snapshots when it is missing.
    if not diff_text:
        before = iteration.get("doc_before")
        after = iteration.get("doc_after")
        if before is not None and after is not None:
            import difflib

            diff_text = "\n".join(
                difflib.unified_diff(
                    before.splitlines(),
                    after.splitlines(),
                    fromfile="before",
                    tofile="after",
                    lineterm="",
                )
            )

    def classify(raw: str) -> str | None:
        """Map a raw diff line to its display type; None drops file headers."""
        if raw.startswith("+++") or raw.startswith("---"):
            return None
        if raw.startswith("+"):
            return "add"
        if raw.startswith("-"):
            return "remove"
        if raw.startswith("@@"):
            return "hunk"
        return "context"

    diff_lines: list[DiffLine] = []
    if diff_text:
        for raw in diff_text.splitlines():
            kind = classify(raw)
            if kind is not None:
                diff_lines.append(DiffLine(line=raw, type=kind))

    return IterationDiffResponse(
        iteration_id=iteration["id"],
        iteration_number=iteration["iteration_number"],
        diff_text=diff_text,
        chars_added=iteration.get("chars_added", 0),
        chars_removed=iteration.get("chars_removed", 0),
        diff_lines=diff_lines,
    )
+
1323
+
1324
+ # ============================================================================
1325
+ # Planning Session History Endpoints (updated for v17)
1326
+ # ============================================================================
1327
+
1328
+
1329
@router.get(
    "/workflows/{workflow_id}/planning/sessions",
    response_model=list[IterationSessionSummary],
)
async def list_workflow_planning_sessions(slug: str, workflow_id: str):
    """List all planning sessions for a workflow.

    Returns sessions in reverse chronological order (newest first).
    Supports both legacy chat-based sessions and new iteration-based sessions.
    """
    pdb, project = _get_project_db(slug)

    # Verify the workflow exists before listing its sessions.
    workflow = pdb.get_workflow(workflow_id)
    if not workflow:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow '{workflow_id}' not found",
        )

    sessions = pdb.list_planning_sessions(workflow_id=workflow_id)

    # NOTE(review): one list_planning_iterations() call per session (N+1
    # query pattern) — fine for modest session counts, revisit if slow.
    summaries = []
    for session in sessions:
        is_legacy = session.get("is_legacy", False)

        # For legacy sessions, fall back to the first user message as the
        # prompt preview, truncated to 100 characters.
        prompt_preview = session.get("prompt")
        if not prompt_preview and is_legacy:
            messages = session.get("messages", [])
            user_messages = [m for m in messages if m.get("role") == "user"]
            if user_messages:
                # Slicing already handles short strings; no length check needed.
                prompt_preview = user_messages[0].get("content", "")[:100]

        # Get iterations for this session
        iterations = pdb.list_planning_iterations(session["id"])
        iteration_summaries = [
            PlanningIterationSummary(
                id=it["id"],
                iteration_number=it["iteration_number"],
                status=it["status"],
                chars_added=it.get("chars_added", 0),
                chars_removed=it.get("chars_removed", 0),
                summary=it.get("summary"),
                started_at=it.get("started_at"),
                completed_at=it.get("completed_at"),
            )
            for it in iterations
        ]

        # Aggregate character deltas across all iterations.
        total_chars_added = sum(it.get("chars_added", 0) for it in iterations)
        total_chars_removed = sum(it.get("chars_removed", 0) for it in iterations)

        summaries.append(
            IterationSessionSummary(
                id=session["id"],
                step_id=session["step_id"],
                status=session["status"],
                # Legacy sessions predate run tracking; treat them as completed.
                run_status=session.get("run_status", "completed" if is_legacy else "pending"),
                is_legacy=is_legacy,
                prompt=prompt_preview,
                iterations_requested=session.get("iterations_requested", 0 if is_legacy else 1),
                iterations_completed=session.get("iterations_completed", 0),
                current_iteration=session.get("current_iteration", 0),
                created_at=session["created_at"],
                updated_at=session["updated_at"],
                total_chars_added=total_chars_added,
                total_chars_removed=total_chars_removed,
                iterations=iteration_summaries,
            )
        )

    return summaries
+
1405
+
1406
@router.get(
    "/workflows/{workflow_id}/planning/sessions/{session_id}",
    response_model=PlanningSessionDetail,
)
async def get_planning_session_detail(slug: str, workflow_id: str, session_id: str):
    """Return a planning session in full, including its message history."""
    pdb, _project = _get_project_db(slug)

    # The workflow must exist before session lookup is attempted.
    if not pdb.get_workflow(workflow_id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow '{workflow_id}' not found",
        )

    session = pdb.get_planning_session(session_id)
    if not session:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session '{session_id}' not found",
        )

    # A session from another workflow is reported as missing on purpose.
    if session["workflow_id"] != workflow_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session '{session_id}' not found in workflow '{workflow_id}'",
        )

    history = []
    for entry in session.get("messages", []):
        history.append(
            PlanningMessage(
                role=entry["role"],
                content=entry["content"],
                timestamp=entry.get("timestamp", ""),
                metadata=entry.get("metadata"),
            )
        )

    return PlanningSessionDetail(
        id=session["id"],
        workflow_id=session["workflow_id"],
        step_id=session["step_id"],
        status=session["status"],
        messages=history,
        artifacts=session.get("artifacts"),
        created_at=session["created_at"],
        updated_at=session["updated_at"],
        initial_content_size=session.get("initial_content_size"),
        final_content_size=session.get("final_content_size"),
    )