emdash-core 0.1.33__py3-none-any.whl → 0.1.60__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. emdash_core/agent/agents.py +93 -23
  2. emdash_core/agent/background.py +481 -0
  3. emdash_core/agent/hooks.py +419 -0
  4. emdash_core/agent/inprocess_subagent.py +114 -10
  5. emdash_core/agent/mcp/config.py +78 -2
  6. emdash_core/agent/prompts/main_agent.py +88 -1
  7. emdash_core/agent/prompts/plan_mode.py +65 -44
  8. emdash_core/agent/prompts/subagents.py +96 -8
  9. emdash_core/agent/prompts/workflow.py +215 -50
  10. emdash_core/agent/providers/models.py +1 -1
  11. emdash_core/agent/providers/openai_provider.py +10 -0
  12. emdash_core/agent/research/researcher.py +154 -45
  13. emdash_core/agent/runner/agent_runner.py +157 -19
  14. emdash_core/agent/runner/context.py +28 -9
  15. emdash_core/agent/runner/sdk_runner.py +29 -2
  16. emdash_core/agent/skills.py +81 -1
  17. emdash_core/agent/toolkit.py +87 -11
  18. emdash_core/agent/toolkits/__init__.py +117 -18
  19. emdash_core/agent/toolkits/base.py +87 -2
  20. emdash_core/agent/toolkits/explore.py +18 -0
  21. emdash_core/agent/toolkits/plan.py +18 -0
  22. emdash_core/agent/tools/__init__.py +2 -0
  23. emdash_core/agent/tools/coding.py +344 -52
  24. emdash_core/agent/tools/lsp.py +361 -0
  25. emdash_core/agent/tools/skill.py +21 -1
  26. emdash_core/agent/tools/task.py +27 -23
  27. emdash_core/agent/tools/task_output.py +262 -32
  28. emdash_core/agent/verifier/__init__.py +11 -0
  29. emdash_core/agent/verifier/manager.py +295 -0
  30. emdash_core/agent/verifier/models.py +97 -0
  31. emdash_core/{swarm/worktree_manager.py → agent/worktree.py} +19 -1
  32. emdash_core/api/agent.py +451 -5
  33. emdash_core/api/research.py +3 -3
  34. emdash_core/api/router.py +0 -4
  35. emdash_core/context/longevity.py +197 -0
  36. emdash_core/context/providers/explored_areas.py +83 -39
  37. emdash_core/context/reranker.py +35 -144
  38. emdash_core/context/simple_reranker.py +500 -0
  39. emdash_core/context/tool_relevance.py +84 -0
  40. emdash_core/core/config.py +8 -0
  41. emdash_core/graph/__init__.py +8 -1
  42. emdash_core/graph/connection.py +24 -3
  43. emdash_core/graph/writer.py +7 -1
  44. emdash_core/ingestion/repository.py +17 -198
  45. emdash_core/models/agent.py +14 -0
  46. emdash_core/server.py +1 -6
  47. emdash_core/sse/stream.py +16 -1
  48. emdash_core/utils/__init__.py +0 -2
  49. emdash_core/utils/git.py +103 -0
  50. emdash_core/utils/image.py +147 -160
  51. {emdash_core-0.1.33.dist-info → emdash_core-0.1.60.dist-info}/METADATA +7 -5
  52. {emdash_core-0.1.33.dist-info → emdash_core-0.1.60.dist-info}/RECORD +54 -58
  53. emdash_core/api/swarm.py +0 -223
  54. emdash_core/db/__init__.py +0 -67
  55. emdash_core/db/auth.py +0 -134
  56. emdash_core/db/models.py +0 -91
  57. emdash_core/db/provider.py +0 -222
  58. emdash_core/db/providers/__init__.py +0 -5
  59. emdash_core/db/providers/supabase.py +0 -452
  60. emdash_core/swarm/__init__.py +0 -17
  61. emdash_core/swarm/merge_agent.py +0 -383
  62. emdash_core/swarm/session_manager.py +0 -274
  63. emdash_core/swarm/swarm_runner.py +0 -226
  64. emdash_core/swarm/task_definition.py +0 -137
  65. emdash_core/swarm/worker_spawner.py +0 -319
  66. {emdash_core-0.1.33.dist-info → emdash_core-0.1.60.dist-info}/WHEEL +0 -0
  67. {emdash_core-0.1.33.dist-info → emdash_core-0.1.60.dist-info}/entry_points.txt +0 -0
emdash_core/swarm/swarm_runner.py
@@ -1,226 +0,0 @@
-"""Main swarm orchestrator combining all components."""
-
-import re
-from pathlib import Path
-from typing import Callable, Optional
-from uuid import uuid4
-
-from .task_definition import SwarmTask, SwarmState, TaskStatus
-from .worktree_manager import WorktreeManager
-from .worker_spawner import WorkerSpawner, WorkerResult
-from .merge_agent import MergeAgent, MergeResult
-from ..utils.logger import log
-
-
-class SwarmRunner:
-    """Orchestrates the full swarm lifecycle.
-
-    1. Creates worktrees for each task
-    2. Spawns parallel agent workers
-    3. Collects results
-    4. Merges completed work
-
-    Example:
-        runner = SwarmRunner(repo_root=Path("."))
-
-        tasks = [
-            "Add user authentication",
-            "Fix the login page CSS",
-            "Add unit tests for auth module",
-        ]
-
-        state = runner.run(tasks)
-        runner.merge_completed()
-    """
-
-    def __init__(
-        self,
-        repo_root: Path,
-        model: str = "gpt-4o-mini",
-        max_workers: int = 3,
-        timeout_per_task: int = 600,
-        base_branch: str = "main",
-        include_graph_tools: bool = False,
-    ):
-        self.repo_root = repo_root.resolve()
-        self.model = model
-        self.max_workers = max_workers
-        self.timeout = timeout_per_task
-        self.base_branch = base_branch
-        self.include_graph_tools = include_graph_tools
-
-        self.worktree_manager = WorktreeManager(repo_root)
-        self.spawner = WorkerSpawner(
-            repo_root=repo_root,
-            model=model,
-            include_graph_tools=include_graph_tools,
-            timeout_per_task=timeout_per_task,
-        )
-        self.merge_agent = MergeAgent(repo_root, model)
-
-        self.state: Optional[SwarmState] = None
-
-    def slugify(self, text: str) -> str:
-        """Create URL-safe slug from text."""
-        text = text.lower().strip()
-        text = re.sub(r"[^\w\s-]", "", text)
-        text = re.sub(r"[-\s]+", "-", text)
-        return text[:50]
-
-    def run(
-        self,
-        task_descriptions: list[str],
-        progress_callback: Optional[Callable[[SwarmTask, WorkerResult], None]] = None,
-    ) -> SwarmState:
-        """Execute the full swarm workflow.
-
-        Args:
-            task_descriptions: List of task descriptions
-            progress_callback: Called when each task completes
-
-        Returns:
-            SwarmState with all task results
-        """
-        # Create swarm state
-        swarm_id = str(uuid4())[:8]
-        tasks = []
-
-        for i, desc in enumerate(task_descriptions):
-            slug = self.slugify(desc)
-            # Add index suffix if slug is duplicate
-            existing_slugs = [t.slug for t in tasks]
-            if slug in existing_slugs:
-                slug = f"{slug}-{i+1}"
-
-            task = SwarmTask(
-                id=str(uuid4()),
-                slug=slug,
-                title=desc[:100],
-                description=desc,
-                base_branch=self.base_branch,
-            )
-            tasks.append(task)
-
-        self.state = SwarmState(
-            id=swarm_id,
-            tasks=tasks,
-            base_branch=self.base_branch,
-        )
-
-        # Save initial state
-        self.state.save(self.repo_root)
-
-        log.info(f"Starting swarm {swarm_id} with {len(tasks)} tasks")
-
-        # Run parallel workers
-        self.spawner.run_parallel(
-            tasks=tasks,
-            max_workers=self.max_workers,
-            progress_callback=progress_callback,
-        )
-
-        # Update and save final state
-        self.state.save(self.repo_root)
-
-        return self.state
-
-    def merge_completed(
-        self,
-        use_llm: bool = False,
-        target_branch: Optional[str] = None,
-        cleanup_worktrees: bool = True,
-    ) -> list[MergeResult]:
-        """Merge all completed task branches.
-
-        Args:
-            use_llm: Whether to use LLM assistance for merges
-            target_branch: Target branch (defaults to base_branch)
-            cleanup_worktrees: Whether to delete worktrees after successful merge
-
-        Returns:
-            List of merge results
-        """
-        if not self.state:
-            self.state = SwarmState.load(self.repo_root)
-
-        if not self.state:
-            raise ValueError("No swarm state found")
-
-        target = target_branch or self.state.base_branch
-
-        results = self.merge_agent.merge_all_completed(
-            self.state.tasks,
-            target=target,
-            use_llm=use_llm,
-            cleanup_worktrees=cleanup_worktrees,
-        )
-
-        # Update state
-        self.state.save(self.repo_root)
-
-        return results
-
-    def cleanup(self) -> int:
-        """Clean up all worktrees from this swarm.
-
-        Returns:
-            Number of worktrees removed
-        """
-        return self.worktree_manager.cleanup_all()
-
-    def get_status(self) -> dict:
-        """Get current swarm status.
-
-        Returns:
-            Dict with status summary
-        """
-        if not self.state:
-            self.state = SwarmState.load(self.repo_root)
-
-        if not self.state:
-            return {"active": False, "message": "No active swarm"}
-
-        summary = {
-            "active": True,
-            "swarm_id": self.state.id,
-            "base_branch": self.state.base_branch,
-            "created_at": self.state.created_at,
-            "total_tasks": len(self.state.tasks),
-            "pending": sum(1 for t in self.state.tasks if t.status == TaskStatus.PENDING),
-            "running": sum(1 for t in self.state.tasks if t.status == TaskStatus.RUNNING),
-            "completed": sum(1 for t in self.state.tasks if t.status == TaskStatus.COMPLETED),
-            "failed": sum(1 for t in self.state.tasks if t.status == TaskStatus.FAILED),
-            "merged": sum(1 for t in self.state.tasks if t.status == TaskStatus.MERGED),
-            "tasks": [
-                {
-                    "slug": t.slug,
-                    "title": t.title,
-                    "status": t.status.value,
-                    "branch": t.branch,
-                    "files_modified": len(t.files_modified),
-                }
-                for t in self.state.tasks
-            ],
-        }
-        return summary
-
-    @classmethod
-    def load(cls, repo_root: Path) -> Optional["SwarmRunner"]:
-        """Load an existing swarm runner from state.
-
-        Args:
-            repo_root: Repository root path
-
-        Returns:
-            SwarmRunner if state exists, None otherwise
-        """
-        state = SwarmState.load(repo_root)
-        if not state:
-            return None
-
-        runner = cls(
-            repo_root=repo_root,
-            base_branch=state.base_branch,
-        )
-        runner.state = state
-        return runner
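
For context on what 0.1.60 removes: a minimal end-to-end sketch of the deleted swarm orchestrator, reconstructed from the SwarmRunner docstring and signatures above. The import path `emdash_core.swarm.swarm_runner` is the 0.1.33 layout; none of this API exists in 0.1.60.

```python
# Illustrative sketch against the removed 0.1.33 API; does not run on 0.1.60.
from pathlib import Path

from emdash_core.swarm.swarm_runner import SwarmRunner  # module deleted in 0.1.60

runner = SwarmRunner(
    repo_root=Path("."),
    model="gpt-4o-mini",
    max_workers=3,
    base_branch="main",
)

# Each description becomes a SwarmTask with its own branch and worktree;
# run() blocks until all parallel workers finish.
state = runner.run([
    "Add user authentication",
    "Fix the login page CSS",
    "Add unit tests for auth module",
])

print(runner.get_status())  # counts by status plus per-task details
runner.merge_completed()    # merge completed branches back into the base branch
```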
emdash_core/swarm/task_definition.py
@@ -1,137 +0,0 @@
-"""Task definitions and state for swarm execution."""
-
-from dataclasses import dataclass, field, asdict
-from enum import Enum
-from pathlib import Path
-from typing import Optional, Any
-from datetime import datetime
-import json
-
-
-class TaskStatus(Enum):
-    """Status of a swarm task."""
-    PENDING = "pending"
-    WORKTREE_CREATED = "worktree_created"
-    RUNNING = "running"
-    COMPLETED = "completed"
-    FAILED = "failed"
-    MERGED = "merged"
-
-
-@dataclass
-class SwarmTask:
-    """A task to be executed by a swarm worker.
-
-    Each task gets its own worktree and subprocess.
-    State is persisted to {worktree}/.emdash-task/task.json
-    """
-    # Core identity
-    id: str
-    slug: str
-    title: str
-    description: str
-
-    # Git context
-    base_branch: str = "main"
-    branch: str = ""
-    worktree_path: Optional[str] = None
-
-    # Status tracking
-    status: TaskStatus = TaskStatus.PENDING
-    created_at: str = field(default_factory=lambda: datetime.now().isoformat())
-    started_at: Optional[str] = None
-    completed_at: Optional[str] = None
-
-    # Results
-    files_modified: list[str] = field(default_factory=list)
-    completion_summary: Optional[str] = None
-    error_message: Optional[str] = None
-    agent_output: Optional[str] = None
-
-    # Merge status
-    merge_status: Optional[str] = None
-    merge_conflicts: list[str] = field(default_factory=list)
-
-    def to_dict(self) -> dict[str, Any]:
-        """Convert to dictionary for JSON serialization."""
-        d = asdict(self)
-        d["status"] = self.status.value
-        return d
-
-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "SwarmTask":
-        """Create from dictionary."""
-        data = data.copy()
-        data["status"] = TaskStatus(data["status"])
-        return cls(**data)
-
-    def save(self, worktree_path: Path) -> None:
-        """Save task state to worktree."""
-        task_dir = worktree_path / ".emdash-task"
-        task_dir.mkdir(parents=True, exist_ok=True)
-
-        task_file = task_dir / "task.json"
-        with open(task_file, "w") as f:
-            json.dump(self.to_dict(), f, indent=2)
-
-    @classmethod
-    def load(cls, worktree_path: Path) -> Optional["SwarmTask"]:
-        """Load task state from worktree."""
-        task_file = worktree_path / ".emdash-task" / "task.json"
-        if not task_file.exists():
-            return None
-
-        with open(task_file) as f:
-            return cls.from_dict(json.load(f))
-
-
-@dataclass
-class SwarmState:
-    """Overall state of a swarm execution.
-
-    Persisted to {repo}/.emdash-worktrees/.swarm-state.json
-    """
-    id: str
-    tasks: list[SwarmTask]
-    base_branch: str = "main"
-    created_at: str = field(default_factory=lambda: datetime.now().isoformat())
-    completed_at: Optional[str] = None
-
-    def save(self, repo_root: Path) -> None:
-        """Save swarm state to repo root."""
-        state_file = repo_root / ".emdash-worktrees" / ".swarm-state.json"
-        state_file.parent.mkdir(parents=True, exist_ok=True)
-
-        data = {
-            "id": self.id,
-            "tasks": [t.to_dict() for t in self.tasks],
-            "base_branch": self.base_branch,
-            "created_at": self.created_at,
-            "completed_at": self.completed_at,
-            "summary": {
-                "total": len(self.tasks),
-                "completed": sum(1 for t in self.tasks if t.status == TaskStatus.COMPLETED),
-                "failed": sum(1 for t in self.tasks if t.status == TaskStatus.FAILED),
-                "merged": sum(1 for t in self.tasks if t.status == TaskStatus.MERGED),
-            },
-        }
-        with open(state_file, "w") as f:
-            json.dump(data, f, indent=2)
-
-    @classmethod
-    def load(cls, repo_root: Path) -> Optional["SwarmState"]:
-        """Load swarm state from repo root."""
-        state_file = repo_root / ".emdash-worktrees" / ".swarm-state.json"
-        if not state_file.exists():
-            return None
-
-        with open(state_file) as f:
-            data = json.load(f)
-        tasks = [SwarmTask.from_dict(t) for t in data.get("tasks", [])]
-        return cls(
-            id=data["id"],
-            tasks=tasks,
-            base_branch=data.get("base_branch", "main"),
-            created_at=data.get("created_at", ""),
-            completed_at=data.get("completed_at"),
-        )
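
The persistence model above round-trips cleanly through JSON; a short sketch, again assuming the removed 0.1.33 module path:

```python
# Illustrative sketch of SwarmTask save/load; module deleted in 0.1.60.
from pathlib import Path

from emdash_core.swarm.task_definition import SwarmTask, TaskStatus

task = SwarmTask(
    id="1234",
    slug="add-auth",
    title="Add user authentication",
    description="Add user authentication to the API",
)

worktree = Path(".emdash-worktrees/add-auth")
worktree.mkdir(parents=True, exist_ok=True)

task.status = TaskStatus.RUNNING
task.save(worktree)  # writes .emdash-worktrees/add-auth/.emdash-task/task.json

loaded = SwarmTask.load(worktree)
assert loaded is not None and loaded.status is TaskStatus.RUNNING
```

The enum status is stored as its string value and re-hydrated through `TaskStatus(...)`, which is why `from_dict` copies the dict before mutating it.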
emdash_core/swarm/worker_spawner.py
@@ -1,319 +0,0 @@
-"""Subprocess spawning and management for parallel agent workers."""
-
-import json
-import os
-import subprocess
-import sys
-from concurrent.futures import ProcessPoolExecutor, as_completed
-from dataclasses import dataclass
-from datetime import datetime
-from pathlib import Path
-from typing import Callable, Optional
-
-from .task_definition import SwarmTask, TaskStatus
-from .worktree_manager import WorktreeManager, WorktreeInfo
-from ..utils.logger import log
-
-
-@dataclass
-class WorkerResult:
-    """Result from a worker subprocess."""
-    task_id: str
-    success: bool
-    files_modified: list[str]
-    completion_summary: Optional[str]
-    error_message: Optional[str]
-    stdout: str
-    stderr: str
-    return_code: int
-
-
-def run_agent_in_worktree(
-    worktree_path: str,
-    task_description: str,
-    model: str = "gpt-4o-mini",
-    include_graph_tools: bool = False,
-    timeout: int = 600,
-) -> WorkerResult:
-    """Run a coding agent in a worktree subprocess.
-
-    This function is designed to be called in a separate process.
-    It invokes `emdash agent code` within the worktree directory.
-
-    Args:
-        worktree_path: Path to the worktree
-        task_description: Task for the agent to complete
-        model: LLM model to use
-        include_graph_tools: Whether to include graph tools
-        timeout: Maximum seconds to run
-
-    Returns:
-        WorkerResult with execution details
-    """
-    worktree = Path(worktree_path)
-    task_file = worktree / ".emdash-task" / "task.json"
-
-    # Load task for ID
-    task_id = "unknown"
-    if task_file.exists():
-        with open(task_file) as f:
-            task_data = json.load(f)
-            task_id = task_data.get("id", "unknown")
-
-    # Build command - run emdash agent code in non-interactive mode
-    cmd = [
-        sys.executable, "-m", "emdash",
-        "agent", "code",
-        task_description,
-        "--model", model,
-        "--mode", "code",
-        "-q",  # Quiet mode
-    ]
-
-    if not include_graph_tools:
-        cmd.append("--no-graph-tools")
-
-    # Set environment for isolated execution
-    env = os.environ.copy()
-    # Each worktree gets its own Kuzu DB to avoid lock contention
-    env["KUZU_DATABASE_PATH"] = str(worktree / ".emdash-task" / "kuzu_db")
-
-    try:
-        result = subprocess.run(
-            cmd,
-            cwd=str(worktree),
-            capture_output=True,
-            text=True,
-            timeout=timeout,
-            env=env,
-        )
-
-        # Parse output to extract modified files
-        files_modified = _extract_modified_files(result.stdout)
-        completion_summary = _extract_summary(result.stdout)
-
-        return WorkerResult(
-            task_id=task_id,
-            success=result.returncode == 0,
-            files_modified=files_modified,
-            completion_summary=completion_summary,
-            error_message=result.stderr if result.returncode != 0 else None,
-            stdout=result.stdout,
-            stderr=result.stderr,
-            return_code=result.returncode,
-        )
-
-    except subprocess.TimeoutExpired:
-        return WorkerResult(
-            task_id=task_id,
-            success=False,
-            files_modified=[],
-            completion_summary=None,
-            error_message=f"Task timed out after {timeout} seconds",
-            stdout="",
-            stderr="",
-            return_code=-1,
-        )
-    except Exception as e:
-        return WorkerResult(
-            task_id=task_id,
-            success=False,
-            files_modified=[],
-            completion_summary=None,
-            error_message=str(e),
-            stdout="",
-            stderr="",
-            return_code=-1,
-        )
-
-
-def _extract_modified_files(output: str) -> list[str]:
-    """Extract list of modified files from agent output."""
-    files = []
-    for line in output.split("\n"):
-        # Look for common patterns indicating file modifications
-        lower = line.lower()
-        if any(x in lower for x in ["modified:", "created:", "applied diff to", "wrote to"]):
-            # Try to extract file path
-            parts = line.split()
-            for part in parts:
-                if "/" in part or part.endswith(".py") or part.endswith(".ts"):
-                    # Clean up the path
-                    path = part.strip(",:\"'`")
-                    if path and not path.startswith("-"):
-                        files.append(path)
-    return list(set(files))  # Dedupe
-
-
-def _extract_summary(output: str) -> Optional[str]:
-    """Extract completion summary from agent output."""
-    # Look for completion markers
-    lines = output.split("\n")
-    for i, line in enumerate(lines):
-        if "completed" in line.lower() or "summary" in line.lower():
-            # Return this line and a few following
-            summary_lines = lines[i:i+5]
-            return "\n".join(summary_lines).strip()
-
-    # Fall back to last non-empty lines
-    non_empty = [l.strip() for l in lines if l.strip()]
-    if non_empty:
-        return "\n".join(non_empty[-3:])
-    return None
-
-
-class WorkerSpawner:
-    """Orchestrates parallel worker subprocesses.
-
-    Creates worktrees for each task and runs agents in parallel,
-    collecting results as they complete.
-
-    Example:
-        spawner = WorkerSpawner(repo_root=Path("."))
-
-        tasks = [
-            SwarmTask(id="1", slug="add-auth", ...),
-            SwarmTask(id="2", slug="fix-bug", ...),
-        ]
-
-        results = spawner.run_parallel(tasks, max_workers=3)
-    """
-
-    def __init__(
-        self,
-        repo_root: Path,
-        model: str = "gpt-4o-mini",
-        include_graph_tools: bool = False,
-        timeout_per_task: int = 600,
-    ):
-        self.repo_root = repo_root.resolve()
-        self.worktree_manager = WorktreeManager(repo_root)
-        self.model = model
-        self.include_graph_tools = include_graph_tools
-        self.timeout = timeout_per_task
-
-    def prepare_worktree(self, task: SwarmTask) -> WorktreeInfo:
-        """Create worktree and initialize task state."""
-        info = self.worktree_manager.create_worktree(
-            task_name=task.slug,
-            base_branch=task.base_branch,
-            force=True,
-        )
-
-        # Update task with worktree info
-        task.worktree_path = str(info.path)
-        task.branch = info.branch
-        task.status = TaskStatus.WORKTREE_CREATED
-
-        # Save task state to worktree
-        task.save(info.path)
-
-        return info
-
-    def run_parallel(
-        self,
-        tasks: list[SwarmTask],
-        max_workers: int = 3,
-        progress_callback: Optional[Callable[[SwarmTask, WorkerResult], None]] = None,
-    ) -> dict[str, WorkerResult]:
-        """Run all tasks in parallel.
-
-        Args:
-            tasks: List of tasks to execute
-            max_workers: Maximum concurrent workers
-            progress_callback: Called when each task completes
-
-        Returns:
-            Dict mapping task_id to WorkerResult
-        """
-        results: dict[str, WorkerResult] = {}
-
-        # Create all worktrees first
-        log.info(f"Creating {len(tasks)} worktrees...")
-        for task in tasks:
-            self.prepare_worktree(task)
-            task.status = TaskStatus.RUNNING
-            task.started_at = datetime.now().isoformat()
-            if task.worktree_path:
-                task.save(Path(task.worktree_path))
-
-        # Run agents in parallel using ProcessPoolExecutor
-        log.info(f"Starting {len(tasks)} agents with max {max_workers} workers...")
-
-        with ProcessPoolExecutor(max_workers=max_workers) as executor:
-            # Submit all tasks
-            future_to_task = {
-                executor.submit(
-                    run_agent_in_worktree,
-                    task.worktree_path,
-                    task.description,
-                    self.model,
-                    self.include_graph_tools,
-                    self.timeout,
-                ): task
-                for task in tasks
-            }
-
-            # Collect results as they complete
-            for future in as_completed(future_to_task):
-                task = future_to_task[future]
-
-                try:
-                    result = future.result()
-                    results[task.id] = result
-
-                    # Update task status
-                    task.status = TaskStatus.COMPLETED if result.success else TaskStatus.FAILED
-                    task.files_modified = result.files_modified
-                    task.completion_summary = result.completion_summary
-                    task.error_message = result.error_message
-                    task.agent_output = result.stdout[:5000] if result.stdout else None
-                    task.completed_at = datetime.now().isoformat()
-
-                    # Save updated state
-                    if task.worktree_path:
-                        task.save(Path(task.worktree_path))
-
-                    if progress_callback:
-                        progress_callback(task, result)
-
-                    status = "SUCCESS" if result.success else "FAILED"
-                    log.info(f"Task {task.slug} completed: {status}")
-
-                except Exception as e:
-                    log.exception(f"Task {task.slug} raised exception")
-                    task.status = TaskStatus.FAILED
-                    task.error_message = str(e)
-                    task.completed_at = datetime.now().isoformat()
-
-                    if task.worktree_path:
-                        task.save(Path(task.worktree_path))
-
-                    results[task.id] = WorkerResult(
-                        task_id=task.id,
-                        success=False,
-                        files_modified=[],
-                        completion_summary=None,
-                        error_message=str(e),
-                        stdout="",
-                        stderr="",
-                        return_code=-1,
-                    )
-
-        return results
-
-    def run_sequential(
-        self,
-        tasks: list[SwarmTask],
-        progress_callback: Optional[Callable[[SwarmTask, WorkerResult], None]] = None,
-    ) -> dict[str, WorkerResult]:
-        """Run tasks sequentially (for debugging or dependent tasks).
-
-        Args:
-            tasks: List of tasks to execute
-            progress_callback: Called when each task completes
-
-        Returns:
-            Dict mapping task_id to WorkerResult
-        """
-        return self.run_parallel(tasks, max_workers=1, progress_callback=progress_callback)
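
Finally, the worker layer could be driven directly; a hedged sketch based on the signatures above (0.1.33 import paths; the `on_done` callback name is illustrative):

```python
# Illustrative sketch of WorkerSpawner.run_parallel; module deleted in 0.1.60.
from pathlib import Path
from uuid import uuid4

from emdash_core.swarm.task_definition import SwarmTask
from emdash_core.swarm.worker_spawner import WorkerSpawner, WorkerResult


def on_done(task: SwarmTask, result: WorkerResult) -> None:
    # Invoked as each worker subprocess finishes, in completion order.
    print(f"{task.slug}: {'ok' if result.success else result.error_message}")


spawner = WorkerSpawner(repo_root=Path("."), model="gpt-4o-mini", timeout_per_task=600)

tasks = [
    SwarmTask(id=str(uuid4()), slug="add-auth", title="Add auth",
              description="Add user authentication"),
    SwarmTask(id=str(uuid4()), slug="fix-css", title="Fix CSS",
              description="Fix the login page CSS"),
]

results = spawner.run_parallel(tasks, max_workers=2, progress_callback=on_done)
failed = [t.slug for t in tasks if not results[t.id].success]
```

Note the isolation choice visible in `run_agent_in_worktree`: each worker is a separate OS process running `python -m emdash agent code` inside its own worktree, with `KUZU_DATABASE_PATH` pointed at a per-worktree database to avoid lock contention.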