codefleet 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
codefleet/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ """codefleet — orchestrate fleets of AI coding agents across providers."""
2
+
3
+ __version__ = "0.3.0"
codefleet/git_ops.py ADDED
@@ -0,0 +1,115 @@
1
+ import shutil
2
+ import subprocess
3
+ from pathlib import Path
4
+
5
+
6
class GitError(Exception):
    """Raised when a git subprocess command fails or cannot complete."""
    pass
8
+
9
+
10
def is_git_repo(path: Path) -> bool:
    """Check if path is inside a git repository.

    Best-effort: a missing git binary or a hung command is reported
    as "not a repo" rather than raising.
    """
    cmd = ["git", "-C", str(path), "rev-parse", "--is-inside-work-tree"]
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
    except (subprocess.TimeoutExpired, FileNotFoundError):
        return False
    if proc.returncode != 0:
        return False
    return proc.stdout.strip() == "true"
22
+
23
+
24
def resolve_ref(repo_path: Path, ref: str = "HEAD") -> str:
    """Resolve a git ref to a commit hash.

    Args:
        repo_path: Directory inside the repository.
        ref: Any rev-parse-able ref (branch, tag, hash, "HEAD").

    Returns:
        The resolved revision string from `git rev-parse`.

    Raises:
        GitError: if git is not installed, the command times out, or the
            ref cannot be resolved.
    """
    try:
        result = subprocess.run(
            ["git", "-C", str(repo_path), "rev-parse", ref],
            capture_output=True,
            text=True,
            timeout=10,
        )
    except FileNotFoundError as e:
        # Consistent with is_git_repo: a missing git binary is a GitError
        # here because the caller asked for a concrete answer.
        raise GitError("git executable not found") from e
    except subprocess.TimeoutExpired as e:
        raise GitError(f"Timed out resolving ref '{ref}'") from e
    if result.returncode != 0:
        raise GitError(f"Failed to resolve ref '{ref}': {result.stderr.strip()}")
    return result.stdout.strip()
35
+
36
+
37
def create_worktree(
    repo_path: Path,
    worktree_path: Path,
    branch_name: str,
    base_ref: str = "HEAD",
) -> None:
    """Create a git worktree with a new branch.

    Args:
        repo_path: Directory inside the repository.
        worktree_path: Destination for the new worktree (parents created).
        branch_name: New branch to create via `-b`.
        base_ref: Ref the branch starts from.

    Raises:
        GitError: if git is not installed, the command times out, or
            `git worktree add` fails.
    """
    worktree_path.parent.mkdir(parents=True, exist_ok=True)
    try:
        result = subprocess.run(
            [
                "git", "-C", str(repo_path),
                "worktree", "add",
                "-b", branch_name,
                str(worktree_path),
                base_ref,
            ],
            capture_output=True,
            text=True,
            timeout=30,
        )
    except FileNotFoundError as e:
        raise GitError("git executable not found") from e
    except subprocess.TimeoutExpired as e:
        raise GitError(f"Timed out creating worktree at '{worktree_path}'") from e
    if result.returncode != 0:
        raise GitError(f"Failed to create worktree: {result.stderr.strip()}")
59
+
60
+
61
def remove_worktree(repo_path: Path, worktree_path: Path) -> None:
    """Remove a git worktree.

    Best-effort: when `git worktree remove` fails, the directory is
    deleted directly and stale worktree records are pruned.
    """
    remove_cmd = [
        "git", "-C", str(repo_path),
        "worktree", "remove", "--force",
        str(worktree_path),
    ]
    outcome = subprocess.run(
        remove_cmd, capture_output=True, text=True, timeout=30
    )
    if outcome.returncode == 0:
        return
    # git refused -- fall back to deleting the directory ourselves.
    if worktree_path.exists():
        shutil.rmtree(worktree_path, ignore_errors=True)
    # Drop the now-stale worktree bookkeeping from the repository.
    prune_cmd = ["git", "-C", str(repo_path), "worktree", "prune"]
    subprocess.run(prune_cmd, capture_output=True, text=True, timeout=10)
84
+
85
+
86
def delete_branch(repo_path: Path, branch_name: str) -> None:
    """Delete a git branch (force-delete via `git branch -D`).

    Args:
        repo_path: Directory inside the repository.
        branch_name: Local branch to delete.

    Raises:
        GitError: if git is not installed, the command times out, or the
            branch cannot be deleted.
    """
    try:
        result = subprocess.run(
            ["git", "-C", str(repo_path), "branch", "-D", branch_name],
            capture_output=True,
            text=True,
            timeout=10,
        )
    except FileNotFoundError as e:
        raise GitError("git executable not found") from e
    except subprocess.TimeoutExpired as e:
        raise GitError(f"Timed out deleting branch '{branch_name}'") from e
    if result.returncode != 0:
        raise GitError(
            f"Failed to delete branch '{branch_name}': {result.stderr.strip()}"
        )
98
+
99
+
100
+ def get_git_path() -> str | None:
101
+ """Return the path to git, or None."""
102
+ return shutil.which("git")
103
+
104
+
105
+ def get_repo_root(path: Path) -> Path | None:
106
+ """Return the root of the git repo containing path."""
107
+ result = subprocess.run(
108
+ ["git", "-C", str(path), "rev-parse", "--show-toplevel"],
109
+ capture_output=True,
110
+ text=True,
111
+ timeout=10,
112
+ )
113
+ if result.returncode == 0:
114
+ return Path(result.stdout.strip())
115
+ return None
codefleet/models.py ADDED
@@ -0,0 +1,233 @@
1
+ from __future__ import annotations
2
+
3
+ import enum
4
+ from typing import Optional
5
+
6
+ from pydantic import BaseModel, Field
7
+
8
+
9
class ExecutorType(str, enum.Enum):
    """Which AI coding agent CLI executes a worker's prompt."""

    CODEX = "codex"
    GEMINI = "gemini"
    CLAUDE = "claude"
13
+
14
+
15
class WorkerStatus(str, enum.Enum):
    """Lifecycle states for a single worker."""

    PENDING = "pending"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    CANCELLED = "cancelled"
    TIMED_OUT = "timed_out"
    CLEANUP_FAILED = "cleanup_failed"

    @property
    def is_terminal(self) -> bool:
        """True once the worker can no longer change state."""
        # Every state except the two live ones (pending/running) is final.
        return self not in (WorkerStatus.PENDING, WorkerStatus.RUNNING)
33
+
34
+
35
class ResultStatus(str, enum.Enum):
    """Self-reported outcome a worker writes in its result payload."""

    COMPLETED = "completed"
    BLOCKED = "blocked"
38
+
39
+
40
class TestStatus(str, enum.Enum):
    """Outcome of one test command reported by a worker."""

    PASSED = "passed"
    FAILED = "failed"
    NOT_RUN = "not_run"
44
+
45
+
46
class TestResult(BaseModel):
    """One test command's outcome as reported in a worker result."""

    command: str
    status: TestStatus
    details: str = ""
50
+
51
+
52
class WorkerResult(BaseModel):
    """Structured result a worker reports on completion (result.json schema)."""

    summary: str
    files_changed: list[str] = Field(default_factory=list)
    tests: list[TestResult] = Field(default_factory=list)
    # Commit identifiers produced by the worker -- presumably hashes or
    # subjects; confirm against the worker prompt contract.
    commits: list[str] = Field(default_factory=list)
    next_steps: list[str] = Field(default_factory=list)
    status: ResultStatus
59
+
60
+
61
class WorkerRecord(BaseModel):
    """Full persisted state for one worker (internal bookkeeping record)."""

    worker_id: str
    task_name: str
    repo_path: str
    branch_name: str
    worktree_path: str
    worker_dir: str
    model: str
    executor: ExecutorType = ExecutorType.CODEX
    profile: Optional[str] = None
    status: WorkerStatus
    # Timestamps are floats -- presumably epoch seconds; TODO confirm.
    created_at: float
    started_at: Optional[float] = None
    ended_at: Optional[float] = None
    timeout_seconds: int
    pid: Optional[int] = None
    exit_code: Optional[int] = None
    # Command line used to launch the executor process.
    codex_command: str
    prompt: str
    # Filesystem paths to the worker's on-disk artifacts.
    result_json_path: str
    stdout_path: str
    stderr_path: str
    prompt_path: str
    meta_path: str
    retry_count: int = 0
    # Set when this worker was spawned by another worker.
    parent_worker_id: Optional[str] = None
    tags: list[str] = Field(default_factory=list)
    metadata: dict = Field(default_factory=dict)
    error_message: Optional[str] = None
    # Populated when the worker runs as a stage of a workflow.
    workflow_id: Optional[str] = None
    stage_index: Optional[int] = None
92
+
93
+
94
class WorkerStatusPayload(BaseModel):
    """Payload returned by create_worker, get_worker_status, etc.

    A client-facing projection of WorkerRecord: same fields minus the
    prompt text and executor command line.
    """

    worker_id: str
    task_name: str
    status: WorkerStatus
    repo_path: str
    branch_name: str
    worktree_path: str
    worker_dir: str
    model: str
    executor: ExecutorType = ExecutorType.CODEX
    profile: Optional[str] = None
    created_at: float
    started_at: Optional[float] = None
    ended_at: Optional[float] = None
    timeout_seconds: int
    pid: Optional[int] = None
    exit_code: Optional[int] = None
    retry_count: int = 0
    tags: list[str] = Field(default_factory=list)
    metadata: dict = Field(default_factory=dict)
    error_message: Optional[str] = None
    # Artifact paths are passed through so clients can fetch logs/results.
    prompt_path: str
    result_json_path: str
    stdout_path: str
    stderr_path: str
    meta_path: str

    @classmethod
    def from_record(cls, record: WorkerRecord) -> WorkerStatusPayload:
        # Field-by-field copy; deliberately omits record.prompt and
        # record.codex_command.
        return cls(
            worker_id=record.worker_id,
            task_name=record.task_name,
            status=record.status,
            repo_path=record.repo_path,
            branch_name=record.branch_name,
            worktree_path=record.worktree_path,
            worker_dir=record.worker_dir,
            model=record.model,
            executor=record.executor,
            profile=record.profile,
            created_at=record.created_at,
            started_at=record.started_at,
            ended_at=record.ended_at,
            timeout_seconds=record.timeout_seconds,
            pid=record.pid,
            exit_code=record.exit_code,
            retry_count=record.retry_count,
            tags=record.tags,
            metadata=record.metadata,
            error_message=record.error_message,
            prompt_path=record.prompt_path,
            result_json_path=record.result_json_path,
            stdout_path=record.stdout_path,
            stderr_path=record.stderr_path,
            meta_path=record.meta_path,
        )
152
+
153
+
154
+ # --- Workflow Models ---
155
+
156
+
157
class WorktreeStrategy(str, enum.Enum):
    """How a workflow stage obtains its git worktree."""

    # Create a fresh worktree for the stage.
    NEW = "new"
    # Presumably reuses an upstream stage's worktree -- confirm in supervisor.
    INHERIT = "inherit"
160
+
161
+
162
class StageDefinition(BaseModel):
    """One stage in a workflow DAG."""

    name: str
    executor: ExecutorType
    # Template for the stage prompt; presumably interpolates prior-stage
    # results -- confirm against the supervisor implementation.
    prompt_template: str
    model: Optional[str] = None
    worktree_strategy: WorktreeStrategy = WorktreeStrategy.INHERIT
    # Indices of stages that must finish before this one starts.
    depends_on: list[int] = Field(default_factory=list)
    timeout_seconds: Optional[int] = None
    reasoning_effort: Optional[str] = None
    extra_args: Optional[list[str]] = None
172
+
173
+
174
class WorkflowStatus(str, enum.Enum):
    """Lifecycle states for a workflow run."""

    PENDING = "pending"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    CANCELLED = "cancelled"
180
+
181
+
182
class StageState(BaseModel):
    """Mutable runtime state for one workflow stage."""

    # Worker executing this stage; None until the stage starts.
    worker_id: Optional[str] = None
    status: WorkerStatus = WorkerStatus.PENDING
    worktree_path: Optional[str] = None
186
+
187
+
188
class WorkflowRecord(BaseModel):
    """Full persisted state for a workflow run (internal record)."""

    workflow_id: str
    name: str
    status: WorkflowStatus
    repo_path: str
    base_ref: str
    task_prompt: str
    stages: list[StageDefinition]
    # Keyed by stage index into `stages`; absent entries mean not started.
    stage_states: dict[int, StageState] = Field(default_factory=dict)
    created_at: float
    completed_at: Optional[float] = None
    error_message: Optional[str] = None
200
+
201
+
202
class WorkflowStatusPayload(BaseModel):
    """Client-facing snapshot of a workflow's progress."""

    workflow_id: str
    name: str
    status: WorkflowStatus
    repo_path: str
    # One dict per stage: index, name, executor, status, worker_id.
    stage_summary: list[dict]
    created_at: float
    completed_at: Optional[float] = None
    error_message: Optional[str] = None

    @classmethod
    def from_record(cls, record: WorkflowRecord) -> WorkflowStatusPayload:
        """Flatten a WorkflowRecord's stages into summary dicts."""
        stage_summary = []
        for i, stage in enumerate(record.stages):
            # A stage with no recorded state defaults to PENDING.
            state = record.stage_states.get(i, StageState())
            stage_summary.append({
                "index": i,
                "name": stage.name,
                "executor": stage.executor.value,
                "status": state.status.value,
                "worker_id": state.worker_id,
            })
        return cls(
            workflow_id=record.workflow_id,
            name=record.name,
            status=record.status,
            repo_path=record.repo_path,
            stage_summary=stage_summary,
            created_at=record.created_at,
            completed_at=record.completed_at,
            error_message=record.error_message,
        )
@@ -0,0 +1,39 @@
1
+ import json
2
+ from pathlib import Path
3
+
4
+ from .models import WorkerResult
5
+
6
+
7
class ResultValidationError(Exception):
    """Raised when a worker result file is missing, malformed, or off-schema."""
    pass
9
+
10
+
11
def parse_result_file(path: Path) -> WorkerResult:
    """Parse and validate a result.json file.

    Args:
        path: Location of the worker-written result JSON.

    Returns:
        The validated WorkerResult.

    Raises:
        ResultValidationError: if the file is missing, empty, not valid
            JSON, not a JSON object, or fails schema validation. The
            original exception is chained via `from` so the root cause
            survives in the traceback.
    """
    if not path.exists():
        raise ResultValidationError(f"Result file not found: {path}")

    raw = path.read_text(encoding="utf-8")
    if not raw.strip():
        raise ResultValidationError(f"Result file is empty: {path}")

    try:
        data = json.loads(raw)
    except json.JSONDecodeError as e:
        raise ResultValidationError(f"Invalid JSON in result file: {e}") from e

    if not isinstance(data, dict):
        raise ResultValidationError("Result JSON must be an object")

    try:
        return WorkerResult.model_validate(data)
    except Exception as e:
        # Broad catch is deliberate: pydantic is not imported in this
        # module, so its ValidationError cannot be named directly.
        raise ResultValidationError(f"Result schema validation failed: {e}") from e
32
+
33
+
34
def validate_result_data(data: dict) -> WorkerResult:
    """Validate a result dict against the schema.

    Raises:
        ResultValidationError: if validation fails; the underlying
            exception is chained via `from` to preserve the root cause.
    """
    try:
        return WorkerResult.model_validate(data)
    except Exception as e:
        # Broad catch is deliberate: pydantic's ValidationError is not
        # importable here without adding a dependency on pydantic.
        raise ResultValidationError(f"Result schema validation failed: {e}") from e
codefleet/server.py ADDED
@@ -0,0 +1,196 @@
1
+ import os
2
+ from typing import Optional
3
+
4
+ from mcp.server.fastmcp import FastMCP
5
+
6
+ from .supervisor import FleetSupervisor
7
+
8
+
9
def create_server(supervisor: Optional[FleetSupervisor] = None) -> FastMCP:
    """Create the MCP server with all tools registered.

    When no supervisor is supplied, one is built from FLEET_* environment
    variables. Each nested function below is registered as an MCP tool;
    its docstring becomes the tool description shown to clients, so those
    strings are runtime behavior.
    """
    mcp = FastMCP("codefleet")

    if supervisor is None:
        # FLEET_ALLOWED_REPOS is a comma-separated list; empty -> None
        # (no restriction value passed to the supervisor).
        allowed_repos_str = os.environ.get("FLEET_ALLOWED_REPOS", "")
        allowed_repos = (
            [r.strip() for r in allowed_repos_str.split(",") if r.strip()] or None
        )

        supervisor = FleetSupervisor(
            base_dir=os.environ.get("FLEET_BASE_DIR"),
            default_model=os.environ.get("FLEET_DEFAULT_MODEL", "gpt-5.4"),
            default_gemini_model=os.environ.get(
                "FLEET_GEMINI_DEFAULT_MODEL", "gemini-3.1-pro-preview"
            ),
            default_claude_model=os.environ.get(
                "FLEET_CLAUDE_DEFAULT_MODEL", "claude-sonnet-4-6"
            ),
            default_reasoning_effort=os.environ.get("FLEET_REASONING_EFFORT", "xhigh"),
            # NOTE(review): int() raises ValueError on a malformed env var;
            # presumably acceptable fail-fast at startup.
            default_timeout=int(os.environ.get("FLEET_DEFAULT_TIMEOUT", "600")),
            max_concurrent=int(os.environ.get("FLEET_MAX_CONCURRENT", "10")),
            allowed_repos=allowed_repos,
            default_executor=os.environ.get("FLEET_DEFAULT_EXECUTOR", "codex"),
            max_spawn_depth=int(os.environ.get("FLEET_MAX_SPAWN_DEPTH", "2")),
        )

    # --- Worker tools ---

    @mcp.tool()
    def healthcheck() -> dict:
        """Return a basic capability report. Lets Claude verify the MCP server is reachable and the local environment is sane."""
        return supervisor.healthcheck()

    @mcp.tool()
    def create_worker(
        repo_path: str,
        task_name: str,
        prompt: str,
        base_ref: str = "HEAD",
        model: Optional[str] = None,
        executor: Optional[str] = None,
        reasoning_effort: Optional[str] = None,
        timeout_seconds: Optional[int] = None,
        profile: Optional[str] = None,
        tags: Optional[list[str]] = None,
        metadata: Optional[dict] = None,
        extra_args: Optional[list[str]] = None,
        extra_codex_args: Optional[list[str]] = None,
        parent_worker_id: Optional[str] = None,
    ) -> dict:
        """Launch a new worker in an isolated git worktree. Supports 'codex', 'gemini', and 'claude' executors."""
        # Pure pass-through to the supervisor; payload serialized for MCP.
        result = supervisor.create_worker(
            repo_path=repo_path,
            task_name=task_name,
            prompt=prompt,
            base_ref=base_ref,
            model=model,
            executor=executor,
            reasoning_effort=reasoning_effort,
            timeout_seconds=timeout_seconds,
            profile=profile,
            tags=tags,
            metadata=metadata,
            extra_args=extra_args,
            extra_codex_args=extra_codex_args,
            parent_worker_id=parent_worker_id,
        )
        return result.model_dump()

    @mcp.tool()
    def get_worker_status(worker_id: str) -> dict:
        """Return current status and metadata for a worker."""
        result = supervisor.get_worker_status(worker_id)
        return result.model_dump()

    @mcp.tool()
    def list_workers(
        statuses: Optional[list[str]] = None,
        limit: int = 25,
    ) -> dict:
        """List recent workers, optionally filtered by status."""
        results = supervisor.list_workers(statuses=statuses, limit=limit)
        return {"workers": [r.model_dump() for r in results], "count": len(results)}

    @mcp.tool()
    def collect_worker_result(
        worker_id: str,
        include_logs: bool = False,
        log_tail_lines: int = 80,
    ) -> dict:
        """Return worker metadata plus parsed result.json and optional log tails."""
        return supervisor.collect_worker_result(
            worker_id=worker_id,
            include_logs=include_logs,
            log_tail_lines=log_tail_lines,
        )

    @mcp.tool()
    def cancel_worker(worker_id: str) -> dict:
        """Cancel a running worker."""
        result = supervisor.cancel_worker(worker_id)
        return result.model_dump()

    @mcp.tool()
    def cleanup_worker(
        worker_id: str,
        remove_branch: bool = True,
        remove_worktree_dir: bool = True,
    ) -> dict:
        """Remove worktree and optional branch for a terminal worker."""
        return supervisor.cleanup_worker(
            worker_id=worker_id,
            remove_branch=remove_branch,
            remove_worktree_dir=remove_worktree_dir,
        )

    # --- Workflow tools ---

    @mcp.tool()
    def create_workflow(
        name: str,
        repo_path: str,
        task_prompt: str,
        stages: list[dict],
        base_ref: str = "HEAD",
        timeout_seconds: Optional[int] = None,
    ) -> dict:
        """Define and start a multi-stage workflow. Stages form a DAG where each stage uses any executor (codex/gemini) and results flow between stages via prompt templates."""
        result = supervisor.create_workflow(
            name=name,
            repo_path=repo_path,
            task_prompt=task_prompt,
            stages=stages,
            base_ref=base_ref,
            timeout_seconds=timeout_seconds,
        )
        return result.model_dump()

    @mcp.tool()
    def get_workflow_status(workflow_id: str) -> dict:
        """Return workflow state with per-stage worker statuses."""
        result = supervisor.get_workflow_status(workflow_id)
        return result.model_dump()

    @mcp.tool()
    def list_workflows(
        statuses: Optional[list[str]] = None,
        limit: int = 25,
    ) -> dict:
        """List workflows with optional status filter."""
        results = supervisor.list_workflows(statuses=statuses, limit=limit)
        return {"workflows": [r.model_dump() for r in results], "count": len(results)}

    @mcp.tool()
    def cancel_workflow(workflow_id: str) -> dict:
        """Cancel all running stages and mark workflow cancelled."""
        result = supervisor.cancel_workflow(workflow_id)
        return result.model_dump()

    @mcp.tool()
    def collect_workflow_result(
        workflow_id: str,
        include_all_stages: bool = False,
        include_logs: bool = False,
    ) -> dict:
        """Get the final stage's result (or all stages' results)."""
        return supervisor.collect_workflow_result(
            workflow_id=workflow_id,
            include_all_stages=include_all_stages,
            include_logs=include_logs,
        )

    @mcp.tool()
    def cleanup_workflow(workflow_id: str) -> dict:
        """Clean up all worktrees, branches, and worker dirs for a terminal workflow."""
        return supervisor.cleanup_workflow(workflow_id)

    return mcp
188
+
189
+
190
def main():
    """Entry point: build the MCP server and serve until shutdown."""
    create_server().run()
+
194
+
195
+ if __name__ == "__main__":
196
+ main()