sequant 1.17.0 → 1.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -1
- package/.claude-plugin/plugin.json +14 -2
- package/dist/marketplace/external_plugins/sequant/.claude-plugin/plugin.json +21 -0
- package/dist/marketplace/external_plugins/sequant/README.md +38 -0
- package/dist/marketplace/external_plugins/sequant/hooks/post-tool.sh +292 -0
- package/dist/marketplace/external_plugins/sequant/hooks/pre-tool.sh +463 -0
- package/dist/marketplace/external_plugins/sequant/skills/_shared/references/prompt-templates.md +350 -0
- package/dist/marketplace/external_plugins/sequant/skills/_shared/references/subagent-types.md +131 -0
- package/dist/marketplace/external_plugins/sequant/skills/assess/SKILL.md +474 -0
- package/dist/marketplace/external_plugins/sequant/skills/clean/SKILL.md +211 -0
- package/dist/marketplace/external_plugins/sequant/skills/docs/SKILL.md +337 -0
- package/dist/marketplace/external_plugins/sequant/skills/exec/SKILL.md +807 -0
- package/dist/marketplace/external_plugins/sequant/skills/fullsolve/SKILL.md +678 -0
- package/dist/marketplace/external_plugins/sequant/skills/improve/SKILL.md +668 -0
- package/dist/marketplace/external_plugins/sequant/skills/loop/SKILL.md +374 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/SKILL.md +570 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/references/code-quality-exemplars.md +107 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/references/code-review-checklist.md +65 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/references/quality-gates.md +179 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/references/semgrep-rules.md +207 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/references/testing-requirements.md +109 -0
- package/dist/marketplace/external_plugins/sequant/skills/qa/scripts/quality-checks.sh +622 -0
- package/dist/marketplace/external_plugins/sequant/skills/reflect/SKILL.md +175 -0
- package/dist/marketplace/external_plugins/sequant/skills/reflect/references/documentation-tiers.md +70 -0
- package/dist/marketplace/external_plugins/sequant/skills/reflect/references/phase-reflection.md +95 -0
- package/dist/marketplace/external_plugins/sequant/skills/security-review/SKILL.md +358 -0
- package/dist/marketplace/external_plugins/sequant/skills/security-review/references/security-checklists.md +432 -0
- package/dist/marketplace/external_plugins/sequant/skills/solve/SKILL.md +697 -0
- package/dist/marketplace/external_plugins/sequant/skills/spec/SKILL.md +754 -0
- package/dist/marketplace/external_plugins/sequant/skills/spec/references/parallel-groups.md +72 -0
- package/dist/marketplace/external_plugins/sequant/skills/spec/references/recommended-workflow.md +92 -0
- package/dist/marketplace/external_plugins/sequant/skills/spec/references/verification-criteria.md +104 -0
- package/dist/marketplace/external_plugins/sequant/skills/test/SKILL.md +600 -0
- package/dist/marketplace/external_plugins/sequant/skills/testgen/SKILL.md +576 -0
- package/dist/marketplace/external_plugins/sequant/skills/verify/SKILL.md +281 -0
- package/dist/src/commands/run.d.ts +13 -280
- package/dist/src/commands/run.js +23 -1956
- package/dist/src/commands/sync.js +3 -0
- package/dist/src/commands/update.js +3 -0
- package/dist/src/lib/plugin-version-sync.d.ts +2 -1
- package/dist/src/lib/plugin-version-sync.js +28 -7
- package/dist/src/lib/solve-comment-parser.d.ts +26 -0
- package/dist/src/lib/solve-comment-parser.js +63 -7
- package/dist/src/lib/workflow/batch-executor.d.ts +117 -0
- package/dist/src/lib/workflow/batch-executor.js +574 -0
- package/dist/src/lib/workflow/phase-executor.d.ts +40 -0
- package/dist/src/lib/workflow/phase-executor.js +381 -0
- package/dist/src/lib/workflow/phase-mapper.d.ts +65 -0
- package/dist/src/lib/workflow/phase-mapper.js +147 -0
- package/dist/src/lib/workflow/pr-operations.d.ts +86 -0
- package/dist/src/lib/workflow/pr-operations.js +326 -0
- package/dist/src/lib/workflow/pr-status.d.ts +9 -7
- package/dist/src/lib/workflow/pr-status.js +13 -11
- package/dist/src/lib/workflow/run-summary.d.ts +36 -0
- package/dist/src/lib/workflow/run-summary.js +142 -0
- package/dist/src/lib/workflow/worktree-manager.d.ts +205 -0
- package/dist/src/lib/workflow/worktree-manager.js +918 -0
- package/package.json +3 -1
- package/templates/skills/fullsolve/SKILL.md +11 -1
- package/templates/skills/qa/SKILL.md +41 -1
- package/templates/skills/solve/SKILL.md +86 -0
- package/templates/skills/spec/SKILL.md +53 -0
- package/templates/skills/test/SKILL.md +10 -0
package/dist/src/commands/run.js
CHANGED
|
@@ -1,1557 +1,37 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* sequant run - Execute workflow for GitHub issues
|
|
3
3
|
*
|
|
4
|
-
*
|
|
5
|
-
*
|
|
4
|
+
* Orchestrator module that composes focused workflow modules:
|
|
5
|
+
* - worktree-manager: Worktree lifecycle (ensure, list, cleanup, changed files)
|
|
6
|
+
* - phase-executor: Phase execution with retry and failure handling
|
|
7
|
+
* - phase-mapper: Label-to-phase detection and workflow parsing
|
|
8
|
+
* - batch-executor: Batch execution, dependency sorting, issue logging
|
|
6
9
|
*/
|
|
7
10
|
import chalk from "chalk";
|
|
8
11
|
import { spawnSync } from "child_process";
|
|
9
|
-
import { existsSync, readFileSync } from "fs";
|
|
10
|
-
import path from "path";
|
|
11
|
-
import { query } from "@anthropic-ai/claude-agent-sdk";
|
|
12
12
|
import { getManifest } from "../lib/manifest.js";
|
|
13
13
|
import { getSettings } from "../lib/settings.js";
|
|
14
|
-
import {
|
|
15
|
-
import { LogWriter, createPhaseLogFromTiming, } from "../lib/workflow/log-writer.js";
|
|
14
|
+
import { LogWriter } from "../lib/workflow/log-writer.js";
|
|
16
15
|
import { StateManager } from "../lib/workflow/state-manager.js";
|
|
17
16
|
import { DEFAULT_PHASES, DEFAULT_CONFIG, } from "../lib/workflow/types.js";
|
|
18
17
|
import { ShutdownManager } from "../lib/shutdown.js";
|
|
19
|
-
import { getMcpServersConfig } from "../lib/system.js";
|
|
20
18
|
import { checkVersionCached, getVersionWarning } from "../lib/version-check.js";
|
|
21
19
|
import { MetricsWriter } from "../lib/workflow/metrics-writer.js";
|
|
22
20
|
import { determineOutcome, } from "../lib/workflow/metrics-schema.js";
|
|
23
|
-
import { getResumablePhasesForIssue } from "../lib/workflow/phase-detection.js";
|
|
24
21
|
import { ui, colors } from "../lib/cli-ui.js";
|
|
25
|
-
import {
|
|
26
|
-
import { getGitDiffStats, getCommitHash, } from "../lib/workflow/git-diff-utils.js";
|
|
22
|
+
import { getCommitHash } from "../lib/workflow/git-diff-utils.js";
|
|
27
23
|
import { getTokenUsageForRun } from "../lib/workflow/token-utils.js";
|
|
28
24
|
import { reconcileStateAtStartup } from "../lib/workflow/state-utils.js";
|
|
29
25
|
import { analyzeRun, formatReflection } from "../lib/workflow/run-reflect.js";
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
}
|
|
40
|
-
/**
|
|
41
|
-
* Parse QA verdict from phase output
|
|
42
|
-
*
|
|
43
|
-
* Looks for verdict patterns in the QA output:
|
|
44
|
-
* - "### Verdict: READY_FOR_MERGE"
|
|
45
|
-
* - "**Verdict:** AC_NOT_MET"
|
|
46
|
-
* - "Verdict: AC_MET_BUT_NOT_A_PLUS"
|
|
47
|
-
*
|
|
48
|
-
* @param output - The captured output from QA phase
|
|
49
|
-
* @returns The parsed verdict or null if not found
|
|
50
|
-
*/
|
|
51
|
-
export function parseQaVerdict(output) {
|
|
52
|
-
if (!output)
|
|
53
|
-
return null;
|
|
54
|
-
// Match various verdict formats:
|
|
55
|
-
// - "### Verdict: X" (markdown header)
|
|
56
|
-
// - "**Verdict:** X" (bold label with colon inside)
|
|
57
|
-
// - "**Verdict:** **X**" (bold label and bold value)
|
|
58
|
-
// - "Verdict: X" (plain)
|
|
59
|
-
// Case insensitive, handles optional markdown formatting
|
|
60
|
-
const verdictMatch = output.match(/(?:###?\s*)?(?:\*\*)?Verdict:?\*?\*?\s*\*?\*?\s*(READY_FOR_MERGE|AC_MET_BUT_NOT_A_PLUS|AC_NOT_MET|NEEDS_VERIFICATION)\*?\*?/i);
|
|
61
|
-
if (!verdictMatch)
|
|
62
|
-
return null;
|
|
63
|
-
// Normalize to uppercase with underscores
|
|
64
|
-
const verdict = verdictMatch[1].toUpperCase().replace(/-/g, "_");
|
|
65
|
-
return verdict;
|
|
66
|
-
}
|
|
67
|
-
/**
|
|
68
|
-
* Get the git repository root directory
|
|
69
|
-
*/
|
|
70
|
-
function getGitRoot() {
|
|
71
|
-
const result = spawnSync("git", ["rev-parse", "--show-toplevel"], {
|
|
72
|
-
stdio: "pipe",
|
|
73
|
-
});
|
|
74
|
-
if (result.status === 0) {
|
|
75
|
-
return result.stdout.toString().trim();
|
|
76
|
-
}
|
|
77
|
-
return null;
|
|
78
|
-
}
|
|
79
|
-
/**
|
|
80
|
-
* Check if a worktree exists for a given branch
|
|
81
|
-
*/
|
|
82
|
-
function findExistingWorktree(branch) {
|
|
83
|
-
const result = spawnSync("git", ["worktree", "list", "--porcelain"], {
|
|
84
|
-
stdio: "pipe",
|
|
85
|
-
});
|
|
86
|
-
if (result.status !== 0)
|
|
87
|
-
return null;
|
|
88
|
-
const output = result.stdout.toString();
|
|
89
|
-
const lines = output.split("\n");
|
|
90
|
-
let currentPath = "";
|
|
91
|
-
for (const line of lines) {
|
|
92
|
-
if (line.startsWith("worktree ")) {
|
|
93
|
-
currentPath = line.substring(9);
|
|
94
|
-
}
|
|
95
|
-
else if (line.startsWith("branch refs/heads/") && line.includes(branch)) {
|
|
96
|
-
return currentPath;
|
|
97
|
-
}
|
|
98
|
-
}
|
|
99
|
-
return null;
|
|
100
|
-
}
|
|
101
|
-
/**
|
|
102
|
-
* Check if a worktree is stale (behind origin/main) and should be recreated
|
|
103
|
-
*
|
|
104
|
-
* @param worktreePath - Path to the worktree
|
|
105
|
-
* @param verbose - Enable verbose output
|
|
106
|
-
* @returns Freshness check result
|
|
107
|
-
*/
|
|
108
|
-
export function checkWorktreeFreshness(worktreePath, verbose) {
|
|
109
|
-
const result = {
|
|
110
|
-
isStale: false,
|
|
111
|
-
commitsBehind: 0,
|
|
112
|
-
hasUncommittedChanges: false,
|
|
113
|
-
hasUnpushedCommits: false,
|
|
114
|
-
};
|
|
115
|
-
// Fetch latest main to ensure accurate comparison
|
|
116
|
-
spawnSync("git", ["-C", worktreePath, "fetch", "origin", "main"], {
|
|
117
|
-
stdio: "pipe",
|
|
118
|
-
timeout: 30000,
|
|
119
|
-
});
|
|
120
|
-
// Check for uncommitted changes
|
|
121
|
-
const statusResult = spawnSync("git", ["-C", worktreePath, "status", "--porcelain"], { stdio: "pipe" });
|
|
122
|
-
if (statusResult.status === 0) {
|
|
123
|
-
result.hasUncommittedChanges =
|
|
124
|
-
statusResult.stdout.toString().trim().length > 0;
|
|
125
|
-
}
|
|
126
|
-
// Get merge base with origin/main
|
|
127
|
-
const mergeBaseResult = spawnSync("git", ["-C", worktreePath, "merge-base", "HEAD", "origin/main"], { stdio: "pipe" });
|
|
128
|
-
if (mergeBaseResult.status !== 0) {
|
|
129
|
-
// Can't determine merge base - not stale
|
|
130
|
-
return result;
|
|
131
|
-
}
|
|
132
|
-
const mergeBase = mergeBaseResult.stdout.toString().trim();
|
|
133
|
-
// Get origin/main HEAD
|
|
134
|
-
const mainHeadResult = spawnSync("git", ["-C", worktreePath, "rev-parse", "origin/main"], { stdio: "pipe" });
|
|
135
|
-
if (mainHeadResult.status !== 0) {
|
|
136
|
-
return result;
|
|
137
|
-
}
|
|
138
|
-
const mainHead = mainHeadResult.stdout.toString().trim();
|
|
139
|
-
// Count commits behind main
|
|
140
|
-
if (mergeBase !== mainHead) {
|
|
141
|
-
const countResult = spawnSync("git", ["-C", worktreePath, "rev-list", "--count", `${mergeBase}..${mainHead}`], { stdio: "pipe" });
|
|
142
|
-
if (countResult.status === 0) {
|
|
143
|
-
result.commitsBehind = parseInt(countResult.stdout.toString().trim(), 10);
|
|
144
|
-
// Consider stale if more than 5 commits behind (configurable threshold)
|
|
145
|
-
result.isStale = result.commitsBehind > 5;
|
|
146
|
-
}
|
|
147
|
-
}
|
|
148
|
-
// Check for unpushed commits (work in progress)
|
|
149
|
-
const unpushedResult = spawnSync("git", ["-C", worktreePath, "log", "--oneline", "@{u}..HEAD"], { stdio: "pipe" });
|
|
150
|
-
if (unpushedResult.status === 0) {
|
|
151
|
-
result.hasUnpushedCommits =
|
|
152
|
-
unpushedResult.stdout.toString().trim().length > 0;
|
|
153
|
-
}
|
|
154
|
-
if (verbose && result.isStale) {
|
|
155
|
-
console.log(chalk.gray(` 📊 Worktree is ${result.commitsBehind} commits behind origin/main`));
|
|
156
|
-
}
|
|
157
|
-
return result;
|
|
158
|
-
}
|
|
159
|
-
/**
|
|
160
|
-
* Remove and recreate a stale worktree
|
|
161
|
-
*
|
|
162
|
-
* @param existingPath - Path to existing worktree
|
|
163
|
-
* @param branch - Branch name
|
|
164
|
-
* @param verbose - Enable verbose output
|
|
165
|
-
* @returns true if worktree was removed
|
|
166
|
-
*/
|
|
167
|
-
export function removeStaleWorktree(existingPath, branch, verbose) {
|
|
168
|
-
if (verbose) {
|
|
169
|
-
console.log(chalk.gray(` 🗑️ Removing stale worktree...`));
|
|
170
|
-
}
|
|
171
|
-
// Remove the worktree
|
|
172
|
-
const removeResult = spawnSync("git", ["worktree", "remove", "--force", existingPath], { stdio: "pipe" });
|
|
173
|
-
if (removeResult.status !== 0) {
|
|
174
|
-
const error = removeResult.stderr.toString();
|
|
175
|
-
console.log(chalk.yellow(` ⚠️ Could not remove worktree: ${error}`));
|
|
176
|
-
return false;
|
|
177
|
-
}
|
|
178
|
-
// Delete the branch so it can be recreated fresh
|
|
179
|
-
const deleteResult = spawnSync("git", ["branch", "-D", branch], {
|
|
180
|
-
stdio: "pipe",
|
|
181
|
-
});
|
|
182
|
-
if (deleteResult.status !== 0 && verbose) {
|
|
183
|
-
console.log(chalk.gray(` ℹ️ Branch ${branch} not deleted (may not exist locally)`));
|
|
184
|
-
}
|
|
185
|
-
return true;
|
|
186
|
-
}
|
|
187
|
-
/**
|
|
188
|
-
* List all active worktrees with their branches
|
|
189
|
-
*/
|
|
190
|
-
export function listWorktrees() {
|
|
191
|
-
const result = spawnSync("git", ["worktree", "list", "--porcelain"], {
|
|
192
|
-
stdio: "pipe",
|
|
193
|
-
});
|
|
194
|
-
if (result.status !== 0)
|
|
195
|
-
return [];
|
|
196
|
-
const output = result.stdout.toString();
|
|
197
|
-
const lines = output.split("\n");
|
|
198
|
-
const worktrees = [];
|
|
199
|
-
let currentPath = "";
|
|
200
|
-
let currentBranch = "";
|
|
201
|
-
for (const line of lines) {
|
|
202
|
-
if (line.startsWith("worktree ")) {
|
|
203
|
-
currentPath = line.substring(9);
|
|
204
|
-
}
|
|
205
|
-
else if (line.startsWith("branch refs/heads/")) {
|
|
206
|
-
currentBranch = line.substring(18);
|
|
207
|
-
// Extract issue number from branch name (e.g., feature/123-some-title)
|
|
208
|
-
const issueMatch = currentBranch.match(/feature\/(\d+)-/);
|
|
209
|
-
const issue = issueMatch ? parseInt(issueMatch[1], 10) : null;
|
|
210
|
-
worktrees.push({ path: currentPath, branch: currentBranch, issue });
|
|
211
|
-
currentPath = "";
|
|
212
|
-
currentBranch = "";
|
|
213
|
-
}
|
|
214
|
-
}
|
|
215
|
-
return worktrees;
|
|
216
|
-
}
|
|
217
|
-
/**
|
|
218
|
-
* Get changed files in a worktree compared to main
|
|
219
|
-
*/
|
|
220
|
-
export function getWorktreeChangedFiles(worktreePath) {
|
|
221
|
-
const result = spawnSync("git", ["-C", worktreePath, "diff", "--name-only", "main...HEAD"], { stdio: "pipe" });
|
|
222
|
-
if (result.status !== 0)
|
|
223
|
-
return [];
|
|
224
|
-
return result.stdout
|
|
225
|
-
.toString()
|
|
226
|
-
.trim()
|
|
227
|
-
.split("\n")
|
|
228
|
-
.filter((f) => f.length > 0);
|
|
229
|
-
}
|
|
230
|
-
/**
|
|
231
|
-
* Get diff stats for a worktree (files changed, lines added)
|
|
232
|
-
* Returns aggregate metrics only - no file paths to preserve privacy
|
|
233
|
-
*/
|
|
234
|
-
export function getWorktreeDiffStats(worktreePath) {
|
|
235
|
-
const result = spawnSync("git", ["-C", worktreePath, "diff", "--stat", "main...HEAD"], { stdio: "pipe" });
|
|
236
|
-
if (result.status !== 0) {
|
|
237
|
-
return { filesChanged: 0, linesAdded: 0 };
|
|
238
|
-
}
|
|
239
|
-
const output = result.stdout.toString();
|
|
240
|
-
const lines = output.trim().split("\n");
|
|
241
|
-
// Summary line is last and looks like: " 5 files changed, 100 insertions(+), 20 deletions(-)"
|
|
242
|
-
const summaryLine = lines[lines.length - 1];
|
|
243
|
-
if (!summaryLine) {
|
|
244
|
-
return { filesChanged: 0, linesAdded: 0 };
|
|
245
|
-
}
|
|
246
|
-
const filesMatch = summaryLine.match(/(\d+)\s+files?\s+changed/);
|
|
247
|
-
const insertionsMatch = summaryLine.match(/(\d+)\s+insertions?\(\+\)/);
|
|
248
|
-
return {
|
|
249
|
-
filesChanged: filesMatch ? parseInt(filesMatch[1], 10) : 0,
|
|
250
|
-
linesAdded: insertionsMatch ? parseInt(insertionsMatch[1], 10) : 0,
|
|
251
|
-
};
|
|
252
|
-
}
|
|
253
|
-
/**
|
|
254
|
-
* Read cache metrics from QA phase (AC-7)
|
|
255
|
-
*
|
|
256
|
-
* @param worktreePath - Path to the worktree
|
|
257
|
-
* @returns CacheMetrics or undefined if not available
|
|
258
|
-
*/
|
|
259
|
-
function readCacheMetrics(worktreePath) {
|
|
260
|
-
const cacheMetricsPath = worktreePath
|
|
261
|
-
? path.join(worktreePath, ".sequant/.cache/qa/cache-metrics.json")
|
|
262
|
-
: ".sequant/.cache/qa/cache-metrics.json";
|
|
263
|
-
if (!existsSync(cacheMetricsPath)) {
|
|
264
|
-
return undefined;
|
|
265
|
-
}
|
|
266
|
-
try {
|
|
267
|
-
const content = readFileSync(cacheMetricsPath, "utf-8");
|
|
268
|
-
const data = JSON.parse(content);
|
|
269
|
-
if (typeof data.hits === "number" &&
|
|
270
|
-
typeof data.misses === "number" &&
|
|
271
|
-
typeof data.skipped === "number") {
|
|
272
|
-
return {
|
|
273
|
-
hits: data.hits,
|
|
274
|
-
misses: data.misses,
|
|
275
|
-
skipped: data.skipped,
|
|
276
|
-
};
|
|
277
|
-
}
|
|
278
|
-
}
|
|
279
|
-
catch {
|
|
280
|
-
// Ignore parse errors
|
|
281
|
-
}
|
|
282
|
-
return undefined;
|
|
283
|
-
}
|
|
284
|
-
/**
|
|
285
|
-
* Filter phases based on resume status.
|
|
286
|
-
*
|
|
287
|
-
* When `resume` is true, calls `getResumablePhasesForIssue` to determine
|
|
288
|
-
* which phases have already completed (via GitHub issue comment markers)
|
|
289
|
-
* and removes them from the execution list.
|
|
290
|
-
*
|
|
291
|
-
* @param issueNumber - GitHub issue number
|
|
292
|
-
* @param phases - The phases to potentially filter
|
|
293
|
-
* @param resume - Whether the --resume flag is set
|
|
294
|
-
* @returns Object with filtered phases and any skipped phases
|
|
295
|
-
*/
|
|
296
|
-
export function filterResumedPhases(issueNumber, phases, resume) {
|
|
297
|
-
if (!resume) {
|
|
298
|
-
return { phases: [...phases], skipped: [] };
|
|
299
|
-
}
|
|
300
|
-
const resumable = getResumablePhasesForIssue(issueNumber, phases);
|
|
301
|
-
const skipped = phases.filter((p) => !resumable.includes(p));
|
|
302
|
-
return { phases: resumable, skipped };
|
|
303
|
-
}
|
|
304
|
-
/**
|
|
305
|
-
* Create or reuse a worktree for an issue
|
|
306
|
-
* @param baseBranch - Optional branch to use as base instead of origin/main (for chain mode)
|
|
307
|
-
* @param chainMode - If true and branch exists, rebase onto baseBranch instead of using as-is
|
|
308
|
-
*/
|
|
309
|
-
async function ensureWorktree(issueNumber, title, verbose, packageManager, baseBranch, chainMode) {
|
|
310
|
-
const gitRoot = getGitRoot();
|
|
311
|
-
if (!gitRoot) {
|
|
312
|
-
console.log(chalk.red(" ❌ Not in a git repository"));
|
|
313
|
-
return null;
|
|
314
|
-
}
|
|
315
|
-
const slug = slugify(title);
|
|
316
|
-
const branch = `feature/${issueNumber}-${slug}`;
|
|
317
|
-
const worktreesDir = path.join(path.dirname(gitRoot), "worktrees");
|
|
318
|
-
const worktreePath = path.join(worktreesDir, branch);
|
|
319
|
-
// Check if worktree already exists
|
|
320
|
-
let existingPath = findExistingWorktree(branch);
|
|
321
|
-
if (existingPath) {
|
|
322
|
-
// AC-3: Check if worktree is stale and needs recreation
|
|
323
|
-
const freshness = checkWorktreeFreshness(existingPath, verbose);
|
|
324
|
-
if (freshness.isStale) {
|
|
325
|
-
// AC-3: Handle stale worktrees - check for work in progress
|
|
326
|
-
if (freshness.hasUncommittedChanges) {
|
|
327
|
-
console.log(chalk.yellow(` ⚠️ Worktree is ${freshness.commitsBehind} commits behind main but has uncommitted changes`));
|
|
328
|
-
console.log(chalk.yellow(` ℹ️ Keeping existing worktree. Commit or stash changes, then re-run.`));
|
|
329
|
-
// Continue with existing worktree
|
|
330
|
-
}
|
|
331
|
-
else if (freshness.hasUnpushedCommits) {
|
|
332
|
-
console.log(chalk.yellow(` ⚠️ Worktree is ${freshness.commitsBehind} commits behind main but has unpushed commits`));
|
|
333
|
-
console.log(chalk.yellow(` ℹ️ Keeping existing worktree with WIP commits.`));
|
|
334
|
-
// Continue with existing worktree
|
|
335
|
-
}
|
|
336
|
-
else {
|
|
337
|
-
// Safe to recreate - no uncommitted/unpushed work
|
|
338
|
-
console.log(chalk.yellow(` ⚠️ Worktree is ${freshness.commitsBehind} commits behind main — recreating fresh`));
|
|
339
|
-
if (removeStaleWorktree(existingPath, branch, verbose)) {
|
|
340
|
-
existingPath = null; // Will fall through to create new worktree
|
|
341
|
-
}
|
|
342
|
-
}
|
|
343
|
-
}
|
|
344
|
-
}
|
|
345
|
-
if (existingPath) {
|
|
346
|
-
if (verbose) {
|
|
347
|
-
console.log(chalk.gray(` 📂 Reusing existing worktree: ${existingPath}`));
|
|
348
|
-
}
|
|
349
|
-
// In chain mode, rebase existing worktree onto previous chain link
|
|
350
|
-
if (chainMode && baseBranch) {
|
|
351
|
-
if (verbose) {
|
|
352
|
-
console.log(chalk.gray(` 🔄 Rebasing existing worktree onto chain base (${baseBranch})...`));
|
|
353
|
-
}
|
|
354
|
-
const rebaseResult = spawnSync("git", ["-C", existingPath, "rebase", baseBranch], { stdio: "pipe" });
|
|
355
|
-
if (rebaseResult.status !== 0) {
|
|
356
|
-
const rebaseError = rebaseResult.stderr.toString();
|
|
357
|
-
// Check if it's a conflict
|
|
358
|
-
if (rebaseError.includes("CONFLICT") ||
|
|
359
|
-
rebaseError.includes("could not apply")) {
|
|
360
|
-
console.log(chalk.yellow(` ⚠️ Rebase conflict detected. Aborting rebase and keeping original branch state.`));
|
|
361
|
-
console.log(chalk.yellow(` ℹ️ Branch ${branch} is not properly chained. Manual rebase may be required.`));
|
|
362
|
-
// Abort the rebase to restore branch state
|
|
363
|
-
spawnSync("git", ["-C", existingPath, "rebase", "--abort"], {
|
|
364
|
-
stdio: "pipe",
|
|
365
|
-
});
|
|
366
|
-
}
|
|
367
|
-
else {
|
|
368
|
-
console.log(chalk.yellow(` ⚠️ Rebase failed: ${rebaseError.trim()}`));
|
|
369
|
-
console.log(chalk.yellow(` ℹ️ Continuing with branch in its original state.`));
|
|
370
|
-
}
|
|
371
|
-
return {
|
|
372
|
-
issue: issueNumber,
|
|
373
|
-
path: existingPath,
|
|
374
|
-
branch,
|
|
375
|
-
existed: true,
|
|
376
|
-
rebased: false,
|
|
377
|
-
};
|
|
378
|
-
}
|
|
379
|
-
if (verbose) {
|
|
380
|
-
console.log(chalk.green(` ✅ Existing worktree rebased onto ${baseBranch}`));
|
|
381
|
-
}
|
|
382
|
-
return {
|
|
383
|
-
issue: issueNumber,
|
|
384
|
-
path: existingPath,
|
|
385
|
-
branch,
|
|
386
|
-
existed: true,
|
|
387
|
-
rebased: true,
|
|
388
|
-
};
|
|
389
|
-
}
|
|
390
|
-
return {
|
|
391
|
-
issue: issueNumber,
|
|
392
|
-
path: existingPath,
|
|
393
|
-
branch,
|
|
394
|
-
existed: true,
|
|
395
|
-
rebased: false,
|
|
396
|
-
};
|
|
397
|
-
}
|
|
398
|
-
// Check if branch exists (but no worktree)
|
|
399
|
-
const branchCheck = spawnSync("git", ["show-ref", "--verify", "--quiet", `refs/heads/${branch}`], { stdio: "pipe" });
|
|
400
|
-
const branchExists = branchCheck.status === 0;
|
|
401
|
-
if (verbose) {
|
|
402
|
-
console.log(chalk.gray(` 🌿 Creating worktree for #${issueNumber}...`));
|
|
403
|
-
}
|
|
404
|
-
// Determine the base for the new branch
|
|
405
|
-
// For custom base branches, use origin/<branch> if it's a remote-style reference
|
|
406
|
-
// For local branches (chain mode), use as-is
|
|
407
|
-
const isLocalBranch = baseBranch && !baseBranch.startsWith("origin/") && baseBranch !== "main";
|
|
408
|
-
const baseRef = baseBranch
|
|
409
|
-
? isLocalBranch
|
|
410
|
-
? baseBranch
|
|
411
|
-
: baseBranch.startsWith("origin/")
|
|
412
|
-
? baseBranch
|
|
413
|
-
: `origin/${baseBranch}`
|
|
414
|
-
: "origin/main";
|
|
415
|
-
// Fetch the base branch to ensure worktree starts from fresh baseline
|
|
416
|
-
const branchToFetch = baseBranch
|
|
417
|
-
? baseBranch.replace(/^origin\//, "")
|
|
418
|
-
: "main";
|
|
419
|
-
if (!isLocalBranch) {
|
|
420
|
-
if (verbose) {
|
|
421
|
-
console.log(chalk.gray(` 🔄 Fetching latest ${branchToFetch}...`));
|
|
422
|
-
}
|
|
423
|
-
const fetchResult = spawnSync("git", ["fetch", "origin", branchToFetch], {
|
|
424
|
-
stdio: "pipe",
|
|
425
|
-
});
|
|
426
|
-
if (fetchResult.status !== 0 && verbose) {
|
|
427
|
-
console.log(chalk.yellow(` ⚠️ Could not fetch origin/${branchToFetch}, using local state`));
|
|
428
|
-
}
|
|
429
|
-
}
|
|
430
|
-
else if (verbose) {
|
|
431
|
-
console.log(chalk.gray(` 🔗 Chaining from branch: ${baseBranch}`));
|
|
432
|
-
}
|
|
433
|
-
// Ensure worktrees directory exists
|
|
434
|
-
if (!existsSync(worktreesDir)) {
|
|
435
|
-
spawnSync("mkdir", ["-p", worktreesDir], { stdio: "pipe" });
|
|
436
|
-
}
|
|
437
|
-
// Create the worktree
|
|
438
|
-
let createResult;
|
|
439
|
-
let needsRebase = false;
|
|
440
|
-
if (branchExists) {
|
|
441
|
-
// Use existing branch
|
|
442
|
-
createResult = spawnSync("git", ["worktree", "add", worktreePath, branch], {
|
|
443
|
-
stdio: "pipe",
|
|
444
|
-
});
|
|
445
|
-
// In chain mode with existing branch, mark for rebase onto previous chain link
|
|
446
|
-
if (chainMode && baseBranch) {
|
|
447
|
-
needsRebase = true;
|
|
448
|
-
}
|
|
449
|
-
}
|
|
450
|
-
else {
|
|
451
|
-
// Create new branch from base reference (origin/main or previous branch in chain)
|
|
452
|
-
createResult = spawnSync("git", ["worktree", "add", worktreePath, "-b", branch, baseRef], { stdio: "pipe" });
|
|
453
|
-
}
|
|
454
|
-
if (createResult.status !== 0) {
|
|
455
|
-
const error = createResult.stderr.toString();
|
|
456
|
-
console.log(chalk.red(` ❌ Failed to create worktree: ${error}`));
|
|
457
|
-
return null;
|
|
458
|
-
}
|
|
459
|
-
// Rebase existing branch onto chain base if needed
|
|
460
|
-
let rebased = false;
|
|
461
|
-
if (needsRebase) {
|
|
462
|
-
if (verbose) {
|
|
463
|
-
console.log(chalk.gray(` 🔄 Rebasing existing branch onto previous chain link (${baseRef})...`));
|
|
464
|
-
}
|
|
465
|
-
const rebaseResult = spawnSync("git", ["-C", worktreePath, "rebase", baseRef], {
|
|
466
|
-
stdio: "pipe",
|
|
467
|
-
});
|
|
468
|
-
if (rebaseResult.status !== 0) {
|
|
469
|
-
const rebaseError = rebaseResult.stderr.toString();
|
|
470
|
-
// Check if it's a conflict
|
|
471
|
-
if (rebaseError.includes("CONFLICT") ||
|
|
472
|
-
rebaseError.includes("could not apply")) {
|
|
473
|
-
console.log(chalk.yellow(` ⚠️ Rebase conflict detected. Aborting rebase and keeping original branch state.`));
|
|
474
|
-
console.log(chalk.yellow(` ℹ️ Branch ${branch} is not properly chained. Manual rebase may be required.`));
|
|
475
|
-
// Abort the rebase to restore branch state
|
|
476
|
-
spawnSync("git", ["-C", worktreePath, "rebase", "--abort"], {
|
|
477
|
-
stdio: "pipe",
|
|
478
|
-
});
|
|
479
|
-
}
|
|
480
|
-
else {
|
|
481
|
-
console.log(chalk.yellow(` ⚠️ Rebase failed: ${rebaseError.trim()}`));
|
|
482
|
-
console.log(chalk.yellow(` ℹ️ Continuing with branch in its original state.`));
|
|
483
|
-
}
|
|
484
|
-
}
|
|
485
|
-
else {
|
|
486
|
-
rebased = true;
|
|
487
|
-
if (verbose) {
|
|
488
|
-
console.log(chalk.green(` ✅ Branch rebased onto ${baseRef}`));
|
|
489
|
-
}
|
|
490
|
-
}
|
|
491
|
-
}
|
|
492
|
-
// Copy .env.local if it exists
|
|
493
|
-
const envLocalSrc = path.join(gitRoot, ".env.local");
|
|
494
|
-
const envLocalDst = path.join(worktreePath, ".env.local");
|
|
495
|
-
if (existsSync(envLocalSrc) && !existsSync(envLocalDst)) {
|
|
496
|
-
spawnSync("cp", [envLocalSrc, envLocalDst], { stdio: "pipe" });
|
|
497
|
-
}
|
|
498
|
-
// Copy .claude/settings.local.json if it exists
|
|
499
|
-
const claudeSettingsSrc = path.join(gitRoot, ".claude", "settings.local.json");
|
|
500
|
-
const claudeSettingsDst = path.join(worktreePath, ".claude", "settings.local.json");
|
|
501
|
-
if (existsSync(claudeSettingsSrc) && !existsSync(claudeSettingsDst)) {
|
|
502
|
-
spawnSync("mkdir", ["-p", path.join(worktreePath, ".claude")], {
|
|
503
|
-
stdio: "pipe",
|
|
504
|
-
});
|
|
505
|
-
spawnSync("cp", [claudeSettingsSrc, claudeSettingsDst], { stdio: "pipe" });
|
|
506
|
-
}
|
|
507
|
-
// Install dependencies if needed
|
|
508
|
-
const nodeModulesPath = path.join(worktreePath, "node_modules");
|
|
509
|
-
if (!existsSync(nodeModulesPath)) {
|
|
510
|
-
if (verbose) {
|
|
511
|
-
console.log(chalk.gray(` 📦 Installing dependencies...`));
|
|
512
|
-
}
|
|
513
|
-
// Use detected package manager or default to npm
|
|
514
|
-
const pm = packageManager || "npm";
|
|
515
|
-
const pmConfig = PM_CONFIG[pm];
|
|
516
|
-
const [cmd, ...args] = pmConfig.installSilent.split(" ");
|
|
517
|
-
spawnSync(cmd, args, {
|
|
518
|
-
cwd: worktreePath,
|
|
519
|
-
stdio: "pipe",
|
|
520
|
-
});
|
|
521
|
-
}
|
|
522
|
-
if (verbose) {
|
|
523
|
-
console.log(chalk.green(` ✅ Worktree ready: ${worktreePath}`));
|
|
524
|
-
}
|
|
525
|
-
return {
|
|
526
|
-
issue: issueNumber,
|
|
527
|
-
path: worktreePath,
|
|
528
|
-
branch,
|
|
529
|
-
existed: false,
|
|
530
|
-
rebased,
|
|
531
|
-
};
|
|
532
|
-
}
|
|
533
|
-
/**
|
|
534
|
-
* Ensure worktrees exist for all issues before execution
|
|
535
|
-
* @param baseBranch - Optional base branch for worktree creation (default: main)
|
|
536
|
-
*/
|
|
537
|
-
async function ensureWorktrees(issues, verbose, packageManager, baseBranch) {
|
|
538
|
-
const worktrees = new Map();
|
|
539
|
-
const baseDisplay = baseBranch || "main";
|
|
540
|
-
console.log(chalk.blue(`\n 📂 Preparing worktrees from ${baseDisplay}...`));
|
|
541
|
-
for (const issue of issues) {
|
|
542
|
-
const worktree = await ensureWorktree(issue.number, issue.title, verbose, packageManager, baseBranch, false);
|
|
543
|
-
if (worktree) {
|
|
544
|
-
worktrees.set(issue.number, worktree);
|
|
545
|
-
}
|
|
546
|
-
}
|
|
547
|
-
const created = Array.from(worktrees.values()).filter((w) => !w.existed).length;
|
|
548
|
-
const reused = Array.from(worktrees.values()).filter((w) => w.existed).length;
|
|
549
|
-
if (created > 0 || reused > 0) {
|
|
550
|
-
console.log(chalk.gray(` Worktrees: ${created} created, ${reused} reused`));
|
|
551
|
-
}
|
|
552
|
-
return worktrees;
|
|
553
|
-
}
|
|
554
|
-
/**
 * Ensure worktrees exist for all issues in chain mode.
 * Each issue branches from the previous issue's branch, so later issues build
 * on earlier ones; the first issue starts from `baseBranch` (or main).
 * @param issues Issues to prepare worktrees for, in chain order
 * @param verbose Whether to show verbose output
 * @param packageManager Package manager passed through to ensureWorktree
 * @param baseBranch - Optional starting base branch for the chain (default: main)
 * @returns Map of issue number → worktree info (only successfully prepared ones)
 */
async function ensureWorktreesChain(issues, verbose, packageManager, baseBranch) {
    const prepared = new Map();
    const startPoint = baseBranch || "main";
    console.log(chalk.blue(`\n 🔗 Preparing chained worktrees from ${startPoint}...`));
    // Each iteration branches from the branch produced by the previous one;
    // the very first issue starts from the caller-supplied base (or main).
    let parentBranch = baseBranch;
    for (const issue of issues) {
        const wt = await ensureWorktree(issue.number, issue.title, verbose, packageManager, parentBranch, true);
        if (!wt) {
            // A failed worktree breaks the dependency chain — stop here rather
            // than branching later issues from a stale parent.
            console.log(chalk.red(` ❌ Chain broken: could not create worktree for #${issue.number}`));
            break;
        }
        prepared.set(issue.number, wt);
        parentBranch = wt.branch; // next issue branches from this one
    }
    // Tally stats in a single pass for the summary line.
    let created = 0;
    let reused = 0;
    let rebased = 0;
    for (const wt of prepared.values()) {
        if (wt.existed) {
            reused += 1;
        }
        else {
            created += 1;
        }
        if (wt.rebased) {
            rebased += 1;
        }
    }
    if (created > 0 || reused > 0) {
        let msg = ` Chained worktrees: ${created} created, ${reused} reused`;
        if (rebased > 0) {
            msg += `, ${rebased} rebased`;
        }
        console.log(chalk.gray(msg));
    }
    // Show chain structure (only issues that actually got a worktree).
    if (prepared.size > 0) {
        const chainOrder = issues
            .filter((i) => prepared.has(i.number))
            .map((i) => `#${i.number}`)
            .join(" → ");
        console.log(chalk.gray(` Chain: ${startPoint} → ${chainOrder}`));
    }
    return prepared;
}
|
|
598
|
-
/**
 * Create a checkpoint commit in the worktree after QA passes.
 * This allows recovery in case later issues in the chain fail.
 * @param worktreePath Path to the worktree to commit in
 * @param issueNumber Issue number recorded in the commit message
 * @param verbose Whether to show verbose output on failures / no-op
 * @returns true when the tree is clean or the commit succeeded, false otherwise
 * @internal Exported for testing
 */
export function createCheckpointCommit(worktreePath, issueNumber, verbose) {
    // Small helper: run a git subcommand inside the worktree, output captured.
    const git = (...args) => spawnSync("git", ["-C", worktreePath, ...args], { stdio: "pipe" });
    // Bail out early when we cannot even read working-tree status.
    const status = git("status", "--porcelain");
    if (status.status !== 0) {
        if (verbose) {
            console.log(chalk.yellow(` ⚠️ Could not check git status for checkpoint`));
        }
        return false;
    }
    // A clean tree means everything is already committed — treat as success.
    if (status.stdout.toString().trim().length === 0) {
        if (verbose) {
            console.log(chalk.gray(` 📌 No changes to checkpoint (already committed)`));
        }
        return true;
    }
    // Stage everything, including untracked files.
    if (git("add", "-A").status !== 0) {
        if (verbose) {
            console.log(chalk.yellow(` ⚠️ Could not stage changes for checkpoint`));
        }
        return false;
    }
    const commitMessage = `checkpoint(#${issueNumber}): QA passed

This is an automatic checkpoint commit created after issue #${issueNumber}
passed QA in chain mode. It serves as a recovery point if later issues fail.`;
    const commit = git("commit", "-m", commitMessage);
    if (commit.status !== 0) {
        if (verbose) {
            console.log(chalk.yellow(` ⚠️ Could not create checkpoint commit: ${commit.stderr.toString()}`));
        }
        return false;
    }
    console.log(chalk.green(` 📌 Checkpoint commit created for #${issueNumber}`));
    return true;
}
|
|
645
|
-
/**
 * Lockfile names for different package managers.
 * Checked (in order) by reinstallIfLockfileChanged to detect dependency drift
 * after a rebase.
 */
const LOCKFILES = [
    "package-lock.json",
    "pnpm-lock.yaml",
    "bun.lock",
    "bun.lockb", // binary lockfile used by Bun before v1.2 — still common in the wild
    "yarn.lock",
];
|
|
654
|
-
/**
 * Check if any lockfile changed during a rebase and re-run install if needed.
 * This prevents dependency drift when the lockfile was updated on main.
 * @param worktreePath Path to the worktree
 * @param packageManager Package manager to use for install
 * @param verbose Whether to show verbose output
 * @param preRebaseRef Git ref pointing to pre-rebase HEAD (defaults to ORIG_HEAD,
 * which git sets automatically after rebase). Using ORIG_HEAD captures all
 * lockfile changes across multi-commit rebases, unlike HEAD~1 which only
 * checks the last commit.
 * @returns true if reinstall was performed, false otherwise
 * @internal Exported for testing
 */
export function reinstallIfLockfileChanged(worktreePath, packageManager, verbose, preRebaseRef = "ORIG_HEAD") {
    // Compare pre-rebase state to current HEAD to detect all lockfile changes
    // introduced by the rebase (including changes from main that were pulled in)
    let lockfileChanged = false;
    for (const lockfile of LOCKFILES) {
        const result = spawnSync("git", [
            "-C",
            worktreePath,
            "diff",
            "--name-only",
            `${preRebaseRef}..HEAD`,
            "--",
            lockfile,
        ], { stdio: "pipe" });
        // A non-zero status (e.g. ORIG_HEAD missing) is treated as "no change"
        // for this lockfile — the reinstall is best-effort, not an error path.
        if (result.status === 0 && result.stdout.toString().trim().length > 0) {
            lockfileChanged = true;
            if (verbose) {
                console.log(chalk.gray(` 📦 Lockfile changed: ${lockfile}`));
            }
            break;
        }
    }
    if (!lockfileChanged) {
        if (verbose) {
            console.log(chalk.gray(` 📦 No lockfile changes detected`));
        }
        return false;
    }
    // Re-run install to sync node_modules with updated lockfile
    console.log(chalk.blue(` 📦 Reinstalling dependencies (lockfile changed)...`));
    const pm = packageManager || "npm";
    const pmConfig = PM_CONFIG[pm];
    if (!pmConfig) {
        // Guard: an unrecognized package manager would otherwise crash on
        // pmConfig.installSilent below. Warn and skip the reinstall instead.
        console.log(chalk.yellow(` ⚠️ Unknown package manager "${pm}" — skipping dependency reinstall`));
        return false;
    }
    const [cmd, ...args] = pmConfig.installSilent.split(" ");
    const installResult = spawnSync(cmd, args, {
        cwd: worktreePath,
        stdio: "pipe",
    });
    if (installResult.status !== 0) {
        const error = installResult.stderr.toString();
        console.log(chalk.yellow(` ⚠️ Dependency reinstall failed: ${error.trim()}`));
        return false;
    }
    console.log(chalk.green(` ✅ Dependencies reinstalled`));
    return true;
}
|
|
712
|
-
/**
 * Rebase the worktree branch onto origin/main before PR creation.
 * This ensures the branch is up-to-date and prevents lockfile drift.
 *
 * @param worktreePath Path to the worktree
 * @param issueNumber Issue number (for logging)
 * @param packageManager Package manager to use if reinstall needed
 * @param verbose Whether to show verbose output
 * @returns RebaseResult indicating success/failure and whether reinstall was performed
 * @internal Exported for testing
 */
export function rebaseBeforePR(worktreePath, issueNumber, packageManager, verbose) {
    if (verbose) {
        console.log(chalk.gray(` 🔄 Rebasing #${issueNumber} onto origin/main before PR...`));
    }
    // Shared shape for the two failure exits below.
    const failure = (error) => ({
        performed: true,
        success: false,
        reinstalled: false,
        error,
    });
    // Fetch latest main to ensure we're rebasing onto fresh state.
    const fetch = spawnSync("git", ["-C", worktreePath, "fetch", "origin", "main"], {
        stdio: "pipe",
    });
    if (fetch.status !== 0) {
        // Non-fatal: the rebase may still work against the local origin/main ref.
        console.log(chalk.yellow(` ⚠️ Could not fetch origin/main: ${fetch.stderr.toString().trim()}`));
    }
    // Perform the rebase.
    const rebase = spawnSync("git", ["-C", worktreePath, "rebase", "origin/main"], { stdio: "pipe" });
    if (rebase.status !== 0) {
        const rebaseError = rebase.stderr.toString();
        const isConflict = rebaseError.includes("CONFLICT") || rebaseError.includes("could not apply");
        if (isConflict) {
            console.log(chalk.yellow(` ⚠️ Rebase conflict detected. Aborting rebase and keeping original branch state.`));
            console.log(chalk.yellow(` ℹ️ PR will be created without rebase. Manual rebase may be required before merge.`));
            // Abort to restore the branch to its pre-rebase state.
            spawnSync("git", ["-C", worktreePath, "rebase", "--abort"], {
                stdio: "pipe",
            });
            return failure("Rebase conflict - manual resolution required");
        }
        console.log(chalk.yellow(` ⚠️ Rebase failed: ${rebaseError.trim()}`));
        console.log(chalk.yellow(` ℹ️ Continuing with branch in its original state.`));
        return failure(rebaseError.trim());
    }
    console.log(chalk.green(` ✅ Branch rebased onto origin/main`));
    // The rebase may have pulled in lockfile changes from main — resync deps.
    return {
        performed: true,
        success: true,
        reinstalled: reinstallIfLockfileChanged(worktreePath, packageManager, verbose),
    };
}
|
|
776
|
-
/**
 * Push branch and create a PR after successful QA.
 *
 * Handles both fresh PR creation and detection of existing PRs.
 * Failures are warnings — they don't fail the run.
 *
 * @param worktreePath Path to the worktree
 * @param issueNumber Issue number
 * @param issueTitle Issue title (for PR title)
 * @param branch Branch name
 * @param verbose Whether to show verbose output
 * @param labels Issue labels — a label starting with "bug" switches the PR title prefix to "fix"
 * @returns PRCreationResult with PR info or error
 * @internal Exported for testing
 */
export function createPR(worktreePath, issueNumber, issueTitle, branch, verbose, labels) {
    // Step 1: Check for existing PR on this branch — reuse it if found.
    const existing = getPRForBranch(worktreePath, branch);
    if (existing) {
        if (verbose) {
            console.log(chalk.gray(` ℹ️ PR #${existing.number} already exists for branch ${branch}`));
        }
        return {
            attempted: true,
            success: true,
            prNumber: existing.number,
            prUrl: existing.url,
        };
    }
    // Step 2: Push branch to remote.
    if (verbose) {
        console.log(chalk.gray(` 🚀 Pushing branch ${branch} to origin...`));
    }
    const pushResult = spawnSync("git", ["-C", worktreePath, "push", "-u", "origin", branch], { stdio: "pipe", timeout: 60000 });
    if (pushResult.status !== 0) {
        const pushError = pushResult.stderr?.toString().trim() ?? "Unknown error";
        console.log(chalk.yellow(` ⚠️ git push failed: ${pushError}`));
        return {
            attempted: true,
            success: false,
            error: `git push failed: ${pushError}`,
        };
    }
    // Step 3: Create PR. Title prefix follows conventional commits: "fix" for
    // bug-labeled issues, "feat" otherwise.
    if (verbose) {
        console.log(chalk.gray(` 📝 Creating PR for #${issueNumber}...`));
    }
    const isBug = labels?.some((l) => /^bug/i.test(l));
    const prefix = isBug ? "fix" : "feat";
    const prTitle = `${prefix}(#${issueNumber}): ${issueTitle}`;
    const prBody = [
        `## Summary`,
        ``,
        `Automated PR for issue #${issueNumber}.`,
        ``,
        `Fixes #${issueNumber}`,
        ``,
        `---`,
        `🤖 Generated by \`sequant run\``,
    ].join("\n");
    const prResult = spawnSync("gh", ["pr", "create", "--title", prTitle, "--body", prBody, "--head", branch], { stdio: "pipe", cwd: worktreePath, timeout: 30000 });
    if (prResult.status !== 0) {
        const prError = prResult.stderr?.toString().trim() ?? "Unknown error";
        // "already exists" means a PR appeared between our check and the create
        // (race condition or push-before-PR scenarios) — recover by viewing it.
        if (prError.includes("already exists")) {
            const recovered = getPRForBranch(worktreePath, branch);
            if (recovered) {
                return {
                    attempted: true,
                    success: true,
                    prNumber: recovered.number,
                    prUrl: recovered.url,
                };
            }
        }
        console.log(chalk.yellow(` ⚠️ PR creation failed: ${prError}`));
        return {
            attempted: true,
            success: false,
            error: `gh pr create failed: ${prError}`,
        };
    }
    // Step 4: Extract PR URL from `gh pr create` output.
    const prOutput = prResult.stdout?.toString().trim() ?? "";
    const prUrlMatch = prOutput.match(/https:\/\/github\.com\/[^\s]+\/pull\/(\d+)/);
    if (prUrlMatch) {
        const prNumber = parseInt(prUrlMatch[1], 10);
        const prUrl = prUrlMatch[0];
        console.log(chalk.green(` ✅ PR #${prNumber} created: ${prUrl}`));
        return {
            attempted: true,
            success: true,
            prNumber,
            prUrl,
        };
    }
    // Fallback: ask gh for the PR details directly.
    const fallback = getPRForBranch(worktreePath, branch);
    if (fallback) {
        console.log(chalk.green(` ✅ PR #${fallback.number} created: ${fallback.url}`));
        return {
            attempted: true,
            success: true,
            prNumber: fallback.number,
            prUrl: fallback.url,
        };
    }
    // PR was created but we couldn't parse the URL.
    console.log(chalk.yellow(` ⚠️ PR created but could not extract URL from output: ${prOutput}`));
    return {
        attempted: true,
        success: true,
        error: "PR created but URL extraction failed",
    };
}
/**
 * Look up the PR for a branch via `gh pr view`.
 * @param worktreePath Directory to run gh in
 * @param branch Branch name to look up
 * @returns `{ number, url }` when a PR exists and both fields are present, otherwise null
 */
function getPRForBranch(worktreePath, branch) {
    const result = spawnSync("gh", ["pr", "view", branch, "--json", "number,url"], { stdio: "pipe", cwd: worktreePath, timeout: 15000 });
    if (result.status !== 0 || !result.stdout) {
        return null;
    }
    try {
        const info = JSON.parse(result.stdout.toString());
        if (info.number && info.url) {
            return { number: info.number, url: info.url };
        }
    }
    catch {
        // JSON parse failed — treat as "no PR found"
    }
    return null;
}
|
|
910
|
-
/**
 * Natural language prompts for each phase.
 * These prompts will invoke the corresponding skills via natural language.
 * Every "{issue}" placeholder is substituted with the issue number by
 * getPhasePrompt() before the prompt is sent.
 */
const PHASE_PROMPTS = {
    spec: "Review GitHub issue #{issue} and create an implementation plan with verification criteria. Run the /spec {issue} workflow.",
    "security-review": "Perform a deep security analysis for GitHub issue #{issue} focusing on auth, permissions, and sensitive operations. Run the /security-review {issue} workflow.",
    testgen: "Generate test stubs for GitHub issue #{issue} based on the specification. Run the /testgen {issue} workflow.",
    exec: "Implement the feature for GitHub issue #{issue} following the spec. Run the /exec {issue} workflow.",
    test: "Execute structured browser-based testing for GitHub issue #{issue}. Run the /test {issue} workflow.",
    qa: "Review the implementation for GitHub issue #{issue} against acceptance criteria. Run the /qa {issue} workflow.",
    loop: "Parse test/QA findings for GitHub issue #{issue} and iterate until quality gates pass. Run the /loop {issue} workflow.",
};
|
|
923
|
-
/**
 * UI-related labels that trigger automatic test phase.
 * Matched case-insensitively as substrings by detectPhasesFromLabels.
 */
const UI_LABELS = ["ui", "frontend", "admin", "web", "browser"];
/**
 * Bug-related labels that skip spec phase.
 */
const BUG_LABELS = ["bug", "fix", "hotfix", "patch"];
/**
 * Documentation labels that skip spec phase.
 */
const DOCS_LABELS = ["docs", "documentation", "readme"];
/**
 * Complex labels that enable quality loop.
 */
const COMPLEX_LABELS = ["complex", "refactor", "breaking", "major"];
/**
 * Security-related labels that trigger security-review phase.
 * Note: "admin" appears in both UI_LABELS and SECURITY_LABELS, so an
 * admin-labeled issue gets both the test phase and the security-review phase.
 */
const SECURITY_LABELS = [
    "security",
    "auth",
    "authentication",
    "permissions",
    "admin",
];
|
|
949
|
-
/**
 * Detect phases based on issue labels (like /solve logic).
 * A label "matches" a category when any category keyword appears as a
 * case-insensitive substring of the label.
 * @param labels Raw label strings from the issue
 * @returns `{ phases, qualityLoop }` — ordered phase list and loop flag
 */
export function detectPhasesFromLabels(labels) {
    const lowered = labels.map((l) => l.toLowerCase());
    const matchesAny = (keywords) => lowered.some((label) => keywords.some((kw) => label.includes(kw)));
    const isBugFix = matchesAny(BUG_LABELS); // bug/fix → skip spec
    const isDocs = matchesAny(DOCS_LABELS); // docs → skip spec
    const isUI = matchesAny(UI_LABELS); // UI → add browser test phase
    const isComplex = matchesAny(COMPLEX_LABELS); // complex → quality loop
    const isSecurity = matchesAny(SECURITY_LABELS); // security → add review phase
    // Pick the base workflow.
    let phases;
    if (isBugFix || isDocs) {
        phases = ["exec", "qa"]; // simple workflow
    }
    else if (isUI) {
        phases = ["spec", "exec", "test", "qa"]; // UI workflow
    }
    else {
        phases = ["spec", "exec", "qa"]; // standard workflow
    }
    // security-review slots in right after spec, but only when spec runs at all.
    const specIndex = phases.indexOf("spec");
    if (isSecurity && specIndex !== -1) {
        phases.splice(specIndex + 1, 0, "security-review");
    }
    return { phases, qualityLoop: isComplex };
}
|
|
985
|
-
/**
 * Parse recommended workflow from /spec output.
 *
 * Looks for:
 * ## Recommended Workflow
 * **Phases:** exec → qa
 * **Quality Loop:** enabled|disabled
 *
 * @param output Raw markdown text produced by the /spec phase
 * @returns `{ phases, qualityLoop }` when a workflow section with at least one
 *   valid phase is found, otherwise null
 */
export function parseRecommendedWorkflow(output) {
    // Find the Recommended Workflow section and capture the Phases line.
    const workflowMatch = output.match(/## Recommended Workflow[\s\S]*?\*\*Phases:\*\*\s*([^\n]+)/i);
    if (!workflowMatch) {
        return null;
    }
    // Known phase names as a Set (hoisted out of the loop; O(1) membership
    // instead of rebuilding an array and scanning it per candidate).
    const KNOWN_PHASES = new Set([
        "spec",
        "security-review",
        "testgen",
        "exec",
        "test",
        "qa",
        "loop",
    ]);
    // Phases may be separated by "→", "->", or commas.
    const validPhases = workflowMatch[1]
        .split(/\s*→\s*|\s*->\s*|\s*,\s*/)
        .map((p) => p.trim().toLowerCase())
        .filter((p) => KNOWN_PHASES.has(p));
    if (validPhases.length === 0) {
        return null;
    }
    // Quality loop: enabled/true/yes → true; anything else (or absent) → false.
    const qualityLoopMatch = output.match(/\*\*Quality Loop:\*\*\s*(enabled|disabled|true|false|yes|no)/i);
    const qualityLoop = qualityLoopMatch
        ? ["enabled", "true", "yes"].includes(qualityLoopMatch[1].toLowerCase())
        : false;
    return { phases: validPhases, qualityLoop };
}
|
|
1030
|
-
/**
 * Format duration in human-readable format.
 * Under a minute: one decimal place ("42.3s"); otherwise minutes and whole
 * seconds ("2m 5s").
 * @param {number} seconds - Duration in seconds (may be fractional).
 * @returns {string} Human-readable duration.
 */
function formatDuration(seconds) {
    if (seconds < 60) {
        return `${seconds.toFixed(1)}s`;
    }
    let mins = Math.floor(seconds / 60);
    // Round the remainder and carry into minutes — otherwise e.g. 119.9s
    // would render as "1m 60s" instead of "2m 0s".
    let secs = Math.round(seconds % 60);
    if (secs === 60) {
        mins += 1;
        secs = 0;
    }
    return `${mins}m ${secs}s`;
}
|
|
1041
|
-
/**
 * Get the prompt for a phase, substituting every "{issue}" placeholder
 * with the issue number.
 */
function getPhasePrompt(phase, issueNumber) {
    const template = PHASE_PROMPTS[phase];
    // split/join replaces every literal occurrence, equivalent to a /g regex.
    return template.split("{issue}").join(String(issueNumber));
}
|
|
1047
|
-
/**
 * Phases that require worktree isolation (exec, test, qa).
 * Spec runs in main repo since it's planning-only.
 * Checked by executePhase to decide the working directory and whether to set
 * the SEQUANT_WORKTREE / SEQUANT_ISSUE environment variables.
 */
const ISOLATED_PHASES = ["exec", "test", "qa"];
|
|
1052
|
-
/**
 * Execute a single phase for an issue using Claude Agent SDK.
 *
 * Streams the SDK query, capturing the session id, assistant text output, and
 * the final result message. For the "qa" phase the captured output is parsed
 * for a verdict — SDK "success" alone does not mean QA passed.
 *
 * @param issueNumber Issue number substituted into the phase prompt
 * @param phase Phase name (key of PHASE_PROMPTS)
 * @param config Run configuration (dryRun, verbose, phaseTimeout, retry, mcp,
 *   noSmartTests are read here)
 * @param sessionId Optional previous session to resume (ignored when switching
 *   into a worktree — a different cwd breaks resume)
 * @param worktreePath Optional worktree; used as cwd for ISOLATED_PHASES only
 * @param shutdownManager Optional manager — checked for shuttingDown and given
 *   the AbortController for graceful shutdown
 * @param spinner Optional spinner with pause()/resume(); paused while verbose
 *   output streams — presumably an ora-like wrapper, confirm at call site
 * @returns Result object: { phase, success, durationSeconds, error?,
 *   sessionId?, output?, verdict? }
 */
async function executePhase(issueNumber, phase, config, sessionId, worktreePath, shutdownManager, spinner) {
    const startTime = Date.now();
    if (config.dryRun) {
        // Dry run - just simulate
        if (config.verbose) {
            console.log(chalk.gray(` Would execute: /${phase} ${issueNumber}`));
        }
        return {
            phase,
            success: true,
            durationSeconds: 0,
        };
    }
    const prompt = getPhasePrompt(phase, issueNumber);
    if (config.verbose) {
        console.log(chalk.gray(` Prompt: ${prompt}`));
        if (worktreePath && ISOLATED_PHASES.includes(phase)) {
            console.log(chalk.gray(` Worktree: ${worktreePath}`));
        }
    }
    // Determine working directory and environment
    const shouldUseWorktree = worktreePath && ISOLATED_PHASES.includes(phase);
    const cwd = shouldUseWorktree ? worktreePath : process.cwd();
    // Track stderr for error diagnostics (declared outside try for catch access)
    let capturedStderr = "";
    try {
        // Check if shutdown is in progress
        if (shutdownManager?.shuttingDown) {
            return {
                phase,
                success: false,
                durationSeconds: 0,
                error: "Shutdown in progress",
            };
        }
        // Create abort controller for timeout
        const abortController = new AbortController();
        const timeoutId = setTimeout(() => {
            abortController.abort();
        }, config.phaseTimeout * 1000);
        // Register abort controller with shutdown manager for graceful shutdown
        if (shutdownManager) {
            shutdownManager.setAbortController(abortController);
        }
        let resultSessionId;
        let resultMessage;
        let lastError;
        let capturedOutput = "";
        // Build environment with worktree isolation variables
        const env = {
            ...process.env,
            CLAUDE_HOOKS_SMART_TESTS: config.noSmartTests ? "false" : "true",
        };
        // Set worktree isolation environment variables
        if (shouldUseWorktree) {
            env.SEQUANT_WORKTREE = worktreePath;
            env.SEQUANT_ISSUE = String(issueNumber);
        }
        // Set orchestration context for skills to detect they're part of a workflow
        // Skills can check these to skip redundant pre-flight checks
        env.SEQUANT_ORCHESTRATOR = "sequant-run";
        env.SEQUANT_PHASE = phase;
        // Execute using Claude Agent SDK
        // Note: Don't resume sessions when switching to worktree (different cwd breaks resume)
        const canResume = sessionId && !shouldUseWorktree;
        // Get MCP servers config if enabled
        // Reads from Claude Desktop config and passes to SDK for headless MCP support
        const mcpServers = config.mcp ? getMcpServersConfig() : undefined;
        // Track whether we're actively streaming verbose output
        // Pausing spinner once per streaming session prevents truncation from rapid pause/resume cycles
        // (Issue #283: ora's stop() clears the current line, which can truncate output when
        // pause/resume is called for every chunk in rapid succession)
        let verboseStreamingActive = false;
        const queryInstance = query({
            prompt,
            options: {
                abortController,
                cwd,
                // Load project settings including skills
                settingSources: ["project"],
                // Use Claude Code's system prompt and tools
                systemPrompt: { type: "preset", preset: "claude_code" },
                tools: { type: "preset", preset: "claude_code" },
                // Bypass permissions for headless execution
                permissionMode: "bypassPermissions",
                allowDangerouslySkipPermissions: true,
                // Resume from previous session if provided (but not when switching directories)
                ...(canResume ? { resume: sessionId } : {}),
                // Configure smart tests and worktree isolation via environment
                env,
                // Pass MCP servers for headless mode (AC-2)
                ...(mcpServers ? { mcpServers } : {}),
                // Capture stderr for debugging (helps diagnose early exit failures)
                stderr: (data) => {
                    capturedStderr += data;
                    // Write stderr in verbose mode
                    if (config.verbose) {
                        // Pause spinner once to avoid truncation (Issue #283)
                        if (!verboseStreamingActive) {
                            spinner?.pause();
                            verboseStreamingActive = true;
                        }
                        process.stderr.write(chalk.red(data));
                    }
                },
            },
        });
        // Stream and process messages
        for await (const message of queryInstance) {
            // Capture session ID from system init message
            if (message.type === "system" && message.subtype === "init") {
                resultSessionId = message.session_id;
            }
            // Capture output from assistant messages
            if (message.type === "assistant") {
                // Extract text content from the message
                const content = message.message.content;
                const textContent = content
                    .filter((c) => c.type === "text" && c.text)
                    .map((c) => c.text)
                    .join("");
                if (textContent) {
                    capturedOutput += textContent;
                    // Show streaming output in verbose mode
                    if (config.verbose) {
                        // Pause spinner once at start of streaming to avoid truncation
                        // (Issue #283: repeated pause/resume causes ora to clear lines between chunks)
                        if (!verboseStreamingActive) {
                            spinner?.pause();
                            verboseStreamingActive = true;
                        }
                        process.stdout.write(chalk.gray(textContent));
                    }
                }
            }
            // Capture the final result
            if (message.type === "result") {
                resultMessage = message;
            }
        }
        // Resume spinner after streaming completes (if we paused it)
        if (verboseStreamingActive) {
            spinner?.resume();
            verboseStreamingActive = false;
        }
        clearTimeout(timeoutId);
        // Clear abort controller from shutdown manager
        if (shutdownManager) {
            shutdownManager.clearAbortController();
        }
        const durationSeconds = (Date.now() - startTime) / 1000;
        // Check result status
        if (resultMessage) {
            if (resultMessage.subtype === "success") {
                // For QA phase, check the verdict to determine actual success
                // SDK "success" just means the query completed - we need to parse the verdict
                if (phase === "qa" && capturedOutput) {
                    const verdict = parseQaVerdict(capturedOutput);
                    // Only READY_FOR_MERGE and NEEDS_VERIFICATION are considered passing
                    // NEEDS_VERIFICATION is external verification, not a code quality issue
                    if (verdict &&
                        verdict !== "READY_FOR_MERGE" &&
                        verdict !== "NEEDS_VERIFICATION") {
                        return {
                            phase,
                            success: false,
                            durationSeconds,
                            error: `QA verdict: ${verdict}`,
                            sessionId: resultSessionId,
                            output: capturedOutput,
                            verdict, // Include parsed verdict
                        };
                    }
                    // Pass case - include verdict for logging
                    return {
                        phase,
                        success: true,
                        durationSeconds,
                        sessionId: resultSessionId,
                        output: capturedOutput,
                        verdict: verdict ?? undefined, // Include if found
                    };
                }
                return {
                    phase,
                    success: true,
                    durationSeconds,
                    sessionId: resultSessionId,
                    output: capturedOutput,
                };
            }
            else {
                // Handle error subtypes
                const errorSubtype = resultMessage.subtype;
                if (errorSubtype === "error_max_turns") {
                    lastError = "Max turns reached";
                }
                else if (errorSubtype === "error_during_execution") {
                    lastError =
                        resultMessage.errors?.join(", ") || "Error during execution";
                }
                else if (errorSubtype === "error_max_budget_usd") {
                    lastError = "Budget limit exceeded";
                }
                else {
                    lastError = `Error: ${errorSubtype}`;
                }
                return {
                    phase,
                    success: false,
                    durationSeconds,
                    error: lastError,
                    sessionId: resultSessionId,
                };
            }
        }
        // No result message received
        return {
            phase,
            success: false,
            durationSeconds: (Date.now() - startTime) / 1000,
            error: "No result received from Claude",
            sessionId: resultSessionId,
        };
    }
    catch (err) {
        const durationSeconds = (Date.now() - startTime) / 1000;
        const error = err instanceof Error ? err.message : String(err);
        // Check if it was an abort (timeout)
        if (error.includes("abort") || error.includes("AbortError")) {
            return {
                phase,
                success: false,
                durationSeconds,
                error: `Timeout after ${config.phaseTimeout}s`,
            };
        }
        // Include stderr in error message if available (helps diagnose early exit failures)
        const stderrSuffix = capturedStderr
            ? `\nStderr: ${capturedStderr.slice(0, 500)}`
            : "";
        return {
            phase,
            success: false,
            durationSeconds,
            error: error + stderrSuffix,
        };
    }
}
|
|
1304
|
-
/**
|
|
1305
|
-
* Cold-start retry threshold in seconds.
|
|
1306
|
-
* Failures under this duration are likely Claude Code subprocess initialization
|
|
1307
|
-
* issues rather than genuine phase failures (based on empirical data: cold-start
|
|
1308
|
-
* failures consistently complete in 15-39s vs 150-310s for real work).
|
|
1309
|
-
*/
|
|
1310
|
-
const COLD_START_THRESHOLD_SECONDS = 60;
|
|
1311
|
-
const COLD_START_MAX_RETRIES = 2;
|
|
1312
|
-
/**
|
|
1313
|
-
* Execute a phase with automatic retry for cold-start failures and MCP fallback.
|
|
1314
|
-
*
|
|
1315
|
-
* Retry strategy:
|
|
1316
|
-
* 1. If phase fails within COLD_START_THRESHOLD_SECONDS, retry up to COLD_START_MAX_RETRIES times
|
|
1317
|
-
* 2. If still failing and MCP is enabled, retry once with MCP disabled (npx-based MCP servers
|
|
1318
|
-
* can fail on first run due to cold-cache issues)
|
|
1319
|
-
*
|
|
1320
|
-
* The MCP fallback is safe because MCP servers are optional enhancements, not required
|
|
1321
|
-
* for core functionality.
|
|
1322
|
-
*/
|
|
1323
|
-
/**
|
|
1324
|
-
* @internal Exported for testing only
|
|
1325
|
-
*/
|
|
1326
|
-
export async function executePhaseWithRetry(issueNumber, phase, config, sessionId, worktreePath, shutdownManager, spinner,
|
|
1327
|
-
/** @internal Injected for testing — defaults to module-level executePhase */
|
|
1328
|
-
executePhaseFn = executePhase) {
|
|
1329
|
-
// Skip retry logic if explicitly disabled
|
|
1330
|
-
if (config.retry === false) {
|
|
1331
|
-
return executePhaseFn(issueNumber, phase, config, sessionId, worktreePath, shutdownManager, spinner);
|
|
1332
|
-
}
|
|
1333
|
-
let lastResult;
|
|
1334
|
-
// Phase 1: Cold-start retry attempts (with MCP enabled if configured)
|
|
1335
|
-
for (let attempt = 0; attempt <= COLD_START_MAX_RETRIES; attempt++) {
|
|
1336
|
-
lastResult = await executePhaseFn(issueNumber, phase, config, sessionId, worktreePath, shutdownManager, spinner);
|
|
1337
|
-
const duration = lastResult.durationSeconds ?? 0;
|
|
1338
|
-
// Success or genuine failure (took long enough to be real work)
|
|
1339
|
-
if (lastResult.success || duration >= COLD_START_THRESHOLD_SECONDS) {
|
|
1340
|
-
return lastResult;
|
|
1341
|
-
}
|
|
1342
|
-
// Cold-start failure detected — retry
|
|
1343
|
-
if (attempt < COLD_START_MAX_RETRIES) {
|
|
1344
|
-
if (config.verbose) {
|
|
1345
|
-
console.log(chalk.yellow(`\n ⟳ Cold-start failure detected (${duration.toFixed(1)}s), retrying... (attempt ${attempt + 2}/${COLD_START_MAX_RETRIES + 1})`));
|
|
1346
|
-
}
|
|
1347
|
-
}
|
|
1348
|
-
}
|
|
1349
|
-
// Capture the original error for better diagnostics
|
|
1350
|
-
const originalError = lastResult.error;
|
|
1351
|
-
// Phase 2: MCP fallback - if MCP is enabled and we're still failing, try without MCP
|
|
1352
|
-
// This handles npx-based MCP servers that fail on first run due to cold-cache issues
|
|
1353
|
-
if (config.mcp && !lastResult.success) {
|
|
1354
|
-
console.log(chalk.yellow(`\n ⚠️ Phase failed with MCP enabled, retrying without MCP...`));
|
|
1355
|
-
// Create config copy with MCP disabled
|
|
1356
|
-
const configWithoutMcp = {
|
|
1357
|
-
...config,
|
|
1358
|
-
mcp: false,
|
|
1359
|
-
};
|
|
1360
|
-
const retryResult = await executePhaseFn(issueNumber, phase, configWithoutMcp, sessionId, worktreePath, shutdownManager, spinner);
|
|
1361
|
-
if (retryResult.success) {
|
|
1362
|
-
console.log(chalk.green(` ✓ Phase succeeded without MCP (MCP cold-start issue detected)`));
|
|
1363
|
-
return retryResult;
|
|
1364
|
-
}
|
|
1365
|
-
// Both attempts failed - return original error for better diagnostics
|
|
1366
|
-
return {
|
|
1367
|
-
...lastResult,
|
|
1368
|
-
error: originalError,
|
|
1369
|
-
};
|
|
1370
|
-
}
|
|
1371
|
-
return lastResult;
|
|
1372
|
-
}
|
|
1373
|
-
/**
|
|
1374
|
-
* Fetch issue info from GitHub
|
|
1375
|
-
*/
|
|
1376
|
-
async function getIssueInfo(issueNumber) {
|
|
1377
|
-
try {
|
|
1378
|
-
const result = spawnSync("gh", ["issue", "view", String(issueNumber), "--json", "title,labels"], { stdio: "pipe" });
|
|
1379
|
-
if (result.status === 0) {
|
|
1380
|
-
const data = JSON.parse(result.stdout.toString());
|
|
1381
|
-
return {
|
|
1382
|
-
title: data.title || `Issue #${issueNumber}`,
|
|
1383
|
-
labels: Array.isArray(data.labels)
|
|
1384
|
-
? data.labels.map((l) => l.name)
|
|
1385
|
-
: [],
|
|
1386
|
-
};
|
|
1387
|
-
}
|
|
1388
|
-
}
|
|
1389
|
-
catch {
|
|
1390
|
-
// Ignore errors, use defaults
|
|
1391
|
-
}
|
|
1392
|
-
return { title: `Issue #${issueNumber}`, labels: [] };
|
|
1393
|
-
}
|
|
1394
|
-
/**
|
|
1395
|
-
* Parse dependencies from issue body and labels
|
|
1396
|
-
* Returns array of issue numbers this issue depends on
|
|
1397
|
-
*/
|
|
1398
|
-
function parseDependencies(issueNumber) {
|
|
1399
|
-
try {
|
|
1400
|
-
const result = spawnSync("gh", ["issue", "view", String(issueNumber), "--json", "body,labels"], { stdio: "pipe" });
|
|
1401
|
-
if (result.status !== 0)
|
|
1402
|
-
return [];
|
|
1403
|
-
const data = JSON.parse(result.stdout.toString());
|
|
1404
|
-
const dependencies = [];
|
|
1405
|
-
// Parse from body: "Depends on: #123" or "**Depends on**: #123"
|
|
1406
|
-
if (data.body) {
|
|
1407
|
-
const bodyMatch = data.body.match(/\*?\*?depends\s+on\*?\*?:?\s*#?(\d+)/gi);
|
|
1408
|
-
if (bodyMatch) {
|
|
1409
|
-
for (const match of bodyMatch) {
|
|
1410
|
-
const numMatch = match.match(/(\d+)/);
|
|
1411
|
-
if (numMatch) {
|
|
1412
|
-
dependencies.push(parseInt(numMatch[1], 10));
|
|
1413
|
-
}
|
|
1414
|
-
}
|
|
1415
|
-
}
|
|
1416
|
-
}
|
|
1417
|
-
// Parse from labels: "depends-on/123" or "depends-on-123"
|
|
1418
|
-
if (data.labels && Array.isArray(data.labels)) {
|
|
1419
|
-
for (const label of data.labels) {
|
|
1420
|
-
const labelName = label.name || label;
|
|
1421
|
-
const labelMatch = labelName.match(/depends-on[-/](\d+)/i);
|
|
1422
|
-
if (labelMatch) {
|
|
1423
|
-
dependencies.push(parseInt(labelMatch[1], 10));
|
|
1424
|
-
}
|
|
1425
|
-
}
|
|
1426
|
-
}
|
|
1427
|
-
return [...new Set(dependencies)]; // Remove duplicates
|
|
1428
|
-
}
|
|
1429
|
-
catch {
|
|
1430
|
-
return [];
|
|
1431
|
-
}
|
|
1432
|
-
}
|
|
1433
|
-
/**
|
|
1434
|
-
* Sort issues by dependencies (topological sort)
|
|
1435
|
-
* Issues with no dependencies come first, then issues that depend on them
|
|
1436
|
-
*/
|
|
1437
|
-
function sortByDependencies(issueNumbers) {
|
|
1438
|
-
// Build dependency graph
|
|
1439
|
-
const dependsOn = new Map();
|
|
1440
|
-
for (const issue of issueNumbers) {
|
|
1441
|
-
const deps = parseDependencies(issue);
|
|
1442
|
-
// Only include dependencies that are in our issue list
|
|
1443
|
-
dependsOn.set(issue, deps.filter((d) => issueNumbers.includes(d)));
|
|
1444
|
-
}
|
|
1445
|
-
// Topological sort using Kahn's algorithm
|
|
1446
|
-
const inDegree = new Map();
|
|
1447
|
-
for (const issue of issueNumbers) {
|
|
1448
|
-
inDegree.set(issue, 0);
|
|
1449
|
-
}
|
|
1450
|
-
for (const deps of dependsOn.values()) {
|
|
1451
|
-
for (const dep of deps) {
|
|
1452
|
-
inDegree.set(dep, (inDegree.get(dep) || 0) + 1);
|
|
1453
|
-
}
|
|
1454
|
-
}
|
|
1455
|
-
// Note: inDegree counts how many issues depend on each issue
|
|
1456
|
-
// We want to process issues that nothing depends on last
|
|
1457
|
-
// So we sort by: issues nothing depends on first, then dependent issues
|
|
1458
|
-
const sorted = [];
|
|
1459
|
-
const queue = [];
|
|
1460
|
-
// Start with issues that have no dependencies
|
|
1461
|
-
for (const issue of issueNumbers) {
|
|
1462
|
-
const deps = dependsOn.get(issue) || [];
|
|
1463
|
-
if (deps.length === 0) {
|
|
1464
|
-
queue.push(issue);
|
|
1465
|
-
}
|
|
1466
|
-
}
|
|
1467
|
-
const visited = new Set();
|
|
1468
|
-
while (queue.length > 0) {
|
|
1469
|
-
const issue = queue.shift();
|
|
1470
|
-
if (visited.has(issue))
|
|
1471
|
-
continue;
|
|
1472
|
-
visited.add(issue);
|
|
1473
|
-
sorted.push(issue);
|
|
1474
|
-
// Find issues that depend on this one
|
|
1475
|
-
for (const [other, deps] of dependsOn.entries()) {
|
|
1476
|
-
if (deps.includes(issue) && !visited.has(other)) {
|
|
1477
|
-
// Check if all dependencies of 'other' are satisfied
|
|
1478
|
-
const allDepsSatisfied = deps.every((d) => visited.has(d));
|
|
1479
|
-
if (allDepsSatisfied) {
|
|
1480
|
-
queue.push(other);
|
|
1481
|
-
}
|
|
1482
|
-
}
|
|
1483
|
-
}
|
|
1484
|
-
}
|
|
1485
|
-
// Add any remaining issues (circular dependencies or unvisited)
|
|
1486
|
-
for (const issue of issueNumbers) {
|
|
1487
|
-
if (!visited.has(issue)) {
|
|
1488
|
-
sorted.push(issue);
|
|
1489
|
-
}
|
|
1490
|
-
}
|
|
1491
|
-
return sorted;
|
|
1492
|
-
}
|
|
1493
|
-
/**
|
|
1494
|
-
* Check if an issue has UI-related labels
|
|
1495
|
-
*/
|
|
1496
|
-
function hasUILabels(labels) {
|
|
1497
|
-
return labels.some((label) => UI_LABELS.some((uiLabel) => label.toLowerCase().includes(uiLabel)));
|
|
1498
|
-
}
|
|
1499
|
-
/**
|
|
1500
|
-
* Determine phases to run based on options and issue labels
|
|
1501
|
-
*/
|
|
1502
|
-
function determinePhasesForIssue(basePhases, labels, options) {
|
|
1503
|
-
const phases = [...basePhases];
|
|
1504
|
-
// Add testgen phase after spec if requested
|
|
1505
|
-
if (options.testgen && phases.includes("spec")) {
|
|
1506
|
-
const specIndex = phases.indexOf("spec");
|
|
1507
|
-
if (!phases.includes("testgen")) {
|
|
1508
|
-
phases.splice(specIndex + 1, 0, "testgen");
|
|
1509
|
-
}
|
|
1510
|
-
}
|
|
1511
|
-
// Auto-detect UI issues and add test phase
|
|
1512
|
-
if (hasUILabels(labels) && !phases.includes("test")) {
|
|
1513
|
-
// Add test phase before qa if present, otherwise at the end
|
|
1514
|
-
const qaIndex = phases.indexOf("qa");
|
|
1515
|
-
if (qaIndex !== -1) {
|
|
1516
|
-
phases.splice(qaIndex, 0, "test");
|
|
1517
|
-
}
|
|
1518
|
-
else {
|
|
1519
|
-
phases.push("test");
|
|
1520
|
-
}
|
|
1521
|
-
}
|
|
1522
|
-
return phases;
|
|
1523
|
-
}
|
|
1524
|
-
/**
|
|
1525
|
-
* Parse environment variables for CI configuration
|
|
1526
|
-
*/
|
|
1527
|
-
function getEnvConfig() {
|
|
1528
|
-
const config = {};
|
|
1529
|
-
if (process.env.SEQUANT_QUALITY_LOOP === "true") {
|
|
1530
|
-
config.qualityLoop = true;
|
|
1531
|
-
}
|
|
1532
|
-
if (process.env.SEQUANT_MAX_ITERATIONS) {
|
|
1533
|
-
const maxIter = parseInt(process.env.SEQUANT_MAX_ITERATIONS, 10);
|
|
1534
|
-
if (!isNaN(maxIter)) {
|
|
1535
|
-
config.maxIterations = maxIter;
|
|
1536
|
-
}
|
|
1537
|
-
}
|
|
1538
|
-
if (process.env.SEQUANT_SMART_TESTS === "false") {
|
|
1539
|
-
config.noSmartTests = true;
|
|
1540
|
-
}
|
|
1541
|
-
if (process.env.SEQUANT_TESTGEN === "true") {
|
|
1542
|
-
config.testgen = true;
|
|
1543
|
-
}
|
|
1544
|
-
return config;
|
|
1545
|
-
}
|
|
1546
|
-
/**
|
|
1547
|
-
* Parse batch arguments into groups of issues
|
|
1548
|
-
*/
|
|
1549
|
-
function parseBatches(batchArgs) {
|
|
1550
|
-
return batchArgs.map((batch) => batch
|
|
1551
|
-
.split(/\s+/)
|
|
1552
|
-
.map((n) => parseInt(n, 10))
|
|
1553
|
-
.filter((n) => !isNaN(n)));
|
|
1554
|
-
}
|
|
26
|
+
// Extracted modules
|
|
27
|
+
import { detectDefaultBranch, ensureWorktrees, ensureWorktreesChain, getWorktreeDiffStats, } from "../lib/workflow/worktree-manager.js";
|
|
28
|
+
import { formatDuration } from "../lib/workflow/phase-executor.js";
|
|
29
|
+
import { getIssueInfo, sortByDependencies, parseBatches, getEnvConfig, executeBatch, runIssueWithLogging, } from "../lib/workflow/batch-executor.js";
|
|
30
|
+
// Re-export public API for backwards compatibility
|
|
31
|
+
export { parseQaVerdict, formatDuration, executePhaseWithRetry, } from "../lib/workflow/phase-executor.js";
|
|
32
|
+
export { detectDefaultBranch, checkWorktreeFreshness, removeStaleWorktree, listWorktrees, getWorktreeChangedFiles, getWorktreeDiffStats, readCacheMetrics, filterResumedPhases, ensureWorktree, createCheckpointCommit, reinstallIfLockfileChanged, rebaseBeforePR, createPR, } from "../lib/workflow/worktree-manager.js";
|
|
33
|
+
export { detectPhasesFromLabels, parseRecommendedWorkflow, determinePhasesForIssue, } from "../lib/workflow/phase-mapper.js";
|
|
34
|
+
export { getIssueInfo, sortByDependencies, parseBatches, getEnvConfig, executeBatch, runIssueWithLogging, } from "../lib/workflow/batch-executor.js";
|
|
1555
35
|
/**
|
|
1556
36
|
* Main run command
|
|
1557
37
|
*/
|
|
@@ -1610,8 +90,10 @@ export async function runCommand(issues, options) {
|
|
|
1610
90
|
// Determine if we should auto-detect phases from labels
|
|
1611
91
|
const autoDetectPhases = !options.phases && settings.run.autoDetectPhases;
|
|
1612
92
|
mergedOptions.autoDetectPhases = autoDetectPhases;
|
|
1613
|
-
// Resolve base branch: CLI flag → settings.run.defaultBase → 'main'
|
|
1614
|
-
const resolvedBaseBranch = options.base ??
|
|
93
|
+
// Resolve base branch: CLI flag → settings.run.defaultBase → auto-detect → 'main'
|
|
94
|
+
const resolvedBaseBranch = options.base ??
|
|
95
|
+
settings.run.defaultBase ??
|
|
96
|
+
detectDefaultBranch(mergedOptions.verbose ?? false);
|
|
1615
97
|
// Parse issue numbers (or use batch mode)
|
|
1616
98
|
let issueNumbers;
|
|
1617
99
|
let batches = null;
|
|
@@ -1886,7 +368,7 @@ export async function runCommand(issues, options) {
|
|
|
1886
368
|
for (let batchIdx = 0; batchIdx < batches.length; batchIdx++) {
|
|
1887
369
|
const batch = batches[batchIdx];
|
|
1888
370
|
console.log(chalk.blue(`\n Batch ${batchIdx + 1}/${batches.length}: Issues ${batch.map((n) => `#${n}`).join(", ")}`));
|
|
1889
|
-
const batchResults = await executeBatch(batch, config, logWriter, stateManager, mergedOptions, issueInfoMap, worktreeMap, shutdown, manifest.packageManager);
|
|
371
|
+
const batchResults = await executeBatch(batch, config, logWriter, stateManager, mergedOptions, issueInfoMap, worktreeMap, shutdown, manifest.packageManager, resolvedBaseBranch);
|
|
1890
372
|
results.push(...batchResults);
|
|
1891
373
|
// Check if batch failed and we should stop
|
|
1892
374
|
const batchFailed = batchResults.some((r) => !r.success);
|
|
@@ -1912,7 +394,7 @@ export async function runCommand(issues, options) {
|
|
|
1912
394
|
const result = await runIssueWithLogging(issueNumber, config, logWriter, stateManager, issueInfo.title, issueInfo.labels, mergedOptions, worktreeInfo?.path, worktreeInfo?.branch, shutdown, mergedOptions.chain, // Enable checkpoint commits in chain mode
|
|
1913
395
|
manifest.packageManager,
|
|
1914
396
|
// In chain mode, only the last issue should trigger pre-PR rebase
|
|
1915
|
-
mergedOptions.chain ? i === issueNumbers.length - 1 : undefined);
|
|
397
|
+
mergedOptions.chain ? i === issueNumbers.length - 1 : undefined, resolvedBaseBranch);
|
|
1916
398
|
results.push(result);
|
|
1917
399
|
// Record PR info in log before completing issue
|
|
1918
400
|
if (logWriter && result.prNumber && result.prUrl) {
|
|
@@ -1972,7 +454,7 @@ export async function runCommand(issues, options) {
|
|
|
1972
454
|
logWriter.startIssue(issueNumber, issueInfo.title, issueInfo.labels);
|
|
1973
455
|
}
|
|
1974
456
|
const result = await runIssueWithLogging(issueNumber, config, logWriter, stateManager, issueInfo.title, issueInfo.labels, mergedOptions, worktreeInfo?.path, worktreeInfo?.branch, shutdown, false, // Parallel mode doesn't support chain
|
|
1975
|
-
manifest.packageManager);
|
|
457
|
+
manifest.packageManager, undefined, resolvedBaseBranch);
|
|
1976
458
|
results.push(result);
|
|
1977
459
|
// Record PR info in log before completing issue
|
|
1978
460
|
if (logWriter && result.prNumber && result.prUrl) {
|
|
@@ -2147,418 +629,3 @@ export async function runCommand(issues, options) {
|
|
|
2147
629
|
process.exit(exitCode);
|
|
2148
630
|
}
|
|
2149
631
|
}
|
|
2150
|
-
/**
|
|
2151
|
-
* Execute a batch of issues
|
|
2152
|
-
*/
|
|
2153
|
-
async function executeBatch(issueNumbers, config, logWriter, stateManager, options, issueInfoMap, worktreeMap, shutdownManager, packageManager) {
|
|
2154
|
-
const results = [];
|
|
2155
|
-
for (const issueNumber of issueNumbers) {
|
|
2156
|
-
// Check if shutdown was triggered
|
|
2157
|
-
if (shutdownManager?.shuttingDown) {
|
|
2158
|
-
break;
|
|
2159
|
-
}
|
|
2160
|
-
const issueInfo = issueInfoMap.get(issueNumber) ?? {
|
|
2161
|
-
title: `Issue #${issueNumber}`,
|
|
2162
|
-
labels: [],
|
|
2163
|
-
};
|
|
2164
|
-
const worktreeInfo = worktreeMap.get(issueNumber);
|
|
2165
|
-
// Start issue logging
|
|
2166
|
-
if (logWriter) {
|
|
2167
|
-
logWriter.startIssue(issueNumber, issueInfo.title, issueInfo.labels);
|
|
2168
|
-
}
|
|
2169
|
-
const result = await runIssueWithLogging(issueNumber, config, logWriter, stateManager, issueInfo.title, issueInfo.labels, options, worktreeInfo?.path, worktreeInfo?.branch, shutdownManager, false, // Batch mode doesn't support chain
|
|
2170
|
-
packageManager);
|
|
2171
|
-
results.push(result);
|
|
2172
|
-
// Record PR info in log before completing issue
|
|
2173
|
-
if (logWriter && result.prNumber && result.prUrl) {
|
|
2174
|
-
logWriter.setPRInfo(result.prNumber, result.prUrl);
|
|
2175
|
-
}
|
|
2176
|
-
// Complete issue logging
|
|
2177
|
-
if (logWriter) {
|
|
2178
|
-
logWriter.completeIssue();
|
|
2179
|
-
}
|
|
2180
|
-
}
|
|
2181
|
-
return results;
|
|
2182
|
-
}
|
|
2183
|
-
/**
|
|
2184
|
-
* Execute all phases for a single issue with logging and quality loop
|
|
2185
|
-
*/
|
|
2186
|
-
async function runIssueWithLogging(issueNumber, config, logWriter, stateManager, issueTitle, labels, options, worktreePath, branch, shutdownManager, chainMode, packageManager, isLastInChain) {
|
|
2187
|
-
const startTime = Date.now();
|
|
2188
|
-
const phaseResults = [];
|
|
2189
|
-
let loopTriggered = false;
|
|
2190
|
-
let sessionId;
|
|
2191
|
-
console.log(chalk.blue(`\n Issue #${issueNumber}`));
|
|
2192
|
-
if (worktreePath) {
|
|
2193
|
-
console.log(chalk.gray(` Worktree: ${worktreePath}`));
|
|
2194
|
-
}
|
|
2195
|
-
// Initialize state tracking for this issue
|
|
2196
|
-
if (stateManager) {
|
|
2197
|
-
try {
|
|
2198
|
-
const existingState = await stateManager.getIssueState(issueNumber);
|
|
2199
|
-
if (!existingState) {
|
|
2200
|
-
await stateManager.initializeIssue(issueNumber, issueTitle, {
|
|
2201
|
-
worktree: worktreePath,
|
|
2202
|
-
branch,
|
|
2203
|
-
qualityLoop: config.qualityLoop,
|
|
2204
|
-
maxIterations: config.maxIterations,
|
|
2205
|
-
});
|
|
2206
|
-
}
|
|
2207
|
-
else {
|
|
2208
|
-
// Update worktree info if it changed
|
|
2209
|
-
if (worktreePath && branch) {
|
|
2210
|
-
await stateManager.updateWorktreeInfo(issueNumber, worktreePath, branch);
|
|
2211
|
-
}
|
|
2212
|
-
}
|
|
2213
|
-
}
|
|
2214
|
-
catch (error) {
|
|
2215
|
-
// State tracking errors shouldn't stop execution
|
|
2216
|
-
if (config.verbose) {
|
|
2217
|
-
console.log(chalk.yellow(` ⚠️ State tracking error: ${error}`));
|
|
2218
|
-
}
|
|
2219
|
-
}
|
|
2220
|
-
}
|
|
2221
|
-
// Determine phases for this specific issue
|
|
2222
|
-
let phases;
|
|
2223
|
-
let detectedQualityLoop = false;
|
|
2224
|
-
let specAlreadyRan = false;
|
|
2225
|
-
if (options.autoDetectPhases) {
|
|
2226
|
-
// Check if labels indicate a simple bug/fix (skip spec entirely)
|
|
2227
|
-
const lowerLabels = labels.map((l) => l.toLowerCase());
|
|
2228
|
-
const isSimpleBugFix = lowerLabels.some((label) => BUG_LABELS.some((bugLabel) => label.includes(bugLabel)));
|
|
2229
|
-
if (isSimpleBugFix) {
|
|
2230
|
-
// Simple bug fix: skip spec, go straight to exec → qa
|
|
2231
|
-
phases = ["exec", "qa"];
|
|
2232
|
-
console.log(chalk.gray(` Bug fix detected: ${phases.join(" → ")}`));
|
|
2233
|
-
}
|
|
2234
|
-
else {
|
|
2235
|
-
// Run spec first to get recommended workflow
|
|
2236
|
-
console.log(chalk.gray(` Running spec to determine workflow...`));
|
|
2237
|
-
// Create spinner for spec phase (1 of estimated 3: spec, exec, qa)
|
|
2238
|
-
const specSpinner = new PhaseSpinner({
|
|
2239
|
-
phase: "spec",
|
|
2240
|
-
phaseIndex: 1,
|
|
2241
|
-
totalPhases: 3, // Estimate; will be refined after spec
|
|
2242
|
-
shutdownManager,
|
|
2243
|
-
});
|
|
2244
|
-
specSpinner.start();
|
|
2245
|
-
// Track spec phase start in state
|
|
2246
|
-
if (stateManager) {
|
|
2247
|
-
try {
|
|
2248
|
-
await stateManager.updatePhaseStatus(issueNumber, "spec", "in_progress");
|
|
2249
|
-
}
|
|
2250
|
-
catch {
|
|
2251
|
-
// State tracking errors shouldn't stop execution
|
|
2252
|
-
}
|
|
2253
|
-
}
|
|
2254
|
-
const specStartTime = new Date();
|
|
2255
|
-
// Note: spec runs in main repo (not worktree) for planning
|
|
2256
|
-
const specResult = await executePhaseWithRetry(issueNumber, "spec", config, sessionId, worktreePath, // Will be ignored for spec (non-isolated phase)
|
|
2257
|
-
shutdownManager, specSpinner);
|
|
2258
|
-
const specEndTime = new Date();
|
|
2259
|
-
if (specResult.sessionId) {
|
|
2260
|
-
sessionId = specResult.sessionId;
|
|
2261
|
-
// Update session ID in state for resume capability
|
|
2262
|
-
if (stateManager) {
|
|
2263
|
-
try {
|
|
2264
|
-
await stateManager.updateSessionId(issueNumber, specResult.sessionId);
|
|
2265
|
-
}
|
|
2266
|
-
catch {
|
|
2267
|
-
// State tracking errors shouldn't stop execution
|
|
2268
|
-
}
|
|
2269
|
-
}
|
|
2270
|
-
}
|
|
2271
|
-
phaseResults.push(specResult);
|
|
2272
|
-
specAlreadyRan = true;
|
|
2273
|
-
// Log spec phase result
|
|
2274
|
-
// Note: Spec runs in main repo, not worktree, so no git diff stats
|
|
2275
|
-
if (logWriter) {
|
|
2276
|
-
const phaseLog = createPhaseLogFromTiming("spec", issueNumber, specStartTime, specEndTime, specResult.success
|
|
2277
|
-
? "success"
|
|
2278
|
-
: specResult.error?.includes("Timeout")
|
|
2279
|
-
? "timeout"
|
|
2280
|
-
: "failure", { error: specResult.error });
|
|
2281
|
-
logWriter.logPhase(phaseLog);
|
|
2282
|
-
}
|
|
2283
|
-
// Track spec phase completion in state
|
|
2284
|
-
if (stateManager) {
|
|
2285
|
-
try {
|
|
2286
|
-
const phaseStatus = specResult.success ? "completed" : "failed";
|
|
2287
|
-
await stateManager.updatePhaseStatus(issueNumber, "spec", phaseStatus, {
|
|
2288
|
-
error: specResult.error,
|
|
2289
|
-
});
|
|
2290
|
-
}
|
|
2291
|
-
catch {
|
|
2292
|
-
// State tracking errors shouldn't stop execution
|
|
2293
|
-
}
|
|
2294
|
-
}
|
|
2295
|
-
if (!specResult.success) {
|
|
2296
|
-
specSpinner.fail(specResult.error);
|
|
2297
|
-
const durationSeconds = (Date.now() - startTime) / 1000;
|
|
2298
|
-
return {
|
|
2299
|
-
issueNumber,
|
|
2300
|
-
success: false,
|
|
2301
|
-
phaseResults,
|
|
2302
|
-
durationSeconds,
|
|
2303
|
-
loopTriggered: false,
|
|
2304
|
-
};
|
|
2305
|
-
}
|
|
2306
|
-
specSpinner.succeed();
|
|
2307
|
-
// Parse recommended workflow from spec output
|
|
2308
|
-
const parsedWorkflow = specResult.output
|
|
2309
|
-
? parseRecommendedWorkflow(specResult.output)
|
|
2310
|
-
: null;
|
|
2311
|
-
if (parsedWorkflow) {
|
|
2312
|
-
// Remove spec from phases since we already ran it
|
|
2313
|
-
phases = parsedWorkflow.phases.filter((p) => p !== "spec");
|
|
2314
|
-
detectedQualityLoop = parsedWorkflow.qualityLoop;
|
|
2315
|
-
console.log(chalk.gray(` Spec recommends: ${phases.join(" → ")}${detectedQualityLoop ? " (quality loop)" : ""}`));
|
|
2316
|
-
}
|
|
2317
|
-
else {
|
|
2318
|
-
// Fall back to label-based detection
|
|
2319
|
-
console.log(chalk.yellow(` Could not parse spec recommendation, using label-based detection`));
|
|
2320
|
-
const detected = detectPhasesFromLabels(labels);
|
|
2321
|
-
phases = detected.phases.filter((p) => p !== "spec");
|
|
2322
|
-
detectedQualityLoop = detected.qualityLoop;
|
|
2323
|
-
console.log(chalk.gray(` Fallback: ${phases.join(" → ")}`));
|
|
2324
|
-
}
|
|
2325
|
-
}
|
|
2326
|
-
}
|
|
2327
|
-
else {
|
|
2328
|
-
// Use explicit phases with adjustments
|
|
2329
|
-
phases = determinePhasesForIssue(config.phases, labels, options);
|
|
2330
|
-
if (phases.length !== config.phases.length) {
|
|
2331
|
-
console.log(chalk.gray(` Phases adjusted: ${phases.join(" → ")}`));
|
|
2332
|
-
}
|
|
2333
|
-
}
|
|
2334
|
-
// Resume: filter out completed phases if --resume flag is set
|
|
2335
|
-
if (options.resume) {
|
|
2336
|
-
const resumeResult = filterResumedPhases(issueNumber, phases, true);
|
|
2337
|
-
if (resumeResult.skipped.length > 0) {
|
|
2338
|
-
console.log(chalk.gray(` Resume: skipping completed phases: ${resumeResult.skipped.join(", ")}`));
|
|
2339
|
-
phases = resumeResult.phases;
|
|
2340
|
-
}
|
|
2341
|
-
// Also skip spec if it was auto-detected as completed
|
|
2342
|
-
if (specAlreadyRan &&
|
|
2343
|
-
resumeResult.skipped.length === 0 &&
|
|
2344
|
-
resumeResult.phases.length === 0) {
|
|
2345
|
-
console.log(chalk.gray(` Resume: all phases already completed`));
|
|
2346
|
-
}
|
|
2347
|
-
}
|
|
2348
|
-
// Add testgen phase if requested (and spec was in the phases)
|
|
2349
|
-
if (options.testgen &&
|
|
2350
|
-
(phases.includes("spec") || specAlreadyRan) &&
|
|
2351
|
-
!phases.includes("testgen")) {
|
|
2352
|
-
// Insert testgen at the beginning if spec already ran, otherwise after spec
|
|
2353
|
-
if (specAlreadyRan) {
|
|
2354
|
-
phases.unshift("testgen");
|
|
2355
|
-
}
|
|
2356
|
-
else {
|
|
2357
|
-
const specIndex = phases.indexOf("spec");
|
|
2358
|
-
if (specIndex !== -1) {
|
|
2359
|
-
phases.splice(specIndex + 1, 0, "testgen");
|
|
2360
|
-
}
|
|
2361
|
-
}
|
|
2362
|
-
}
|
|
2363
|
-
let iteration = 0;
|
|
2364
|
-
const useQualityLoop = config.qualityLoop || detectedQualityLoop;
|
|
2365
|
-
const maxIterations = useQualityLoop ? config.maxIterations : 1;
|
|
2366
|
-
let completedSuccessfully = false;
|
|
2367
|
-
while (iteration < maxIterations) {
|
|
2368
|
-
iteration++;
|
|
2369
|
-
if (useQualityLoop && iteration > 1) {
|
|
2370
|
-
console.log(chalk.yellow(` Quality loop iteration ${iteration}/${maxIterations}`));
|
|
2371
|
-
loopTriggered = true;
|
|
2372
|
-
}
|
|
2373
|
-
let phasesFailed = false;
|
|
2374
|
-
// Calculate total phases for progress indicator
|
|
2375
|
-
// If spec already ran in auto-detect mode, it's counted separately
|
|
2376
|
-
const totalPhases = specAlreadyRan ? phases.length + 1 : phases.length;
|
|
2377
|
-
const phaseIndexOffset = specAlreadyRan ? 1 : 0;
|
|
2378
|
-
for (let phaseIdx = 0; phaseIdx < phases.length; phaseIdx++) {
|
|
2379
|
-
const phase = phases[phaseIdx];
|
|
2380
|
-
const phaseNumber = phaseIdx + 1 + phaseIndexOffset;
|
|
2381
|
-
// Create spinner for this phase
|
|
2382
|
-
const phaseSpinner = new PhaseSpinner({
|
|
2383
|
-
phase,
|
|
2384
|
-
phaseIndex: phaseNumber,
|
|
2385
|
-
totalPhases,
|
|
2386
|
-
shutdownManager,
|
|
2387
|
-
iteration: useQualityLoop ? iteration : undefined,
|
|
2388
|
-
});
|
|
2389
|
-
phaseSpinner.start();
|
|
2390
|
-
// Track phase start in state
|
|
2391
|
-
if (stateManager) {
|
|
2392
|
-
try {
|
|
2393
|
-
await stateManager.updatePhaseStatus(issueNumber, phase, "in_progress");
|
|
2394
|
-
}
|
|
2395
|
-
catch {
|
|
2396
|
-
// State tracking errors shouldn't stop execution
|
|
2397
|
-
}
|
|
2398
|
-
}
|
|
2399
|
-
const phaseStartTime = new Date();
|
|
2400
|
-
const result = await executePhaseWithRetry(issueNumber, phase, config, sessionId, worktreePath, shutdownManager, phaseSpinner);
|
|
2401
|
-
const phaseEndTime = new Date();
|
|
2402
|
-
// Capture session ID for subsequent phases
|
|
2403
|
-
if (result.sessionId) {
|
|
2404
|
-
sessionId = result.sessionId;
|
|
2405
|
-
// Update session ID in state for resume capability
|
|
2406
|
-
if (stateManager) {
|
|
2407
|
-
try {
|
|
2408
|
-
await stateManager.updateSessionId(issueNumber, result.sessionId);
|
|
2409
|
-
}
|
|
2410
|
-
catch {
|
|
2411
|
-
// State tracking errors shouldn't stop execution
|
|
2412
|
-
}
|
|
2413
|
-
}
|
|
2414
|
-
}
|
|
2415
|
-
phaseResults.push(result);
|
|
2416
|
-
// Log phase result with observability data (AC-1, AC-2, AC-3, AC-7)
|
|
2417
|
-
if (logWriter) {
|
|
2418
|
-
// Capture git diff stats for worktree phases (AC-1, AC-3)
|
|
2419
|
-
const diffStats = worktreePath
|
|
2420
|
-
? getGitDiffStats(worktreePath)
|
|
2421
|
-
: undefined;
|
|
2422
|
-
// Capture commit hash after phase (AC-2)
|
|
2423
|
-
const commitHash = worktreePath
|
|
2424
|
-
? getCommitHash(worktreePath)
|
|
2425
|
-
: undefined;
|
|
2426
|
-
// Read cache metrics for QA phase (AC-7)
|
|
2427
|
-
const cacheMetrics = phase === "qa" ? readCacheMetrics(worktreePath) : undefined;
|
|
2428
|
-
const phaseLog = createPhaseLogFromTiming(phase, issueNumber, phaseStartTime, phaseEndTime, result.success
|
|
2429
|
-
? "success"
|
|
2430
|
-
: result.error?.includes("Timeout")
|
|
2431
|
-
? "timeout"
|
|
2432
|
-
: "failure", {
|
|
2433
|
-
error: result.error,
|
|
2434
|
-
verdict: result.verdict,
|
|
2435
|
-
// Observability fields (AC-1, AC-2, AC-3, AC-7)
|
|
2436
|
-
filesModified: diffStats?.filesModified,
|
|
2437
|
-
fileDiffStats: diffStats?.fileDiffStats,
|
|
2438
|
-
commitHash,
|
|
2439
|
-
cacheMetrics,
|
|
2440
|
-
});
|
|
2441
|
-
logWriter.logPhase(phaseLog);
|
|
2442
|
-
}
|
|
2443
|
-
// Track phase completion in state
|
|
2444
|
-
if (stateManager) {
|
|
2445
|
-
try {
|
|
2446
|
-
const phaseStatus = result.success
|
|
2447
|
-
? "completed"
|
|
2448
|
-
: result.error?.includes("Timeout")
|
|
2449
|
-
? "failed"
|
|
2450
|
-
: "failed";
|
|
2451
|
-
await stateManager.updatePhaseStatus(issueNumber, phase, phaseStatus, { error: result.error });
|
|
2452
|
-
}
|
|
2453
|
-
catch {
|
|
2454
|
-
// State tracking errors shouldn't stop execution
|
|
2455
|
-
}
|
|
2456
|
-
}
|
|
2457
|
-
if (result.success) {
|
|
2458
|
-
phaseSpinner.succeed();
|
|
2459
|
-
}
|
|
2460
|
-
else {
|
|
2461
|
-
phaseSpinner.fail(result.error);
|
|
2462
|
-
phasesFailed = true;
|
|
2463
|
-
// If quality loop enabled, run loop phase to fix issues
|
|
2464
|
-
if (useQualityLoop && iteration < maxIterations) {
|
|
2465
|
-
// Create spinner for loop phase
|
|
2466
|
-
const loopSpinner = new PhaseSpinner({
|
|
2467
|
-
phase: "loop",
|
|
2468
|
-
phaseIndex: phaseNumber,
|
|
2469
|
-
totalPhases,
|
|
2470
|
-
shutdownManager,
|
|
2471
|
-
iteration,
|
|
2472
|
-
});
|
|
2473
|
-
loopSpinner.start();
|
|
2474
|
-
const loopResult = await executePhaseWithRetry(issueNumber, "loop", config, sessionId, worktreePath, shutdownManager, loopSpinner);
|
|
2475
|
-
phaseResults.push(loopResult);
|
|
2476
|
-
if (loopResult.sessionId) {
|
|
2477
|
-
sessionId = loopResult.sessionId;
|
|
2478
|
-
}
|
|
2479
|
-
if (loopResult.success) {
|
|
2480
|
-
loopSpinner.succeed();
|
|
2481
|
-
// Continue to next iteration
|
|
2482
|
-
break;
|
|
2483
|
-
}
|
|
2484
|
-
else {
|
|
2485
|
-
loopSpinner.fail(loopResult.error);
|
|
2486
|
-
}
|
|
2487
|
-
}
|
|
2488
|
-
// Stop on first failure (if not in quality loop or loop failed)
|
|
2489
|
-
break;
|
|
2490
|
-
}
|
|
2491
|
-
}
|
|
2492
|
-
// If all phases passed, exit the loop
|
|
2493
|
-
if (!phasesFailed) {
|
|
2494
|
-
completedSuccessfully = true;
|
|
2495
|
-
break;
|
|
2496
|
-
}
|
|
2497
|
-
// If we're not in quality loop mode, don't retry
|
|
2498
|
-
if (!config.qualityLoop) {
|
|
2499
|
-
break;
|
|
2500
|
-
}
|
|
2501
|
-
}
|
|
2502
|
-
const durationSeconds = (Date.now() - startTime) / 1000;
|
|
2503
|
-
// Success is determined by whether all phases completed in any iteration,
|
|
2504
|
-
// not whether all accumulated phase results passed (which would fail after loop recovery)
|
|
2505
|
-
const success = completedSuccessfully;
|
|
2506
|
-
// Update final issue status in state
|
|
2507
|
-
if (stateManager) {
|
|
2508
|
-
try {
|
|
2509
|
-
const finalStatus = success ? "ready_for_merge" : "in_progress";
|
|
2510
|
-
await stateManager.updateIssueStatus(issueNumber, finalStatus);
|
|
2511
|
-
}
|
|
2512
|
-
catch {
|
|
2513
|
-
// State tracking errors shouldn't stop execution
|
|
2514
|
-
}
|
|
2515
|
-
}
|
|
2516
|
-
// Create checkpoint commit in chain mode after QA passes
|
|
2517
|
-
if (success && chainMode && worktreePath) {
|
|
2518
|
-
createCheckpointCommit(worktreePath, issueNumber, config.verbose);
|
|
2519
|
-
}
|
|
2520
|
-
// Rebase onto origin/main before PR creation (unless --no-rebase)
|
|
2521
|
-
// This ensures the branch is up-to-date and prevents lockfile drift
|
|
2522
|
-
// AC-1: Non-chain mode rebases onto origin/main before PR
|
|
2523
|
-
// AC-2: Chain mode rebases only the final branch onto origin/main before PR
|
|
2524
|
-
// (intermediate branches must stay based on their predecessor)
|
|
2525
|
-
const shouldRebase = success &&
|
|
2526
|
-
worktreePath &&
|
|
2527
|
-
!options.noRebase &&
|
|
2528
|
-
(!chainMode || isLastInChain);
|
|
2529
|
-
if (shouldRebase) {
|
|
2530
|
-
rebaseBeforePR(worktreePath, issueNumber, packageManager, config.verbose);
|
|
2531
|
-
}
|
|
2532
|
-
// Create PR after successful QA + rebase (unless --no-pr)
|
|
2533
|
-
let prNumber;
|
|
2534
|
-
let prUrl;
|
|
2535
|
-
const shouldCreatePR = success && worktreePath && branch && !options.noPr;
|
|
2536
|
-
if (shouldCreatePR) {
|
|
2537
|
-
const prResult = createPR(worktreePath, issueNumber, issueTitle, branch, config.verbose, labels);
|
|
2538
|
-
if (prResult.success && prResult.prNumber && prResult.prUrl) {
|
|
2539
|
-
prNumber = prResult.prNumber;
|
|
2540
|
-
prUrl = prResult.prUrl;
|
|
2541
|
-
// Update workflow state with PR info
|
|
2542
|
-
if (stateManager) {
|
|
2543
|
-
try {
|
|
2544
|
-
await stateManager.updatePRInfo(issueNumber, {
|
|
2545
|
-
number: prResult.prNumber,
|
|
2546
|
-
url: prResult.prUrl,
|
|
2547
|
-
});
|
|
2548
|
-
}
|
|
2549
|
-
catch {
|
|
2550
|
-
// State tracking errors shouldn't stop execution
|
|
2551
|
-
}
|
|
2552
|
-
}
|
|
2553
|
-
}
|
|
2554
|
-
}
|
|
2555
|
-
return {
|
|
2556
|
-
issueNumber,
|
|
2557
|
-
success,
|
|
2558
|
-
phaseResults,
|
|
2559
|
-
durationSeconds,
|
|
2560
|
-
loopTriggered,
|
|
2561
|
-
prNumber,
|
|
2562
|
-
prUrl,
|
|
2563
|
-
};
|
|
2564
|
-
}
|