@blockspool/cli 0.4.1 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/blockspool.d.ts +16 -0
- package/dist/bin/blockspool.d.ts.map +1 -0
- package/dist/bin/blockspool.js +45 -0
- package/dist/bin/blockspool.js.map +1 -0
- package/dist/commands/solo-auto.d.ts +6 -0
- package/dist/commands/solo-auto.d.ts.map +1 -0
- package/dist/commands/solo-auto.js +418 -0
- package/dist/commands/solo-auto.js.map +1 -0
- package/dist/commands/solo-exec.d.ts +6 -0
- package/dist/commands/solo-exec.d.ts.map +1 -0
- package/dist/commands/solo-exec.js +656 -0
- package/dist/commands/solo-exec.js.map +1 -0
- package/dist/commands/solo-inspect.d.ts +6 -0
- package/dist/commands/solo-inspect.d.ts.map +1 -0
- package/dist/commands/solo-inspect.js +690 -0
- package/dist/commands/solo-inspect.js.map +1 -0
- package/dist/commands/solo-lifecycle.d.ts +6 -0
- package/dist/commands/solo-lifecycle.d.ts.map +1 -0
- package/dist/commands/solo-lifecycle.js +188 -0
- package/dist/commands/solo-lifecycle.js.map +1 -0
- package/dist/commands/solo-nudge.d.ts +6 -0
- package/dist/commands/solo-nudge.d.ts.map +1 -0
- package/dist/commands/solo-nudge.js +49 -0
- package/dist/commands/solo-nudge.js.map +1 -0
- package/dist/commands/solo-qa.d.ts +6 -0
- package/dist/commands/solo-qa.d.ts.map +1 -0
- package/dist/commands/solo-qa.js +254 -0
- package/dist/commands/solo-qa.js.map +1 -0
- package/dist/commands/solo.d.ts +11 -0
- package/dist/commands/solo.d.ts.map +1 -0
- package/dist/commands/solo.js +43 -0
- package/dist/commands/solo.js.map +1 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +18 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/artifacts.d.ts +136 -0
- package/dist/lib/artifacts.d.ts.map +1 -0
- package/dist/lib/artifacts.js +146 -0
- package/dist/lib/artifacts.js.map +1 -0
- package/dist/lib/doctor.d.ts +45 -0
- package/dist/lib/doctor.d.ts.map +1 -0
- package/dist/lib/doctor.js +383 -0
- package/dist/lib/doctor.js.map +1 -0
- package/dist/lib/exec.d.ts +24 -0
- package/dist/lib/exec.d.ts.map +1 -0
- package/dist/lib/exec.js +295 -0
- package/dist/lib/exec.js.map +1 -0
- package/dist/lib/formulas.d.ts +78 -0
- package/dist/lib/formulas.d.ts.map +1 -0
- package/dist/lib/formulas.js +295 -0
- package/dist/lib/formulas.js.map +1 -0
- package/dist/lib/git.d.ts +9 -0
- package/dist/lib/git.d.ts.map +1 -0
- package/dist/lib/git.js +60 -0
- package/dist/lib/git.js.map +1 -0
- package/dist/lib/guidelines.d.ts +43 -0
- package/dist/lib/guidelines.d.ts.map +1 -0
- package/dist/lib/guidelines.js +195 -0
- package/dist/lib/guidelines.js.map +1 -0
- package/dist/lib/logger.d.ts +17 -0
- package/dist/lib/logger.d.ts.map +1 -0
- package/dist/lib/logger.js +42 -0
- package/dist/lib/logger.js.map +1 -0
- package/dist/lib/retention.d.ts +62 -0
- package/dist/lib/retention.d.ts.map +1 -0
- package/dist/lib/retention.js +285 -0
- package/dist/lib/retention.js.map +1 -0
- package/dist/lib/run-history.d.ts +52 -0
- package/dist/lib/run-history.d.ts.map +1 -0
- package/dist/lib/run-history.js +116 -0
- package/dist/lib/run-history.js.map +1 -0
- package/dist/lib/run-state.d.ts +58 -0
- package/dist/lib/run-state.d.ts.map +1 -0
- package/dist/lib/run-state.js +119 -0
- package/dist/lib/run-state.js.map +1 -0
- package/dist/lib/scope.d.ts +95 -0
- package/dist/lib/scope.d.ts.map +1 -0
- package/dist/lib/scope.js +291 -0
- package/dist/lib/scope.js.map +1 -0
- package/dist/lib/selection.d.ts +35 -0
- package/dist/lib/selection.d.ts.map +1 -0
- package/dist/lib/selection.js +110 -0
- package/dist/lib/selection.js.map +1 -0
- package/dist/lib/solo-auto.d.ts +87 -0
- package/dist/lib/solo-auto.d.ts.map +1 -0
- package/dist/lib/solo-auto.js +1230 -0
- package/dist/lib/solo-auto.js.map +1 -0
- package/dist/lib/solo-ci.d.ts +84 -0
- package/dist/lib/solo-ci.d.ts.map +1 -0
- package/dist/lib/solo-ci.js +300 -0
- package/dist/lib/solo-ci.js.map +1 -0
- package/dist/lib/solo-config.d.ts +155 -0
- package/dist/lib/solo-config.d.ts.map +1 -0
- package/dist/lib/solo-config.js +236 -0
- package/dist/lib/solo-config.js.map +1 -0
- package/dist/lib/solo-git.d.ts +44 -0
- package/dist/lib/solo-git.d.ts.map +1 -0
- package/dist/lib/solo-git.js +174 -0
- package/dist/lib/solo-git.js.map +1 -0
- package/dist/lib/solo-hints.d.ts +32 -0
- package/dist/lib/solo-hints.d.ts.map +1 -0
- package/dist/lib/solo-hints.js +98 -0
- package/dist/lib/solo-hints.js.map +1 -0
- package/dist/lib/solo-remote.d.ts +14 -0
- package/dist/lib/solo-remote.d.ts.map +1 -0
- package/dist/lib/solo-remote.js +48 -0
- package/dist/lib/solo-remote.js.map +1 -0
- package/dist/lib/solo-stdin.d.ts +13 -0
- package/dist/lib/solo-stdin.d.ts.map +1 -0
- package/dist/lib/solo-stdin.js +33 -0
- package/dist/lib/solo-stdin.js.map +1 -0
- package/dist/lib/solo-ticket.d.ts +213 -0
- package/dist/lib/solo-ticket.d.ts.map +1 -0
- package/dist/lib/solo-ticket.js +850 -0
- package/dist/lib/solo-ticket.js.map +1 -0
- package/dist/lib/solo-utils.d.ts +133 -0
- package/dist/lib/solo-utils.d.ts.map +1 -0
- package/dist/lib/solo-utils.js +300 -0
- package/dist/lib/solo-utils.js.map +1 -0
- package/dist/lib/spindle.d.ts +144 -0
- package/dist/lib/spindle.d.ts.map +1 -0
- package/dist/lib/spindle.js +388 -0
- package/dist/lib/spindle.js.map +1 -0
- package/dist/tui/app.d.ts +17 -0
- package/dist/tui/app.d.ts.map +1 -0
- package/dist/tui/app.js +139 -0
- package/dist/tui/app.js.map +1 -0
- package/dist/tui/index.d.ts +8 -0
- package/dist/tui/index.d.ts.map +1 -0
- package/dist/tui/index.js +7 -0
- package/dist/tui/index.js.map +1 -0
- package/dist/tui/poller.d.ts +42 -0
- package/dist/tui/poller.d.ts.map +1 -0
- package/dist/tui/poller.js +62 -0
- package/dist/tui/poller.js.map +1 -0
- package/dist/tui/screens/overview.d.ts +9 -0
- package/dist/tui/screens/overview.d.ts.map +1 -0
- package/dist/tui/screens/overview.js +189 -0
- package/dist/tui/screens/overview.js.map +1 -0
- package/dist/tui/state.d.ts +93 -0
- package/dist/tui/state.d.ts.map +1 -0
- package/dist/tui/state.js +169 -0
- package/dist/tui/state.js.map +1 -0
- package/dist/tui/types.d.ts +18 -0
- package/dist/tui/types.d.ts.map +1 -0
- package/dist/tui/types.js +5 -0
- package/dist/tui/types.js.map +1 -0
- package/package.json +1 -1
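
The bulk of this diff is the new `solo` command family and its supporting `dist/lib/solo-*` modules; the largest addition, `package/dist/lib/solo-ticket.js`, is shown in full below. As a quick orientation, here is a hypothetical caller of the exported `buildTicketPrompt` helper. It is not part of the package; the ticket fields and the deep import specifier are assumptions inferred from the code in the diff that follows.

// Hypothetical usage sketch (not shipped in the package).
// The deep import path is an assumption; the package's export map is not shown in this diff.
import { buildTicketPrompt } from '@blockspool/cli/dist/lib/solo-ticket.js';

const ticket = {
    title: 'Add retry logic to the fetch helper',
    description: 'Wrap fetchJson in exponential backoff.',
    allowedPaths: ['src/lib/fetch.ts', 'src/lib/__tests__/'],
    forbiddenPaths: ['src/config/'],
    verificationCommands: ['npm test', 'npm run lint'],
};

// Produces a markdown prompt: "# Task: ...", optional Allowed/Forbidden Paths and
// Verification sections, then the fixed four-step Instructions list.
console.log(buildTicketPrompt(ticket));
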
package/dist/lib/solo-ticket.js
@@ -0,0 +1,850 @@
+/**
+ * Solo mode ticket execution
+ */
+import * as path from 'node:path';
+import * as fs from 'node:fs';
+import { spawn } from 'node:child_process';
+import { runQa, getQaRunDetails, } from '@blockspool/core/services';
+import { projects, runSteps } from '@blockspool/core/repos';
+import { writeJsonArtifact, } from '../lib/artifacts.js';
+import { checkScopeViolations, parseChangedFiles, analyzeViolationsForExpansion, } from '../lib/scope.js';
+import { checkSpindleLoop, createSpindleState, DEFAULT_SPINDLE_CONFIG, formatSpindleResult, } from '../lib/spindle.js';
+import { createExecRunner } from '../lib/exec.js';
+import { createLogger } from '../lib/logger.js';
+import { getBlockspoolDir } from './solo-config.js';
+import { normalizeQaConfig } from './solo-utils.js';
+import { withGitMutex, gitExec, cleanupWorktree } from './solo-git.js';
+import { generateSpindleRecommendations } from './solo-ci.js';
+/**
+ * Exit codes for solo run
+ */
+export const EXIT_CODES = {
+    SUCCESS: 0,
+    FAILURE: 1,
+    SPINDLE_ABORT: 2,
+    SIGINT: 130,
+};
+/**
+ * Execution step definitions
+ */
+const EXECUTE_STEPS = [
+    { name: 'worktree', kind: 'git' },
+    { name: 'agent', kind: 'internal' },
+    { name: 'scope', kind: 'internal' },
+    { name: 'commit', kind: 'git' },
+    { name: 'push', kind: 'git' },
+    { name: 'qa', kind: 'command' },
+    { name: 'pr', kind: 'git' },
+    { name: 'cleanup', kind: 'internal' },
+];
+/**
+ * Build the prompt for Claude from a ticket
+ */
+export function buildTicketPrompt(ticket, guidelinesContext) {
+    const parts = [];
+    if (guidelinesContext) {
+        parts.push(guidelinesContext, '');
+    }
+    parts.push(`# Task: ${ticket.title}`, '', ticket.description ?? '', '');
+    if (ticket.allowedPaths.length > 0) {
+        parts.push('## Allowed Paths');
+        parts.push('Only modify files in these paths:');
+        for (const p of ticket.allowedPaths) {
+            parts.push(`- ${p}`);
+        }
+        parts.push('');
+    }
+    if (ticket.forbiddenPaths.length > 0) {
+        parts.push('## Forbidden Paths');
+        parts.push('Do NOT modify files in these paths:');
+        for (const p of ticket.forbiddenPaths) {
+            parts.push(`- ${p}`);
+        }
+        parts.push('');
+    }
+    if (ticket.verificationCommands.length > 0) {
+        parts.push('## Verification');
+        parts.push('After making changes, verify with:');
+        for (const cmd of ticket.verificationCommands) {
+            parts.push(`- \`${cmd}\``);
+        }
+        parts.push('');
+    }
+    parts.push('## Instructions');
+    parts.push('1. Analyze the codebase to understand the context');
+    parts.push('2. Implement the required changes');
+    parts.push('3. Ensure all verification commands pass');
+    parts.push('4. Keep changes minimal and focused');
+    return parts.join('\n');
+}
+/**
+ * Claude Code CLI execution backend (default)
+ */
+export class ClaudeExecutionBackend {
+    name = 'claude';
+    run(opts) {
+        return runClaude(opts);
+    }
+}
+/**
+ * Codex CLI execution backend
+ *
+ * Default: `--sandbox workspace-write --ask-for-approval never` (safe unattended mode).
+ * Optional: `unsafeBypassSandbox` enables `--dangerously-bypass-approvals-and-sandbox`
+ * for use inside externally hardened/isolated runners only.
+ */
+export class CodexExecutionBackend {
+    name = 'codex';
+    apiKey;
+    model;
+    unsafeBypassSandbox;
+    constructor(opts) {
+        this.apiKey = opts?.apiKey;
+        this.model = opts?.model ?? 'gpt-5.2-codex';
+        this.unsafeBypassSandbox = opts?.unsafeBypassSandbox ?? false;
+    }
+    async run(opts) {
+        const { worktreePath, prompt, timeoutMs, verbose, onProgress } = opts;
+        const startTime = Date.now();
+        const { mkdtempSync, readFileSync, rmSync } = await import('node:fs');
+        const { tmpdir } = await import('node:os');
+        const { join } = await import('node:path');
+        const tmpDir = mkdtempSync(join(tmpdir(), 'blockspool-codex-exec-'));
+        const outPath = join(tmpDir, 'output.md');
+        try {
+            return await new Promise((resolve) => {
+                const args = ['exec', '--json', '--output-last-message', outPath];
+                if (this.unsafeBypassSandbox) {
+                    args.push('--dangerously-bypass-approvals-and-sandbox');
+                }
+                else {
+                    args.push('--sandbox', 'workspace-write');
+                    args.push('--ask-for-approval', 'never');
+                }
+                args.push('--model', this.model);
+                args.push('--cd', worktreePath);
+                args.push('-');
+                const env = { ...process.env };
+                if (this.apiKey) {
+                    env.CODEX_API_KEY = this.apiKey;
+                }
+                const proc = spawn('codex', args, {
+                    cwd: worktreePath,
+                    env,
+                    stdio: ['pipe', 'pipe', 'pipe'],
+                });
+                let stdout = '';
+                let stderr = '';
+                let timedOut = false;
+                const timer = setTimeout(() => {
+                    timedOut = true;
+                    proc.kill('SIGTERM');
+                    setTimeout(() => proc.kill('SIGKILL'), 5000);
+                }, timeoutMs);
+                proc.stdin.write(prompt);
+                proc.stdin.end();
+                proc.stdout.on('data', (data) => {
+                    const text = data.toString();
+                    stdout += text;
+                    if (verbose) {
+                        onProgress(text.trim().slice(0, 100));
+                    }
+                });
+                proc.stderr.on('data', (data) => {
+                    stderr += data.toString();
+                });
+                proc.on('close', (code) => {
+                    clearTimeout(timer);
+                    const durationMs = Date.now() - startTime;
+                    if (timedOut) {
+                        resolve({ success: false, error: `Timed out after ${timeoutMs}ms`, stdout, stderr, exitCode: code, timedOut: true, durationMs });
+                        return;
+                    }
+                    // Prefer --output-last-message file over stdout (stdout is JSONL telemetry)
+                    let output = stdout;
+                    try {
+                        output = readFileSync(outPath, 'utf-8');
+                    }
+                    catch {
+                        // Fall back to stdout if file wasn't written
+                    }
+                    if (code !== 0) {
+                        resolve({ success: false, error: `codex exited with code ${code}: ${stderr.slice(0, 200)}`, stdout: output, stderr, exitCode: code, timedOut: false, durationMs });
+                        return;
+                    }
+                    resolve({ success: true, stdout: output, stderr, exitCode: code, timedOut: false, durationMs });
+                });
+                proc.on('error', (err) => {
+                    clearTimeout(timer);
+                    resolve({ success: false, error: err.message, stdout, stderr, exitCode: null, timedOut: false, durationMs: Date.now() - startTime });
+                });
+            });
+        }
+        finally {
+            try {
+                rmSync(tmpDir, { recursive: true, force: true });
+            }
+            catch { /* best-effort */ }
+        }
+    }
+}
+/**
+ * Run Claude Code CLI
+ */
+export async function runClaude(opts) {
+    const { worktreePath, prompt, timeoutMs, verbose, onProgress } = opts;
+    // Gate: require ANTHROPIC_API_KEY for automated Claude Code usage
+    if (!process.env.ANTHROPIC_API_KEY) {
+        throw new Error('Running Claude Code in automation requires ANTHROPIC_API_KEY.\n' +
+            'Set the env var for API access, or use the BlockSpool plugin (/blockspool:run) inside Claude Code.');
+    }
+    const startTime = Date.now();
+    return new Promise((resolve) => {
+        const claude = spawn('claude', ['-p', '--dangerously-skip-permissions'], {
+            cwd: worktreePath,
+            env: { ...process.env, CLAUDE_CODE_NON_INTERACTIVE: '1' },
+            stdio: ['pipe', 'pipe', 'pipe'],
+        });
+        let stdout = '';
+        let stderr = '';
+        let timedOut = false;
+        const timer = setTimeout(() => {
+            timedOut = true;
+            claude.kill('SIGTERM');
+        }, timeoutMs);
+        claude.stdin.write(prompt);
+        claude.stdin.end();
+        claude.stdout.on('data', (data) => {
+            const text = data.toString();
+            stdout += text;
+            if (verbose) {
+                onProgress(text.trim().slice(0, 100));
+            }
+        });
+        claude.stderr.on('data', (data) => {
+            stderr += data.toString();
+        });
+        claude.on('close', (code) => {
+            clearTimeout(timer);
+            const durationMs = Date.now() - startTime;
+            if (timedOut) {
+                resolve({
+                    success: false,
+                    error: `Timed out after ${timeoutMs}ms`,
+                    stdout,
+                    stderr,
+                    exitCode: code,
+                    timedOut: true,
+                    durationMs,
+                });
+                return;
+            }
+            if (code !== 0) {
+                resolve({
+                    success: false,
+                    error: `Claude exited with code ${code}: ${stderr.slice(0, 200)}`,
+                    stdout,
+                    stderr,
+                    exitCode: code,
+                    timedOut: false,
+                    durationMs,
+                });
+                return;
+            }
+            resolve({
+                success: true,
+                stdout,
+                stderr,
+                exitCode: code,
+                timedOut: false,
+                durationMs,
+            });
+        });
+        claude.on('error', (err) => {
+            clearTimeout(timer);
+            resolve({
+                success: false,
+                error: err.message,
+                stdout,
+                stderr,
+                exitCode: null,
+                timedOut: false,
+                durationMs: Date.now() - startTime,
+            });
+        });
+    });
+}
+/**
+ * Execute a ticket in isolation with step tracking
+ *
+ * Steps tracked: worktree → agent → commit → push → qa → pr → cleanup
+ */
+export async function soloRunTicket(opts) {
+    const { ticket, repoRoot, config, adapter, runId, skipQa, createPr, draftPr = false, timeoutMs, verbose, onProgress, } = opts;
+    const startTime = Date.now();
+    const branchName = `blockspool/${ticket.id}`;
+    const worktreePath = path.join(repoRoot, '.blockspool', 'worktrees', ticket.id);
+    const baseDir = getBlockspoolDir(repoRoot);
+    // Create all steps upfront
+    const stepRecords = new Map();
+    for (let i = 0; i < EXECUTE_STEPS.length; i++) {
+        const stepDef = EXECUTE_STEPS[i];
+        const step = await runSteps.create(adapter, {
+            runId,
+            ordinal: i,
+            name: stepDef.name,
+            kind: stepDef.kind,
+        });
+        stepRecords.set(stepDef.name, step);
+    }
+    // Track artifact paths for run summary
+    const artifactPaths = {};
+    // Initialize Spindle state for loop detection
+    const spindleConfig = {
+        ...DEFAULT_SPINDLE_CONFIG,
+        ...config?.spindle,
+    };
+    const spindleState = createSpindleState();
+    // Track step results for run summary
+    const stepResults = [];
+    // Helper to save run summary artifact
+    async function saveRunSummary(result) {
+        const summary = {
+            runId,
+            ticketId: ticket.id,
+            ticketTitle: ticket.title,
+            projectId: ticket.projectId,
+            success: result.success,
+            startedAt: new Date(startTime).toISOString(),
+            completedAt: new Date().toISOString(),
+            durationMs: result.durationMs,
+            branchName: result.branchName,
+            prUrl: result.prUrl,
+            error: result.error,
+            steps: stepResults.map(s => ({
+                name: s.name,
+                status: s.status,
+                durationMs: s.startedAt && s.completedAt ? s.completedAt - s.startedAt : undefined,
+                errorMessage: s.errorMessage,
+            })),
+            artifacts: artifactPaths,
+        };
+        return writeJsonArtifact({
+            baseDir,
+            type: 'runs',
+            id: runId,
+            data: summary,
+        });
+    }
+    // Helper to mark step progress
+    async function markStep(name, status, markOpts) {
+        const step = stepRecords.get(name);
+        if (!step)
+            return;
+        onProgress(`${name}...`);
+        let stepResult = stepResults.find(s => s.name === name);
+        if (!stepResult) {
+            stepResult = { name, status: 'skipped' };
+            stepResults.push(stepResult);
+        }
+        switch (status) {
+            case 'started':
+                stepResult.startedAt = Date.now();
+                await runSteps.markStarted(adapter, step.id);
+                break;
+            case 'success':
+                stepResult.status = 'success';
+                stepResult.completedAt = Date.now();
+                await runSteps.markSuccess(adapter, step.id, markOpts);
+                break;
+            case 'failed':
+                stepResult.status = 'failed';
+                stepResult.completedAt = Date.now();
+                stepResult.errorMessage = markOpts?.errorMessage;
+                await runSteps.markFailed(adapter, step.id, {
+                    errorMessage: markOpts?.errorMessage,
+                    metadata: markOpts?.metadata,
+                });
+                break;
+            case 'skipped':
+                stepResult.status = 'skipped';
+                stepResult.errorMessage = markOpts?.errorMessage;
+                await runSteps.markSkipped(adapter, step.id, markOpts?.errorMessage);
+                break;
+        }
+    }
+    // Helper to mark remaining steps as skipped
+    async function skipRemaining(fromIndex, reason) {
+        for (let i = fromIndex; i < EXECUTE_STEPS.length; i++) {
+            await markStep(EXECUTE_STEPS[i].name, 'skipped', { errorMessage: reason });
+        }
+    }
+    try {
+        // Step 1: Create worktree
+        await markStep('worktree', 'started');
+        const worktreesDir = path.join(repoRoot, '.blockspool', 'worktrees');
+        if (!fs.existsSync(worktreesDir)) {
+            fs.mkdirSync(worktreesDir, { recursive: true });
+        }
+        let baseBranch = 'master';
+        await withGitMutex(async () => {
+            if (fs.existsSync(worktreePath)) {
+                await gitExec(`git worktree remove --force "${worktreePath}"`, { cwd: repoRoot });
+            }
+            if (opts.baseBranch) {
+                // Use provided base branch (e.g. milestone branch)
+                baseBranch = opts.baseBranch;
+            }
+            else {
+                let detectedBranch = 'master';
+                try {
+                    const remoteHead = (await gitExec('git symbolic-ref refs/remotes/origin/HEAD 2>/dev/null || echo "refs/remotes/origin/master"', { cwd: repoRoot })).trim();
+                    detectedBranch = remoteHead.replace('refs/remotes/origin/', '');
+                }
+                catch {
+                    // Fall back to master
+                }
+                baseBranch = detectedBranch;
+                try {
+                    await gitExec(`git fetch origin ${baseBranch}`, { cwd: repoRoot });
+                }
+                catch {
+                    // Fetch failed, continue with what we have
+                }
+            }
+            const branchBase = opts.baseBranch ? opts.baseBranch : `origin/${baseBranch}`;
+            try {
+                await gitExec(`git branch "${branchName}" "${branchBase}"`, { cwd: repoRoot });
+            }
+            catch {
+                // Branch already exists
+            }
+            await gitExec(`git worktree add "${worktreePath}" "${branchName}"`, { cwd: repoRoot });
+        });
+        await markStep('worktree', 'success', { metadata: { branchName, worktreePath } });
+        // Step 2: Run agent
+        await markStep('agent', 'started');
+        const prompt = buildTicketPrompt(ticket, opts.guidelinesContext);
+        const execBackend = opts.executionBackend ?? new ClaudeExecutionBackend();
+        const claudeResult = await execBackend.run({
+            worktreePath,
+            prompt,
+            timeoutMs,
+            verbose,
+            onProgress: verbose ? onProgress : () => { },
+        });
+        // Save agent artifact
+        const agentArtifactPath = writeJsonArtifact({
+            baseDir,
+            type: 'executions',
+            id: runId,
+            data: {
+                runId,
+                ticketId: ticket.id,
+                prompt,
+                stdout: claudeResult.stdout,
+                stderr: claudeResult.stderr,
+                exitCode: claudeResult.exitCode,
+                timedOut: claudeResult.timedOut,
+                durationMs: claudeResult.durationMs,
+            },
+        });
+        artifactPaths.execution = agentArtifactPath;
+        if (!claudeResult.success) {
+            await markStep('agent', 'failed', {
+                errorMessage: claudeResult.error ?? 'Agent execution failed',
+                metadata: { artifactPath: agentArtifactPath },
+            });
+            await skipRemaining(2, 'Agent failed');
+            await cleanupWorktree(repoRoot, worktreePath);
+            const baseError = claudeResult.error ?? 'Claude execution failed';
+            const errorParts = [
+                claudeResult.timedOut ? 'Agent timed out' : 'Agent execution failed',
+                ` ${baseError}`,
+                '',
+            ];
+            if (claudeResult.timedOut) {
+                errorParts.push('The agent exceeded its time limit.');
+                errorParts.push('Consider breaking down the ticket into smaller tasks.');
+            }
+            errorParts.push(`To retry: blockspool solo run ${ticket.id}`);
+            errorParts.push(`Execution logs: ${agentArtifactPath}`);
+            const result = {
+                success: false,
+                durationMs: Date.now() - startTime,
+                error: errorParts.join('\n'),
+                failureReason: claudeResult.timedOut ? 'timeout' : 'agent_error',
+                artifacts: { ...artifactPaths },
+            };
+            await saveRunSummary(result);
+            return result;
+        }
+        await markStep('agent', 'success', {
+            metadata: { artifactPath: agentArtifactPath, durationMs: claudeResult.durationMs },
+        });
+        // Spindle check
+        if (spindleConfig.enabled) {
+            let prelimDiff = null;
+            try {
+                prelimDiff = (await gitExec('git diff', {
+                    cwd: worktreePath,
+                    maxBuffer: 10 * 1024 * 1024,
+                })).trim();
+            }
+            catch {
+                // Ignore diff errors for Spindle check
+            }
+            const spindleCheck = checkSpindleLoop(spindleState, claudeResult.stdout, prelimDiff, spindleConfig);
+            for (const warning of spindleState.warnings) {
+                onProgress(`⚠ ${warning}`);
+            }
+            spindleState.warnings = [];
+            if (spindleCheck.shouldAbort) {
+                const trigger = spindleCheck.reason;
+                const recommendations = generateSpindleRecommendations(trigger, ticket, spindleConfig);
+                const spindleArtifactData = {
+                    runId,
+                    ticketId: ticket.id,
+                    triggeredAtMs: Date.now(),
+                    iteration: spindleState.outputs.length,
+                    reason: trigger,
+                    metrics: {
+                        similarity: spindleCheck.diagnostics.similarityScore,
+                        similarOutputs: spindleState.outputs.length,
+                        stallIterations: spindleCheck.diagnostics.iterationsWithoutChange,
+                        estimatedTokens: spindleState.estimatedTokens,
+                        repeatedPatterns: spindleCheck.diagnostics.repeatedPatterns,
+                        oscillationPattern: spindleCheck.diagnostics.oscillationPattern,
+                    },
+                    thresholds: {
+                        similarityThreshold: spindleConfig.similarityThreshold,
+                        maxSimilarOutputs: spindleConfig.maxSimilarOutputs,
+                        maxStallIterations: spindleConfig.maxStallIterations,
+                        tokenBudgetWarning: spindleConfig.tokenBudgetWarning,
+                        tokenBudgetAbort: spindleConfig.tokenBudgetAbort,
+                    },
+                    pointers: {
+                        agentExecution: artifactPaths.execution,
+                    },
+                    recommendations,
+                    recentOutputs: spindleState.outputs.slice(-3),
+                    recentDiffs: spindleState.diffs.slice(-3),
+                    formatted: formatSpindleResult(spindleCheck),
+                };
+                const spindleArtifactPath = writeJsonArtifact({
+                    baseDir,
+                    type: 'spindle',
+                    id: runId,
+                    data: spindleArtifactData,
+                });
+                artifactPaths.spindle = spindleArtifactPath;
+                const spindleDetails = {
+                    trigger,
+                    confidence: spindleCheck.confidence,
+                    estimatedTokens: spindleState.estimatedTokens,
+                    iteration: spindleState.outputs.length,
+                    thresholds: {
+                        similarityThreshold: spindleConfig.similarityThreshold,
+                        maxSimilarOutputs: spindleConfig.maxSimilarOutputs,
+                        maxStallIterations: spindleConfig.maxStallIterations,
+                        tokenBudgetWarning: spindleConfig.tokenBudgetWarning,
+                        tokenBudgetAbort: spindleConfig.tokenBudgetAbort,
+                    },
+                    metrics: {
+                        similarityScore: spindleCheck.diagnostics.similarityScore,
+                        iterationsWithoutChange: spindleCheck.diagnostics.iterationsWithoutChange,
+                        repeatedPatterns: spindleCheck.diagnostics.repeatedPatterns,
+                        oscillationPattern: spindleCheck.diagnostics.oscillationPattern,
+                    },
+                    recommendations,
+                    artifactPath: spindleArtifactPath,
+                };
+                onProgress(`Spindle loop detected: ${trigger}`);
+                onProgress(` Confidence: ${(spindleCheck.confidence * 100).toFixed(0)}%`);
+                onProgress(` Tokens: ~${spindleState.estimatedTokens.toLocaleString()}`);
+                await skipRemaining(2, `Spindle loop: ${trigger}`);
+                await cleanupWorktree(repoRoot, worktreePath);
+                const result = {
+                    success: false,
+                    durationMs: Date.now() - startTime,
+                    error: `Spindle loop detected: ${trigger} (confidence: ${(spindleCheck.confidence * 100).toFixed(0)}%)`,
+                    failureReason: 'spindle_abort',
+                    spindle: spindleDetails,
+                    artifacts: { ...artifactPaths },
+                };
+                await saveRunSummary(result);
+                return result;
+            }
+        }
+        // Step 3: Scope check
+        await markStep('scope', 'started');
+        const statusOutput = (await gitExec('git status --porcelain', {
+            cwd: worktreePath,
+        })).trim();
+        if (!statusOutput) {
+            await markStep('scope', 'success', { errorMessage: 'No changes needed' });
+            await skipRemaining(3, 'No changes needed');
+            await cleanupWorktree(repoRoot, worktreePath);
+            const result = {
+                success: true,
+                durationMs: Date.now() - startTime,
+                completionOutcome: 'no_changes_needed',
+                artifacts: { ...artifactPaths },
+            };
+            await saveRunSummary(result);
+            return result;
+        }
+        const changedFiles = parseChangedFiles(statusOutput);
+        const violations = checkScopeViolations(changedFiles, ticket.allowedPaths, ticket.forbiddenPaths);
+        if (violations.length > 0) {
+            const violationsData = {
+                runId,
+                ticketId: ticket.id,
+                changedFiles,
+                allowedPaths: ticket.allowedPaths,
+                forbiddenPaths: ticket.forbiddenPaths,
+                violations,
+            };
+            const violationsArtifactPath = writeJsonArtifact({
+                baseDir,
+                type: 'violations',
+                id: runId,
+                data: violationsData,
+            });
+            artifactPaths.violations = violationsArtifactPath;
+            const canAutoRetry = ticket.retryCount < ticket.maxRetries;
+            const expansionResult = canAutoRetry
+                ? analyzeViolationsForExpansion(violations, ticket.allowedPaths)
+                : { canExpand: false, expandedPaths: ticket.allowedPaths, addedPaths: [], reason: 'Max retries exceeded' };
+            if (expansionResult.canExpand && expansionResult.addedPaths.length > 0) {
+                const newRetryCount = ticket.retryCount + 1;
+                await adapter.query(`UPDATE tickets SET
+                    allowed_paths = $1,
+                    retry_count = $2,
+                    status = 'ready',
+                    updated_at = datetime('now')
+                    WHERE id = $3`, [JSON.stringify(expansionResult.expandedPaths), newRetryCount, ticket.id]);
+                await markStep('scope', 'failed', {
+                    errorMessage: `Scope expanded: +${expansionResult.addedPaths.length} paths, retry ${newRetryCount}/${ticket.maxRetries}`,
+                    metadata: { violations, expansionResult, artifactPath: violationsArtifactPath },
+                });
+                await skipRemaining(4, 'Scope expansion - retry scheduled');
+                await cleanupWorktree(repoRoot, worktreePath);
+                const result = {
+                    success: false,
+                    durationMs: Date.now() - startTime,
+                    error: `Scope auto-expanded: ${expansionResult.addedPaths.join(', ')}`,
+                    failureReason: 'scope_violation',
+                    artifacts: { ...artifactPaths },
+                    scopeExpanded: {
+                        addedPaths: expansionResult.addedPaths,
+                        newRetryCount,
+                    },
+                };
+                await saveRunSummary(result);
+                return result;
+            }
+            const violationSummary = violations
+                .map(v => v.violation === 'in_forbidden'
+                ? `${v.file} (forbidden by ${v.pattern})`
+                : `${v.file} (not in allowed paths)`)
+                .join(', ');
+            await markStep('scope', 'failed', {
+                errorMessage: `Scope violations: ${violationSummary}`,
+                metadata: { violations, expansionResult, artifactPath: violationsArtifactPath },
+            });
+            await skipRemaining(4, 'Scope violations');
+            await cleanupWorktree(repoRoot, worktreePath);
+            const violationDetails = violations
+                .map(v => v.violation === 'in_forbidden'
+                ? ` ${v.file} (forbidden by ${v.pattern})`
+                : ` ${v.file} (not in allowed_paths)`)
+                .join('\n');
+            const blockReason = expansionResult.reason
+                ? `\nNote: ${expansionResult.reason}`
+                : '';
+            const errorMessage = [
+                `Scope violation: Changes outside allowed paths`,
+                violationDetails,
+                blockReason,
+                ``,
+                `To fix: blockspool solo retry ${ticket.id}`,
+                ` This regenerates allowed_paths and resets the ticket to 'ready'`,
+            ].join('\n');
+            const result = {
+                success: false,
+                durationMs: Date.now() - startTime,
+                error: errorMessage,
+                failureReason: 'scope_violation',
+                artifacts: { ...artifactPaths },
+            };
+            await saveRunSummary(result);
+            return result;
+        }
+        await markStep('scope', 'success', {
+            metadata: { filesChecked: changedFiles.length },
+        });
+        // Step 4: Commit changes
+        await markStep('commit', 'started');
+        const diffOutput = await gitExec('git diff HEAD', {
+            cwd: worktreePath,
+            maxBuffer: 10 * 1024 * 1024,
+        });
+        const diffArtifactPath = writeJsonArtifact({
+            baseDir,
+            type: 'diffs',
+            id: runId,
+            data: {
+                runId,
+                ticketId: ticket.id,
+                diff: diffOutput,
+                filesChanged: statusOutput.split('\n').length,
+                changedFiles,
+            },
+        });
+        artifactPaths.diff = diffArtifactPath;
+        await gitExec('git add -A', { cwd: worktreePath });
+        await gitExec(`git commit -m "${ticket.title.replace(/"/g, '\\"')}"`, { cwd: worktreePath });
+        await markStep('commit', 'success', { metadata: { diffArtifactPath } });
+        // Step 5: Push branch
+        if (opts.skipPush) {
+            await markStep('push', 'skipped', { errorMessage: 'Skipped (milestone mode)' });
+        }
+        else {
+            await markStep('push', 'started');
+            try {
+                const { assertPushSafe } = await import('./solo-remote.js');
+                await assertPushSafe(worktreePath, config?.allowedRemote);
+                await gitExec(`git push -u origin "${branchName}"`, { cwd: worktreePath });
+                await markStep('push', 'success');
+            }
+            catch (pushError) {
+                await markStep('push', 'failed', {
+                    errorMessage: pushError instanceof Error ? pushError.message : String(pushError),
+                });
+                await skipRemaining(5, 'Push failed');
+                await cleanupWorktree(repoRoot, worktreePath);
+                const result = {
+                    success: false,
+                    branchName,
+                    durationMs: Date.now() - startTime,
+                    error: `Push failed: ${pushError instanceof Error ? pushError.message : pushError}`,
+                };
+                await saveRunSummary(result);
+                return result;
+            }
+        }
+        // Step 6: Run QA
+        if (!skipQa && config?.qa?.commands?.length) {
+            await markStep('qa', 'started');
+            const qaConfig = normalizeQaConfig(config);
+            const exec = createExecRunner({
+                defaultMaxLogBytes: qaConfig.artifacts.maxLogBytes,
+                defaultTailBytes: qaConfig.artifacts.tailBytes,
+            });
+            const logger = createLogger({ quiet: true });
+            const project = await projects.ensureForRepo(adapter, {
+                name: path.basename(repoRoot),
+                rootPath: repoRoot,
+            });
+            const qaResult = await runQa({ db: adapter, exec, logger }, {
+                projectId: project.id,
+                repoRoot: worktreePath,
+                config: qaConfig,
+            });
+            if (qaResult.status !== 'success') {
+                await markStep('qa', 'failed', {
+                    errorMessage: `QA failed at ${qaResult.failedAt?.stepName ?? 'unknown step'}`,
+                    metadata: { qaRunId: qaResult.runId },
+                });
+                await skipRemaining(6, 'QA failed');
+                const failedStep = qaResult.failedAt?.stepName ?? 'unknown step';
+                const errorParts = [`QA failed at: ${failedStep}`];
+                const qaDetails = await getQaRunDetails(adapter, qaResult.runId);
+                if (qaDetails) {
+                    const failedStepInfo = qaDetails.steps.find(s => s.name === failedStep && s.status === 'failed');
+                    if (failedStepInfo) {
+                        const errorOutput = failedStepInfo.stderrTail || failedStepInfo.stdoutTail;
+                        if (errorOutput) {
+                            const truncated = errorOutput.length > 500
+                                ? '...' + errorOutput.slice(-497)
+                                : errorOutput;
+                            errorParts.push('');
+                            errorParts.push('Error output:');
+                            errorParts.push(truncated.split('\n').map(l => ` ${l}`).join('\n'));
+                        }
+                        if (failedStepInfo.errorMessage) {
+                            errorParts.push('');
+                            errorParts.push(`Error: ${failedStepInfo.errorMessage}`);
+                        }
+                    }
+                }
+                errorParts.push('');
+                errorParts.push(`To retry: blockspool solo run ${ticket.id}`);
+                errorParts.push(`Worktree preserved for inspection: ${worktreePath}`);
+                const result = {
+                    success: false,
+                    branchName,
+                    durationMs: Date.now() - startTime,
+                    error: errorParts.join('\n'),
+                };
+                await saveRunSummary(result);
+                return result;
+            }
+            await markStep('qa', 'success', { metadata: { qaRunId: qaResult.runId } });
+        }
+        else {
+            await markStep('qa', 'skipped', { errorMessage: skipQa ? 'Skipped by flag' : 'No QA configured' });
+        }
+        // Step 7: Create PR
+        let prUrl;
+        if (opts.skipPr) {
+            await markStep('pr', 'skipped', { errorMessage: 'Skipped (milestone mode)' });
+        }
+        else if (createPr) {
+            await markStep('pr', 'started');
+            try {
+                const { assertPushSafe: assertPrSafe } = await import('./solo-remote.js');
+                await assertPrSafe(worktreePath, config?.allowedRemote);
+                const prBody = `## Summary\n\n${ticket.description ?? ticket.title}\n\n---\n_Created by BlockSpool_`;
+                const draftFlag = draftPr ? ' --draft' : '';
+                const prOutput = (await gitExec(`gh pr create --title "${ticket.title.replace(/"/g, '\\"')}" --body "${prBody.replace(/"/g, '\\"')}" --head "${branchName}"${draftFlag}`, { cwd: worktreePath })).trim();
+                const urlMatch = prOutput.match(/https:\/\/github\.com\/[^\s]+/);
+                prUrl = urlMatch ? urlMatch[0] : undefined;
+                await markStep('pr', 'success', { metadata: { prUrl } });
+            }
+            catch (prError) {
+                await markStep('pr', 'failed', {
+                    errorMessage: prError instanceof Error ? prError.message : String(prError),
+                });
+                onProgress(`PR creation failed: ${prError instanceof Error ? prError.message : prError}`);
+            }
+        }
+        else {
+            await markStep('pr', 'skipped', { errorMessage: 'Not requested' });
+        }
+        // Step 8: Clean up worktree
+        await markStep('cleanup', 'started');
+        await cleanupWorktree(repoRoot, worktreePath);
+        await markStep('cleanup', 'success');
+        const result = {
+            success: true,
+            branchName,
+            prUrl,
+            durationMs: Date.now() - startTime,
+        };
+        await saveRunSummary(result);
+        return result;
+    }
+    catch (error) {
+        await cleanupWorktree(repoRoot, worktreePath);
+        const result = {
+            success: false,
+            durationMs: Date.now() - startTime,
+            error: error instanceof Error ? error.message : String(error),
+        };
+        await saveRunSummary(result);
+        return result;
+    }
+}
+//# sourceMappingURL=solo-ticket.js.map
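
For orientation on the two execution backends exported above, a minimal, hypothetical driver follows. It assumes the `codex` CLI (or `claude` with `ANTHROPIC_API_KEY` set) is on PATH and that the deep import specifier resolves; the backend-selection logic is illustrative, not something the package itself does.

// Hypothetical driver (not shipped in the package): runs one prompt through a backend.
import { ClaudeExecutionBackend, CodexExecutionBackend } from '@blockspool/cli/dist/lib/solo-ticket.js';

// Illustrative choice: prefer Codex when a key is present, otherwise the default Claude backend.
const backend = process.env.CODEX_API_KEY
    ? new CodexExecutionBackend({ apiKey: process.env.CODEX_API_KEY })
    : new ClaudeExecutionBackend();

const result = await backend.run({
    worktreePath: '/tmp/demo-worktree',
    prompt: 'List the files you can see, then stop.',
    timeoutMs: 5 * 60 * 1000,
    verbose: true,
    onProgress: (line) => console.log('[agent]', line),
});

// Both backends resolve with the same result shape (runClaude additionally throws up
// front when ANTHROPIC_API_KEY is unset).
console.log(result.success, result.exitCode, result.timedOut, result.durationMs);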