orchestr8 2.7.0 → 2.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.blueprint/features/feature_parallel-abort/FEATURE_SPEC.md +117 -0
- package/.blueprint/features/feature_parallel-confirm/FEATURE_SPEC.md +90 -0
- package/.blueprint/features/feature_parallel-features/FEATURE_SPEC.md +291 -0
- package/.blueprint/features/feature_parallel-features/IMPLEMENTATION_PLAN.md +73 -0
- package/.blueprint/features/feature_parallel-lock/FEATURE_SPEC.md +119 -0
- package/.blueprint/features/feature_parallel-logging/FEATURE_SPEC.md +105 -0
- package/.blueprint/features/feature_parallel-preflight/FEATURE_SPEC.md +141 -0
- package/README.md +218 -0
- package/bin/cli.js +159 -0
- package/package.json +1 -1
- package/src/parallel.js +1544 -0
package/src/parallel.js
ADDED
|
@@ -0,0 +1,1544 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const { execSync, spawn } = require('child_process');
|
|
5
|
+
const fs = require('fs');
|
|
6
|
+
const readline = require('readline');
|
|
7
|
+
|
|
8
|
+
// Paths (relative to the repo root) for persisted parallel-run state.
const CONFIG_FILE = '.claude/parallel-config.json';
const LOCK_FILE = '.claude/parallel.lock';

// Track running processes for abort handling.
// Map of slug -> process info (read by the SIGINT handler to kill children).
let runningProcesses = new Map();
// Set once the abort handler fires so a second Ctrl+C is a no-op.
let isAborting = false;
|
|
14
|
+
|
|
15
|
+
/**
 * Default settings for parallel feature execution.
 * @returns {object} a fresh config object (callers may safely mutate it)
 */
function getDefaultParallelConfig() {
  const defaults = {
    maxConcurrency: 3,
    maxFeatures: 10,
    timeout: 30, // minutes allotted to each pipeline
    minDiskSpaceMB: 500,
    cli: 'npx claude',
    skill: '/implement-feature',
    skillFlags: '--no-commit',
    worktreeDir: '.claude/worktrees',
    queueFile: '.claude/parallel-queue.json'
  };
  return defaults;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Load the parallel config from disk, overlaying user settings on defaults.
 * Falls back to pure defaults when the file is absent or unparseable.
 * @returns {object} merged configuration
 */
function readParallelConfig() {
  const defaults = getDefaultParallelConfig();
  if (!fs.existsSync(CONFIG_FILE)) {
    return defaults;
  }
  try {
    const parsed = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
    return Object.assign({}, defaults, parsed);
  } catch {
    // Corrupt or unreadable config: behave as if unconfigured.
    return defaults;
  }
}
|
|
40
|
+
|
|
41
|
+
/**
 * Persist the parallel config as pretty-printed JSON, creating the
 * containing directory when it does not yet exist.
 * @param {object} config - configuration object to write
 */
function writeParallelConfig(config) {
  // mkdirSync with recursive:true is a no-op when the directory exists.
  fs.mkdirSync(path.dirname(CONFIG_FILE), { recursive: true });
  fs.writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2));
}
|
|
48
|
+
|
|
49
|
+
// Resolve the queue file path from the active configuration.
function getQueueFile() {
  const { queueFile } = readParallelConfig();
  return queueFile;
}
|
|
52
|
+
|
|
53
|
+
// Superseded by getQueueFile(), which honors the configured queueFile value.
const QUEUE_FILE = '.claude/parallel-queue.json'; // Legacy reference
|
|
54
|
+
|
|
55
|
+
/**
 * Compute the worktree directory for a feature slug.
 * @param {string} slug - feature identifier
 * @param {object|null} config - optional pre-loaded config (avoids a re-read)
 * @returns {string} e.g. ".claude/worktrees/feat-my-feature"
 */
function buildWorktreePath(slug, config = null) {
  const { worktreeDir } = config || readParallelConfig();
  return `${worktreeDir}/feat-${slug}`;
}
|
|
59
|
+
|
|
60
|
+
// Branch naming convention for parallel feature work: "feature/<slug>".
function buildBranchName(slug) {
  return ['feature', slug].join('/');
}
|
|
63
|
+
|
|
64
|
+
// Legacy shim: expose only maxConcurrency from the parallel config.
function getDefaultConfig() {
  const { maxConcurrency } = readParallelConfig();
  return { maxConcurrency };
}
|
|
68
|
+
|
|
69
|
+
// Per-worktree queue file used by the implement pipeline.
function getQueuePath(worktreePath) {
  const segments = [worktreePath, '.claude', 'implement-queue.json'];
  return path.join(...segments);
}
|
|
72
|
+
|
|
73
|
+
// A worktree is safe to delete once its feature finished or was aborted.
function shouldCleanupWorktree(state) {
  return ['parallel_complete', 'aborted'].includes(state.status);
}
|
|
76
|
+
|
|
77
|
+
/**
 * Validate repository preconditions before starting parallel work.
 * @param {{isGitRepo: boolean, isDirty: boolean, gitVersion: string}} checks
 * @returns {{valid: boolean, errors: string[]}}
 */
function validatePreFlight({ isGitRepo, isDirty, gitVersion }) {
  const errors = [];

  if (!isGitRepo) errors.push('Not in a git repository');
  if (isDirty) errors.push('Working tree has uncommitted changes');
  // Version check is skipped entirely when no version string was captured.
  if (gitVersion && !isGitVersionSupported(gitVersion)) {
    errors.push('Git version 2.5+ required for worktree support');
  }

  return { valid: errors.length === 0, errors };
}
|
|
97
|
+
|
|
98
|
+
/**
 * True when the git version string is at least 2.5 (first release with
 * `git worktree`). Accepts any string containing "major.minor".
 * @param {string} versionString - e.g. "git version 2.39.1"
 * @returns {boolean}
 */
function isGitVersionSupported(versionString) {
  const match = /(\d+)\.(\d+)/.exec(versionString);
  if (!match) return false;

  const major = Number.parseInt(match[1], 10);
  const minor = Number.parseInt(match[2], 10);
  return major > 2 || (major === 2 && minor >= 5);
}
|
|
109
|
+
|
|
110
|
+
// Partition slugs into an immediately-active set and an overflow queue.
function splitByLimit(slugs, maxConcurrency) {
  const active = slugs.slice(0, maxConcurrency);
  const queued = slugs.slice(maxConcurrency);
  return { active, queued };
}
|
|
116
|
+
|
|
117
|
+
/**
 * Move queued slugs into the active set until the concurrency cap is hit.
 * Pure: returns a new state object; the input and its arrays are untouched.
 * @param {{active: string[], queued: string[], maxConcurrency: number}} state
 */
function promoteFromQueue(state) {
  const { active, queued, maxConcurrency } = state;
  const slots = Math.max(0, maxConcurrency - active.length);
  const promoted = queued.slice(0, slots);

  return {
    ...state,
    active: active.concat(promoted),
    queued: queued.slice(promoted.length)
  };
}
|
|
132
|
+
|
|
133
|
+
/**
 * Build the shell command that runs the implementation pipeline for a slug.
 *
 * Fix: the worktree path was interpolated unquoted, so a path containing a
 * space split into multiple shell arguments; it is now double-quoted like
 * the slug already was.
 *
 * @param {string} slug - feature identifier
 * @param {string} worktreePath - worktree directory to run in
 * @param {object|null} config - optional pre-loaded config
 * @returns {string} full command line
 */
function buildPipelineCommand(slug, worktreePath, config = null) {
  const cfg = config || readParallelConfig();
  const flags = cfg.skillFlags ? ` ${cfg.skillFlags}` : '';
  return `${cfg.cli} --cwd "${worktreePath}" ${cfg.skill} "${slug}"${flags}`;
}
|
|
138
|
+
|
|
139
|
+
// Fast-forward is possible only when the branch forked from the current main HEAD.
function canFastForward({ mainHead, branchBase }) {
  const branchIsCurrent = mainHead === branchBase;
  return branchIsCurrent;
}
|
|
142
|
+
|
|
143
|
+
// Git prints "CONFLICT" lines when an automatic merge fails.
function hasMergeConflict(gitOutput) {
  return /CONFLICT/.test(gitOutput);
}
|
|
146
|
+
|
|
147
|
+
/**
 * Mark a feature state as conflicted, attaching git's conflict output.
 * Pure: returns a new object. Falsy output (including '') becomes null.
 */
function handleMergeConflict(state, conflictOutput) {
  const conflictDetails = conflictOutput || null;
  return Object.assign({}, state, { status: 'merge_conflict', conflictDetails });
}
|
|
154
|
+
|
|
155
|
+
/**
 * Return features sorted by completion time, earliest first.
 *
 * Fix: a missing or unparseable `completedAt` produced NaN inside the
 * comparator, which makes Array.prototype.sort's ordering unspecified.
 * Such features now deterministically sort last. Does not mutate input.
 *
 * @param {Array<{completedAt?: string}>} features
 * @returns {Array} new, sorted array
 */
function orderByCompletion(features) {
  const toTime = (f) => {
    const t = new Date(f.completedAt).getTime();
    // MAX_SAFE_INTEGER (not Infinity) keeps the subtraction comparator finite.
    return Number.isNaN(t) ? Number.MAX_SAFE_INTEGER : t;
  };
  return [...features].sort((a, b) => toTime(a) - toTime(b));
}
|
|
162
|
+
|
|
163
|
+
// Allowed status transitions for the parallel feature state machine.
// Keys are current statuses; values list the statuses reachable from them.
// Terminal states (failed / complete / conflict / aborted) have no exits.
// NOTE(review): transition() below does not currently consult this table —
// it appears to exist for callers/tests to validate against.
const VALID_TRANSITIONS = {
  parallel_queued: ['worktree_created', 'aborted'],
  worktree_created: ['parallel_running', 'parallel_failed', 'aborted'],
  parallel_running: ['merge_pending', 'parallel_failed', 'aborted'],
  merge_pending: ['parallel_complete', 'merge_conflict', 'aborted'],
  parallel_failed: [],
  parallel_complete: [],
  merge_conflict: [],
  aborted: []
};
|
|
173
|
+
|
|
174
|
+
// Produce a copy of `state` with its status replaced.
// Callers are trusted; VALID_TRANSITIONS is not enforced here.
function transition(state, newStatus) {
  return Object.assign({}, state, { status: newStatus });
}
|
|
180
|
+
|
|
181
|
+
// Render one status line per feature, newline-separated.
function formatStatus(states) {
  const lines = states.map((state) => formatFeatureStatus(state));
  return lines.join('\n');
}
|
|
184
|
+
|
|
185
|
+
// One-line summary: "<slug>: <status-minus-parallel_-prefix>[ (stage)]".
function formatFeatureStatus(state) {
  const shortStatus = state.status.replace('parallel_', '');
  const stageSuffix = state.stage ? ` (${state.stage})` : '';
  return `${state.slug}: ${shortStatus}${stageSuffix}`;
}
|
|
190
|
+
|
|
191
|
+
// Tally terminal outcomes across a batch of results.
function summarizeFinal(results) {
  const counts = { completed: 0, failed: 0, conflicts: 0 };
  for (const { status } of results) {
    if (status === 'parallel_complete') counts.completed += 1;
    else if (status === 'parallel_failed') counts.failed += 1;
    else if (status === 'merge_conflict') counts.conflicts += 1;
  }
  return counts;
}
|
|
198
|
+
|
|
199
|
+
// Aggregate completion counts plus the batch size.
function aggregateResults(results) {
  let completed = 0;
  let failed = 0;
  for (const { status } of results) {
    if (status === 'parallel_complete') completed += 1;
    if (status === 'parallel_failed') failed += 1;
  }
  return { completed, failed, total: results.length };
}
|
|
206
|
+
|
|
207
|
+
// Mark the matching feature as aborted; every other state passes through untouched.
function abortFeature(states, slug) {
  return states.map((s) => (s.slug === slug ? { ...s, status: 'aborted' } : s));
}
|
|
215
|
+
|
|
216
|
+
// Mark every feature as aborted (returns new objects; input not mutated).
function abortAll(states) {
  return states.map((s) => Object.assign({}, s, { status: 'aborted' }));
}
|
|
219
|
+
|
|
220
|
+
// --- Git Operations ---
|
|
221
|
+
|
|
222
|
+
/**
 * Probe the local git environment.
 * @returns {{isGitRepo: boolean, isDirty: boolean, gitVersion: string}}
 *   A "no repo" result is returned when any git invocation throws.
 */
function checkGitStatus() {
  try {
    execSync('git rev-parse --git-dir', { stdio: 'pipe' });
    const porcelain = execSync('git status --porcelain', { encoding: 'utf8' });
    const versionOut = execSync('git --version', { encoding: 'utf8' });
    return {
      isGitRepo: true,
      isDirty: porcelain.trim().length > 0,
      gitVersion: versionOut.match(/(\d+\.\d+\.\d+)/)?.[1] || '0.0.0'
    };
  } catch {
    return { isGitRepo: false, isDirty: false, gitVersion: '0.0.0' };
  }
}
|
|
235
|
+
|
|
236
|
+
/**
 * Create a git worktree plus feature branch for a slug.
 * Shells out to `git worktree add`; a git failure propagates as a throw.
 * @param {string} slug
 * @returns {{worktreePath: string, branchName: string}}
 */
function createWorktree(slug) {
  const worktreePath = buildWorktreePath(slug);
  const branchName = buildBranchName(slug);

  fs.mkdirSync(path.dirname(worktreePath), { recursive: true });
  const cmd = `git worktree add "${worktreePath}" -b "${branchName}"`;
  execSync(cmd, { stdio: 'pipe' });

  return { worktreePath, branchName };
}
|
|
245
|
+
|
|
246
|
+
/**
 * Best-effort removal of a feature's worktree and branch.
 * Each git call is wrapped separately because either artifact may
 * already be gone; failures are deliberately swallowed.
 */
function removeWorktree(slug) {
  const commands = [
    `git worktree remove "${buildWorktreePath(slug)}" --force`,
    `git branch -D "${buildBranchName(slug)}"`
  ];
  for (const cmd of commands) {
    try {
      execSync(cmd, { stdio: 'pipe' });
    } catch {
      // Worktree/branch may already be removed — cleanup stays best-effort.
    }
  }
}
|
|
262
|
+
|
|
263
|
+
/**
 * Merge a feature branch into the currently checked-out branch.
 * @param {string} slug
 * @returns {{success: boolean, output: string, conflict?: boolean}}
 *   conflict=true when git's output contains CONFLICT markers.
 */
function mergeBranch(slug) {
  const branchName = buildBranchName(slug);
  try {
    const output = execSync(`git merge "${branchName}" --no-edit`, { encoding: 'utf8' });
    return { success: true, output };
  } catch (err) {
    const output = err.stdout || err.message;
    const conflict = hasMergeConflict(output);
    return { success: false, conflict, output };
  }
}
|
|
277
|
+
|
|
278
|
+
// Name of the branch currently checked out, trimmed of trailing newline.
function getCurrentBranch() {
  const raw = execSync('git branch --show-current', { encoding: 'utf8' });
  return raw.trim();
}
|
|
281
|
+
|
|
282
|
+
// --- Confirmation Prompt ---
|
|
283
|
+
|
|
284
|
+
/**
 * Ask a yes/no question on stdin. Resolves true only for "y"/"yes"
 * (case-insensitive); anything else — including a bare Enter — is no.
 * @param {string} message - prompt text (a " [y/N] " suffix is appended)
 * @returns {Promise<boolean>}
 */
function promptConfirm(message) {
  return new Promise((resolve) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout
    });
    rl.question(`${message} [y/N] `, (answer) => {
      rl.close();
      const normalized = answer.toLowerCase();
      resolve(normalized === 'y' || normalized === 'yes');
    });
  });
}
|
|
297
|
+
|
|
298
|
+
/**
 * Compose the human-readable confirmation summary shown before a batch run.
 * @param {string[]} slugs - features in the batch
 * @param {{maxConcurrency: number}} config - concurrency settings
 * @returns {string} multi-line message ending in "Continue?"
 */
function buildConfirmMessage(slugs, config) {
  const parallelCfg = readParallelConfig();
  const { active, queued } = splitByLimit(slugs, config.maxConcurrency);
  const branches = slugs.map((s) => `feature/${s}`).join(', ');

  const lines = [
    '\nThis will:',
    ` • Create ${slugs.length} git worktree(s) in ${parallelCfg.worktreeDir}/`,
    ` • Start ${active.length} parallel pipeline(s) (max concurrent: ${config.maxConcurrency})`
  ];
  if (queued.length > 0) {
    lines.push(` • Queue ${queued.length} additional feature(s)`);
  }
  lines.push(` • Branches: ${branches}`);

  return `${lines.join('\n')}\n\nContinue?`;
}
|
|
312
|
+
|
|
313
|
+
// --- Lock File ---
|
|
314
|
+
|
|
315
|
+
/**
 * Acquire the global parallel-run lock.
 *
 * If a lock file exists and its recorded PID is still alive, acquisition
 * fails and the existing lock is returned. A lock whose PID is dead — or
 * whose contents cannot be parsed (previously an uncaught JSON.parse
 * crash) — is treated as stale and overwritten.
 *
 * @param {string[]} slugs - features this run will own
 * @returns {{acquired: boolean, existingLock?: object}}
 */
function acquireLock(slugs) {
  if (fs.existsSync(LOCK_FILE)) {
    let existing = null;
    try {
      existing = JSON.parse(fs.readFileSync(LOCK_FILE, 'utf8'));
    } catch {
      // Corrupt/unreadable lock file: fall through and treat it as stale.
      console.log('Warning: Found unreadable lock file; treating it as stale.\n');
    }

    if (existing) {
      try {
        // Signal 0 only tests for process existence; nothing is delivered.
        process.kill(existing.pid, 0);
        // Process exists, lock is valid.
        return {
          acquired: false,
          existingLock: existing
        };
      } catch {
        // Process doesn't exist, stale lock.
        console.log(`Warning: Found stale lock file (PID ${existing.pid} not running)`);
        console.log('Removing stale lock and continuing...\n');
      }
    }
  }

  const lock = {
    pid: process.pid,
    startedAt: new Date().toISOString(),
    features: slugs
  };

  const dir = path.dirname(LOCK_FILE);
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  fs.writeFileSync(LOCK_FILE, JSON.stringify(lock, null, 2));

  return { acquired: true };
}
|
|
348
|
+
|
|
349
|
+
// Delete the lock file if present; failures during cleanup are ignored.
function releaseLock() {
  if (!fs.existsSync(LOCK_FILE)) return;
  try {
    fs.unlinkSync(LOCK_FILE);
  } catch {
    // Best-effort cleanup; ignore failures.
  }
}
|
|
358
|
+
|
|
359
|
+
// Read and parse the current lock file; null when absent or unreadable.
function getLockInfo() {
  try {
    const raw = fs.readFileSync(LOCK_FILE, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Missing file (ENOENT) and corrupt JSON both report "no lock".
    return null;
  }
}
|
|
369
|
+
|
|
370
|
+
// --- Feature Limit ---
|
|
371
|
+
|
|
372
|
+
/**
 * Enforce the configured cap on batch size.
 * @param {string[]} slugs
 * @returns {{valid: true} | {valid: false, error: string, requested: number, max: number}}
 */
function validateFeatureLimit(slugs) {
  const { maxFeatures } = readParallelConfig();
  if (slugs.length <= maxFeatures) {
    return { valid: true };
  }
  return {
    valid: false,
    error: `Too many features: ${slugs.length} requested, max is ${maxFeatures}`,
    requested: slugs.length,
    max: maxFeatures
  };
}
|
|
384
|
+
|
|
385
|
+
// --- Disk Space Check ---
|
|
386
|
+
|
|
387
|
+
/**
 * Report free disk space on the current filesystem via `df -m`.
 * When the probe fails (no `df`, shell error) availableMB is -1 and
 * `sufficient` defaults to true so the check never hard-blocks a run.
 * @returns {{availableMB: number, requiredMB: number, sufficient: boolean}}
 */
function checkDiskSpace() {
  const { minDiskSpaceMB } = readParallelConfig();
  try {
    const dfOutput = execSync('df -m . | tail -1', { encoding: 'utf8' });
    const columns = dfOutput.trim().split(/\s+/);
    const availableMB = parseInt(columns[3], 10); // "Available" column of df

    return {
      availableMB,
      requiredMB: minDiskSpaceMB,
      sufficient: availableMB >= minDiskSpaceMB
    };
  } catch {
    // Can't check disk space, assume it's fine.
    return { availableMB: -1, requiredMB: minDiskSpaceMB, sufficient: true };
  }
}
|
|
405
|
+
|
|
406
|
+
/**
 * Turn the raw disk-space probe into a validation result.
 *
 * An availableMB of -1 means "could not measure" and always passes
 * (checkDiskSpace already marks it sufficient). Fix: a genuine reading
 * of 0 MB available previously slipped through the old `> 0` guard and
 * validated as OK; `>= 0` now fails any measured shortfall.
 *
 * @returns {{valid: boolean, error?: string, availableMB: number, requiredMB?: number}}
 */
function validateDiskSpace() {
  const space = checkDiskSpace();
  if (!space.sufficient && space.availableMB >= 0) {
    return {
      valid: false,
      error: `Low disk space: ${space.availableMB}MB available, ${space.requiredMB}MB recommended`,
      availableMB: space.availableMB,
      requiredMB: space.requiredMB
    };
  }
  return { valid: true, availableMB: space.availableMB };
}
|
|
418
|
+
|
|
419
|
+
// --- Logging ---
|
|
420
|
+
|
|
421
|
+
/**
 * Open an append-mode log stream inside a feature's worktree.
 * Creates the containing directory when missing.
 * @param {string} slug
 * @param {object} [config] - pre-loaded parallel config
 * @returns {{path: string, stream: import('fs').WriteStream}}
 */
function createLogStream(slug, config) {
  const parallelCfg = config || readParallelConfig();
  const logPath = path.join(buildWorktreePath(slug, parallelCfg), 'pipeline.log');

  // mkdirSync with recursive:true is a no-op on an existing directory.
  fs.mkdirSync(path.dirname(logPath), { recursive: true });

  return {
    path: logPath,
    stream: fs.createWriteStream(logPath, { flags: 'a' })
  };
}
|
|
437
|
+
|
|
438
|
+
/**
 * Write each non-blank line of `data` to `stream`, tagged with one ISO
 * timestamp (captured once per call) and a source prefix. Blank lines
 * are dropped.
 * @param {{write: Function}} stream - writable target
 * @param {string} prefix - e.g. "stdout" / "stderr"
 * @param {string|Buffer} data - raw chunk, possibly multi-line
 */
function logWithTimestamp(stream, prefix, data) {
  const timestamp = new Date().toISOString();
  for (const line of data.toString().split('\n')) {
    if (line.trim().length === 0) continue;
    stream.write(`[${timestamp}] [${prefix}] ${line}\n`);
  }
}
|
|
447
|
+
|
|
448
|
+
// --- Pre-flight Feature Validation ---
|
|
449
|
+
|
|
450
|
+
/**
 * Validate a single feature's on-disk blueprint under
 * `.blueprint/features/feature_<slug>/`.
 *
 * Hard failure (valid=false): missing FEATURE_SPEC.md.
 * Soft warnings only: spec missing required sections, or no story-*.md files.
 * When IMPLEMENTATION_PLAN.md exists, its "Files to Create/Modify" section
 * is parsed into `filesToModify` (via extractFilesToModify).
 *
 * @param {string} slug - feature identifier
 * @returns {object} result record — see initializer below for the shape
 */
function validateFeatureSpec(slug) {
  const featDir = `.blueprint/features/feature_${slug}`;
  const specPath = path.join(featDir, 'FEATURE_SPEC.md');
  const planPath = path.join(featDir, 'IMPLEMENTATION_PLAN.md');

  // Optimistic defaults; flags are flipped as each artifact is inspected.
  const result = {
    slug,
    valid: true,
    specExists: false,
    specComplete: false,
    storiesExist: false,
    storyCount: 0,
    planExists: false,
    filesToModify: [],
    errors: [],
    warnings: []
  };

  // Check feature spec exists — the only condition that blocks the batch.
  if (!fs.existsSync(specPath)) {
    result.errors.push('Missing FEATURE_SPEC.md');
    result.valid = false;
  } else {
    result.specExists = true;
    // Check if spec has required sections (numbered or unnumbered headings).
    const specContent = fs.readFileSync(specPath, 'utf8');
    const hasIntent = specContent.includes('## 1. Feature Intent') || specContent.includes('# Feature Intent');
    const hasScope = specContent.includes('## 2. Scope') || specContent.includes('# Scope');
    const hasBehaviour = specContent.includes('## 3. Behaviour') || specContent.includes('Behaviour Overview');

    if (!hasIntent || !hasScope || !hasBehaviour) {
      result.warnings.push('Spec may be incomplete (missing required sections)');
    } else {
      result.specComplete = true;
    }
  }

  // Check for stories (story-*.md files alongside the spec).
  if (fs.existsSync(featDir)) {
    const files = fs.readdirSync(featDir);
    const stories = files.filter(f => f.startsWith('story-') && f.endsWith('.md'));
    result.storyCount = stories.length;
    result.storiesExist = stories.length > 0;

    if (!result.storiesExist) {
      result.warnings.push('No user stories found (story-*.md)');
    }
  }

  // Check for implementation plan and extract files to modify.
  if (fs.existsSync(planPath)) {
    result.planExists = true;
    const planContent = fs.readFileSync(planPath, 'utf8');
    result.filesToModify = extractFilesToModify(planContent);
  }

  return result;
}
|
|
508
|
+
|
|
509
|
+
/**
 * Pull file paths out of an IMPLEMENTATION_PLAN.md "Files to Create/Modify"
 * section. Two layouts are recognised:
 *   - markdown table rows:  | `path/to/file.js` | action | purpose |
 *   - bullet lists:         - path/to/file.ext
 * Scanning stops at the next "## " heading; results are de-duplicated
 * in document order.
 *
 * Fix: the original table-row guard was `m && a || m && c`, relying on
 * operator precedence and testing `tableMatch` twice; rewritten with
 * explicit grouping (same truth table, readable intent).
 *
 * @param {string} planContent - raw markdown
 * @returns {string[]} unique file paths
 */
function extractFilesToModify(planContent) {
  const files = [];
  let inFilesSection = false;

  for (const line of planContent.split('\n')) {
    // Enter the section on its heading line.
    if (line.includes('Files to Create') || line.includes('Files to Modify')) {
      inFilesSection = true;
      continue;
    }

    if (!inFilesSection) continue;

    // A new level-2 heading ends the section.
    if (line.startsWith('## ')) break;

    // Table format: | path | action | purpose |
    const tableMatch = line.match(/\|\s*`?([^|`]+)`?\s*\|/);
    if (tableMatch && (tableMatch[1].includes('/') || tableMatch[1].includes('.'))) {
      const filePath = tableMatch[1].trim();
      // Skip separator rows (---) and the header cell ("Path").
      if (filePath && !filePath.includes('---') && !filePath.toLowerCase().includes('path')) {
        files.push(filePath);
      }
    }

    // Bullet point format: - path/to/file
    const bulletMatch = line.match(/^[\s*-]+\s*`?([^\s`]+\.[a-z]+)`?/i);
    if (bulletMatch) {
      files.push(bulletMatch[1].trim());
    }
  }

  return [...new Set(files)]; // Dedupe
}
|
|
547
|
+
|
|
548
|
+
/**
 * Find files that more than one feature in the batch plans to touch.
 * @param {Array<{slug: string, filesToModify: string[]}>} featureValidations
 * @returns {Array<{file: string, features: string[]}>}
 */
function detectFileOverlap(featureValidations) {
  const fileToFeatures = new Map();

  for (const { slug, filesToModify } of featureValidations) {
    for (const file of filesToModify) {
      const owners = fileToFeatures.get(file) ?? [];
      owners.push(slug);
      fileToFeatures.set(file, owners);
    }
  }

  return [...fileToFeatures.entries()]
    .filter(([, features]) => features.length > 1)
    .map(([file, features]) => ({ file, features }));
}
|
|
569
|
+
|
|
570
|
+
/**
 * Heuristically detect cross-feature dependencies within a batch by
 * scanning each feature's FEATURE_SPEC.md for mentions of the other
 * slugs in the batch. The spec text is lowercased before matching, so
 * slugs containing uppercase letters will not match (preserved from the
 * original behavior).
 *
 * Fix: the original condition also tested for the substrings
 * `depends on <slug>` and `requires <slug>`, but both contain the slug
 * itself, so the plain `includes(otherSlug)` test already subsumed them;
 * the dead clauses are removed with no behavior change.
 *
 * @param {Array<{slug: string, specExists: boolean}>} featureValidations
 * @returns {Array<{feature: string, dependsOn: string}>}
 */
function detectDependencies(featureValidations) {
  const dependencies = [];
  const slugs = featureValidations.map((fv) => fv.slug);

  for (const fv of featureValidations) {
    if (!fv.specExists) continue;

    const specPath = `.blueprint/features/feature_${fv.slug}/FEATURE_SPEC.md`;
    let content;
    try {
      content = fs.readFileSync(specPath, 'utf8').toLowerCase();
    } catch {
      continue; // Spec vanished or is unreadable — skip this feature.
    }

    for (const otherSlug of slugs) {
      if (otherSlug !== fv.slug && content.includes(otherSlug)) {
        dependencies.push({ feature: fv.slug, dependsOn: otherSlug });
      }
    }
  }

  return dependencies;
}
|
|
596
|
+
|
|
597
|
+
/**
 * Rough per-feature time estimate: 10 min base + 5 per story + 2 per file.
 * @param {Array<{slug, storyCount, filesToModify}>} featureValidations
 * @returns {Array<{slug, storyCount, fileCount, estimatedMinutes}>}
 */
function estimateScope(featureValidations) {
  const BASE_MINUTES = 10;
  const MINUTES_PER_STORY = 5;
  const MINUTES_PER_FILE = 2;

  return featureValidations.map(({ slug, storyCount, filesToModify }) => ({
    slug,
    storyCount,
    fileCount: filesToModify.length,
    estimatedMinutes:
      BASE_MINUTES + storyCount * MINUTES_PER_STORY + filesToModify.length * MINUTES_PER_FILE
  }));
}
|
|
612
|
+
|
|
613
|
+
/**
 * Run the full pre-flight validation for a batch of feature slugs:
 * per-feature spec checks, file-overlap detection, dependency detection,
 * and scope estimation, rolled up into a single report.
 *
 * Fix: with an empty batch, `Math.max(...[])` evaluated to -Infinity for
 * the parallel time estimate; it now defaults to 0.
 *
 * @param {string[]} slugs
 * @returns {object} aggregated validation report (see return literal)
 */
function validateParallelBatch(slugs) {
  const featureValidations = slugs.map(validateFeatureSpec);
  const fileOverlaps = detectFileOverlap(featureValidations);
  const dependencies = detectDependencies(featureValidations);
  const scopeEstimates = estimateScope(featureValidations);

  // Features with hard errors (e.g. missing spec) block the whole batch.
  const invalidFeatures = featureValidations.filter((fv) => !fv.valid);
  const canProceed = invalidFeatures.length === 0;

  const recommendations = [];

  if (fileOverlaps.length > 0) {
    // Suggest running the overlapping subset sequentially — only when at
    // least one feature is conflict-free (otherwise everything overlaps
    // and the advice adds nothing).
    const overlappingFeatures = new Set();
    fileOverlaps.forEach((o) => o.features.forEach((f) => overlappingFeatures.add(f)));
    const nonOverlapping = slugs.filter((s) => !overlappingFeatures.has(s));

    if (nonOverlapping.length > 0) {
      recommendations.push(`Consider running ${[...overlappingFeatures].join(', ')} sequentially due to file overlap`);
    }
  }

  if (dependencies.length > 0) {
    recommendations.push(`Dependency detected: ${dependencies.map(d => `${d.feature} → ${d.dependsOn}`).join(', ')}`);
  }

  const totalEstimatedMinutes = scopeEstimates.reduce((sum, s) => sum + s.estimatedMinutes, 0);
  // Guard the empty batch: Math.max() with no arguments returns -Infinity.
  const maxEstimatedMinutes = scopeEstimates.length > 0
    ? Math.max(...scopeEstimates.map((s) => s.estimatedMinutes))
    : 0;

  return {
    valid: canProceed,
    features: featureValidations,
    fileOverlaps,
    dependencies,
    scopeEstimates,
    recommendations,
    totalEstimatedMinutes,
    parallelEstimatedMinutes: maxEstimatedMinutes, // Wall-clock time if run in parallel
    invalidFeatures
  };
}
|
|
660
|
+
|
|
661
|
+
/**
 * Render the batch validation report (from validateParallelBatch) as a
 * plain-text summary: per-feature status with errors/warnings, file
 * overlap and dependency analysis, a scope-estimation table, and any
 * recommendations.
 *
 * @param {object} results - report from validateParallelBatch
 * @param {object} [options] - currently unused; kept for interface stability
 * @returns {string} multi-line formatted report
 */
function formatPreflightResults(results, options = {}) {
  let output = '\nPre-flight Validation\n=====================\n\n';

  // Feature status: ✓/✗ per feature plus indented error/warning lines.
  for (const fv of results.features) {
    const icon = fv.valid ? '✓' : '✗';
    let status = [];
    if (fv.specComplete) status.push('Spec complete');
    if (fv.storiesExist) status.push(`${fv.storyCount} stories`);
    if (fv.planExists) status.push('Plan exists');

    output += `${icon} ${fv.slug}: ${status.length > 0 ? status.join(', ') : 'Not ready'}\n`;

    for (const err of fv.errors) {
      output += `  ✗ ${err}\n`;
    }
    for (const warn of fv.warnings) {
      output += `  ⚠ ${warn}\n`;
    }
  }

  // File overlap section (emitted only when overlaps were found).
  if (results.fileOverlaps.length > 0) {
    output += '\nConflict Analysis\n=================\n\n';
    output += '⚠ File overlap detected:\n';
    for (const overlap of results.fileOverlaps) {
      output += `  • ${overlap.file}: ${overlap.features.join(', ')} both modify\n`;
    }
  }

  // Dependencies section (emitted only when dependencies were detected).
  if (results.dependencies.length > 0) {
    output += '\n⚠ Dependencies detected:\n';
    for (const dep of results.dependencies) {
      output += `  • ${dep.feature} depends on ${dep.dependsOn}\n`;
    }
  }

  // Scope estimation table; column widths match padEnd(15)/padStart below.
  output += '\nScope Estimation\n================\n\n';
  output += ' Feature         | Stories | Files | Est. Time\n';
  output += ' ----------------|---------|-------|----------\n';
  for (const scope of results.scopeEstimates) {
    const slugPad = scope.slug.padEnd(15);
    const storiesPad = String(scope.storyCount).padStart(7);
    const filesPad = String(scope.fileCount).padStart(5);
    output += ` ${slugPad} |${storiesPad} |${filesPad} | ~${scope.estimatedMinutes} min\n`;
  }

  output += `\nTotal estimated: ~${results.totalEstimatedMinutes} min (parallel: ~${results.parallelEstimatedMinutes} min)\n`;

  // Recommendations (only when validateParallelBatch produced any).
  if (results.recommendations.length > 0) {
    output += '\nRecommendations\n===============\n';
    for (const rec of results.recommendations) {
      output += `  • ${rec}\n`;
    }
  }

  return output;
}
|
|
722
|
+
|
|
723
|
+
// --- Timeout ---
|
|
724
|
+
|
|
725
|
+
/**
 * Race `promise` against a timeout. This wrapper never rejects: both a
 * timeout and a rejection are converted into a `{ slug, success: false }`
 * result object, so Promise.all over pipelines cannot fail fast.
 * @param {Promise} promise - pipeline result promise
 * @param {number} timeoutMs - milliseconds before giving up
 * @param {string} slug - feature identifier echoed into failure results
 * @returns {Promise<object>} the promise's value, or a failure record
 */
function withTimeout(promise, timeoutMs, slug) {
  return new Promise((resolve) => {
    const timer = setTimeout(() => {
      resolve({
        slug,
        success: false,
        timedOut: true,
        error: `Pipeline timed out after ${timeoutMs / 60000} minutes`
      });
    }, timeoutMs);

    const settle = (value) => {
      clearTimeout(timer);
      resolve(value);
    };

    promise.then(settle, (err) => settle({ slug, success: false, error: err.message }));
  });
}
|
|
745
|
+
|
|
746
|
+
// Configured per-pipeline timeout, converted from minutes to milliseconds.
function getTimeoutMs() {
  const { timeout } = readParallelConfig();
  return timeout * 60 * 1000;
}
|
|
750
|
+
|
|
751
|
+
// --- Progress Tracking ---
|
|
752
|
+
|
|
753
|
+
/**
 * Infer pipeline progress by scanning a log file for stage keywords.
 * Checks run from latest stage to earliest, so the most advanced stage
 * mentioned anywhere in the log wins.
 * @param {string} logPath
 * @returns {{stage: string, percent: number}}
 */
function getProgressFromLog(logPath) {
  if (!fs.existsSync(logPath)) {
    return { stage: 'starting', percent: 0 };
  }

  try {
    const log = fs.readFileSync(logPath, 'utf8').toLowerCase();

    // Ordered latest-first; each entry is a predicate over the whole log.
    const stages = [
      { stage: 'codey-implement', percent: 90, hit: () => log.includes('codey') && log.includes('implement') },
      { stage: 'codey-plan', percent: 75, hit: () => log.includes('codey') && log.includes('plan') },
      { stage: 'nigel', percent: 50, hit: () => log.includes('nigel') },
      { stage: 'cass', percent: 35, hit: () => log.includes('cass') },
      { stage: 'alex', percent: 20, hit: () => log.includes('alex') }
    ];

    for (const { stage, percent, hit } of stages) {
      if (hit()) return { stage, percent };
    }

    return { stage: 'running', percent: 10 };
  } catch {
    return { stage: 'unknown', percent: 0 };
  }
}
|
|
784
|
+
|
|
785
|
+
/**
 * Build a live status snapshot for every feature in the queue.
 * NOTE(review): relies on loadQueue(), which is defined elsewhere in
 * this module (not visible in this chunk).
 * @returns {{active: boolean, features: object[]}}
 */
function getDetailedStatus() {
  const queue = loadQueue();
  if (!queue.features || queue.features.length === 0) {
    return { active: false, features: [] };
  }

  const features = queue.features.map((f) => {
    const progress = f.logPath
      ? getProgressFromLog(f.logPath)
      : { stage: 'pending', percent: 0 };
    const elapsedSeconds = f.startedAt
      ? Math.round((Date.now() - new Date(f.startedAt).getTime()) / 1000)
      : 0;

    return {
      slug: f.slug,
      status: f.status,
      stage: progress.stage,
      percent: progress.percent,
      elapsedSeconds,
      logPath: f.logPath,
      worktreePath: f.worktreePath,
      branchName: f.branchName
    };
  });

  const anyRunning = features.some((f) => f.status === 'parallel_running');
  return { active: anyRunning, features };
}
|
|
814
|
+
|
|
815
|
+
/**
 * Render the status snapshot as a human-readable block: an emoji icon
 * per feature, elapsed time when available, and a progress bar for
 * currently-running features.
 * @param {{active: boolean, features: object[]}} details
 * @returns {string}
 */
function formatDetailedStatus(details) {
  if (!details.active && details.features.length === 0) {
    return 'No parallel pipelines active.';
  }

  const ICONS = {
    parallel_queued: '⏳',
    worktree_created: '📁',
    parallel_running: '🔄',
    merge_pending: '🔀',
    parallel_complete: '✅',
    parallel_failed: '❌',
    merge_conflict: '⚠️',
    aborted: '🛑'
  };

  let output = 'Parallel Pipeline Status\n\n';

  for (const f of details.features) {
    const icon = ICONS[f.status] || '❓';
    const elapsed = f.elapsedSeconds > 0
      ? ` (${Math.floor(f.elapsedSeconds / 60)}m ${f.elapsedSeconds % 60}s)`
      : '';

    output += `${icon} ${f.slug}${elapsed}\n`;

    if (f.status === 'parallel_running') {
      const bar = progressBar(f.percent);
      output += `  ${bar} ${f.percent}% - ${f.stage}\n`;
    }
  }

  return output;
}
|
|
848
|
+
|
|
849
|
+
/**
 * Render a fixed-width ASCII/Unicode progress bar, e.g. "[██████░░░░]".
 *
 * @param {number} percent - Completion percentage; values outside [0, 100]
 *   are clamped (previously a negative or >100 percent produced a negative
 *   repeat count and threw a RangeError).
 * @param {number} [width=20] - Number of bar cells between the brackets.
 * @returns {string} The rendered bar.
 */
function progressBar(percent, width = 20) {
  // Clamp so out-of-range progress values (e.g. a malformed log) can never
  // produce a negative String.repeat count.
  const clamped = Math.min(100, Math.max(0, percent));
  const filled = Math.round((clamped / 100) * width);
  const empty = width - filled;
  return '[' + '█'.repeat(filled) + '░'.repeat(empty) + ']';
}
|
|
854
|
+
|
|
855
|
+
// --- Abort Handling ---
|
|
856
|
+
|
|
857
|
+
/**
 * Install SIGINT/SIGTERM handlers that tear down an in-flight parallel run:
 * kill tracked child pipelines, persist `aborted` statuses, release the lock,
 * and exit with the conventional Ctrl+C status code.
 *
 * @param {Object} queue - Live queue object shared with the orchestrator.
 * @returns {Function} The installed signal handler (useful for tests).
 */
function setupAbortHandler(queue) {
  const onSignal = async () => {
    // Re-entrancy guard: a second signal while teardown is in progress is ignored.
    if (isAborting) {
      return;
    }
    isAborting = true;

    console.log('\n\nReceived interrupt signal. Stopping pipelines...\n');

    // Terminate every tracked child pipeline.
    runningProcesses.forEach((info, name) => {
      console.log(`Stopping ${name} (PID: ${info.pid})...`);
      try {
        process.kill(info.pid, 'SIGTERM');
      } catch {
        // Child already exited; nothing to do.
      }
    });

    // Persist the aborted state so later status/cleanup commands see it.
    if (queue && queue.features) {
      for (const feature of queue.features) {
        if (feature.status === 'parallel_running' || feature.status === 'worktree_created') {
          feature.status = 'aborted';
        }
      }
      saveQueue(queue);
    }

    releaseLock();

    console.log('\nAborted. Worktrees preserved for debugging.');
    console.log("Run 'orchestr8 parallel cleanup' to remove.\n");

    process.exit(130); // Standard exit code for Ctrl+C
  };

  process.on('SIGINT', onSignal);
  process.on('SIGTERM', onSignal);

  return onSignal;
}
|
|
897
|
+
|
|
898
|
+
/**
 * Abort a parallel run from a *separate* invocation (the `parallel abort`
 * subcommand): signal the orchestrating process recorded in the lock file,
 * mark all pending/running features as aborted, release the lock, and
 * optionally remove worktrees.
 *
 * @param {Object} [options]
 * @param {boolean} [options.cleanup] - When true, also remove all worktrees;
 *   otherwise they are preserved for debugging.
 * @returns {Promise<{success: boolean, abortedCount?: number}>}
 */
async function abortParallel(options = {}) {
  const lock = getLockInfo();
  const queue = loadQueue();

  // Nothing to abort: no lock on disk and no queued features.
  if (!lock && (!queue.features || queue.features.length === 0)) {
    console.log('No parallel pipelines are currently running.');
    return { success: true };
  }

  console.log('Stopping parallel pipelines...\n');

  // Try to kill the main process if we have lock info. The self-PID check
  // avoids signalling ourselves when the aborting process holds the lock.
  if (lock && lock.pid !== process.pid) {
    console.log(`Sending stop signal to main process (PID: ${lock.pid})...`);
    try {
      process.kill(lock.pid, 'SIGTERM');
    } catch {
      // ESRCH etc.: the orchestrator already exited (stale lock).
      console.log('Main process not running.');
    }
  }

  // Mark every not-yet-finished feature as aborted and persist the queue.
  let abortedCount = 0;
  if (queue.features) {
    queue.features.forEach(f => {
      if (f.status === 'parallel_running' || f.status === 'worktree_created' || f.status === 'parallel_queued') {
        f.status = 'aborted';
        abortedCount++;
        console.log(`${f.slug}: Marked as aborted`);
      }
    });
    saveQueue(queue);
  }

  releaseLock();

  if (options.cleanup) {
    console.log('\nCleaning up worktrees...');
    await cleanupWorktrees();
  } else {
    // Default: keep worktrees so failed state can be inspected, and tell the
    // user where they live and how to remove them later.
    console.log('\nWorktrees preserved for debugging.');
    if (queue.features) {
      const worktrees = queue.features
        .filter(f => f.worktreePath)
        .map(f => f.worktreePath);
      if (worktrees.length > 0) {
        console.log('Locations:');
        worktrees.forEach(w => console.log(`  • ${w}`));
      }
    }
    console.log("\nTo clean up: orchestr8 parallel cleanup");
  }

  return { success: true, abortedCount };
}
|
|
953
|
+
|
|
954
|
+
// --- Queue Persistence ---
|
|
955
|
+
|
|
956
|
+
/**
 * Read the persisted parallel queue from disk.
 *
 * @returns {Object} The parsed queue, or an empty queue shape
 *   (`{ features: [], startedAt: null }`) when no queue file exists.
 */
function loadQueue() {
  const queueFile = getQueueFile();
  if (fs.existsSync(queueFile)) {
    const raw = fs.readFileSync(queueFile, 'utf8');
    return JSON.parse(raw);
  }
  return { features: [], startedAt: null };
}
|
|
963
|
+
|
|
964
|
+
/**
 * Persist the parallel queue to disk as pretty-printed JSON, creating the
 * containing directory if needed.
 *
 * @param {Object} queue - Queue object to serialize.
 */
function saveQueue(queue) {
  const queueFile = getQueueFile();
  const directory = path.dirname(queueFile);
  fs.mkdirSync(directory, { recursive: true });
  const serialized = JSON.stringify(queue, null, 2);
  fs.writeFileSync(queueFile, serialized);
}
|
|
969
|
+
|
|
970
|
+
// --- Pipeline Execution ---
|
|
971
|
+
|
|
972
|
+
/**
 * Spawn the configured CLI pipeline for one feature inside its git worktree
 * and stream its output to a per-feature log file.
 *
 * The returned promise always resolves (never rejects): failures are encoded
 * in the result object so Promise.race in the orchestrator cannot blow up.
 *
 * @param {string} slug - Feature identifier; appended to the skill arguments.
 * @param {string} worktreePath - Directory the child process runs in.
 * @param {Object|null} [config] - Parallel config; read from disk when null.
 * @param {Object} [options]
 * @param {boolean} [options.verbose] - Inherit stdio instead of logging to file.
 * @returns {Promise<{slug: string, success: boolean, exitCode?: number, error?: string, logPath: string}>}
 */
function runPipelineInWorktree(slug, worktreePath, config = null, options = {}) {
  const cfg = config || readParallelConfig();
  // The configured command strings are split on single spaces to build argv:
  // the first token of `cli` is the executable, the remainder become leading
  // arguments. NOTE(review): arguments containing spaces are not supported by
  // this split — confirm whether quoted config values need handling.
  const cliParts = cfg.cli.split(' ');
  const skillParts = cfg.skill.split(' ');
  const flagParts = cfg.skillFlags ? cfg.skillFlags.split(' ') : [];
  const allArgs = [...cliParts.slice(1), ...skillParts, slug, ...flagParts];

  // Create log stream and record a header before the child starts, so even
  // an immediate spawn failure leaves a traceable log.
  const log = createLogStream(slug, cfg);
  log.stream.write(`[${new Date().toISOString()}] Pipeline started for ${slug}\n`);
  log.stream.write(`[${new Date().toISOString()}] Command: ${cliParts[0]} ${allArgs.join(' ')}\n`);
  log.stream.write(`[${new Date().toISOString()}] Working directory: ${worktreePath}\n\n`);

  return new Promise((resolve) => {
    const proc = spawn(cliParts[0], allArgs, {
      cwd: worktreePath,
      // Verbose mode shares the parent's terminal; otherwise stdout/stderr
      // are piped so they can be captured into the log file below.
      stdio: options.verbose ? 'inherit' : ['pipe', 'pipe', 'pipe'],
      shell: true
    });

    // Track process for abort handling (setupAbortHandler kills these PIDs).
    runningProcesses.set(slug, { pid: proc.pid, process: proc });

    if (!options.verbose) {
      // Log stdout
      if (proc.stdout) {
        proc.stdout.on('data', (data) => {
          logWithTimestamp(log.stream, 'stdout', data);
        });
      }

      // Log stderr
      if (proc.stderr) {
        proc.stderr.on('data', (data) => {
          logWithTimestamp(log.stream, 'stderr', data);
        });
      }
    }

    // Normal termination path: untrack, footer the log, resolve with the
    // exit code (success iff the child exited 0).
    proc.on('close', (code) => {
      runningProcesses.delete(slug);
      log.stream.write(`\n[${new Date().toISOString()}] Pipeline completed with exit code ${code}\n`);
      log.stream.end();
      resolve({ slug, success: code === 0, exitCode: code, logPath: log.path });
    });

    // Spawn/IO failure path (e.g. executable not found): resolve with the
    // error message instead of rejecting.
    proc.on('error', (err) => {
      runningProcesses.delete(slug);
      log.stream.write(`\n[${new Date().toISOString()}] Pipeline error: ${err.message}\n`);
      log.stream.end();
      resolve({ slug, success: false, error: err.message, logPath: log.path });
    });
  });
}
|
|
1026
|
+
|
|
1027
|
+
// --- Main Orchestration ---
|
|
1028
|
+
|
|
1029
|
+
/**
 * Print what a parallel run *would* do — git checks, configuration, the
 * initial batch with worktree/branch/command details, the queued remainder,
 * and the execution plan — without touching the repository.
 *
 * @param {string[]} slugs - Feature slugs requested for the run.
 * @param {Object} config - Effective run config (maxConcurrency, ...).
 * @param {string} baseBranch - Branch completed features would merge into.
 * @param {Object} gitStatus - Result of checkGitStatus().
 * @param {{valid: boolean, errors: string[]}} validation - Pre-flight result.
 * @param {Object|null} [batchValidation] - Batch validation result, if run.
 * @returns {{success: true, dryRun: true}}
 */
function dryRun(slugs, config, baseBranch, gitStatus, validation, batchValidation = null) {
  const parallelCfg = readParallelConfig();
  const { active, queued } = splitByLimit(slugs, config.maxConcurrency);

  console.log('\n=== DRY RUN MODE ===\n');
  console.log('Git Checks:');
  console.log(`  ${gitStatus.isGitRepo ? '✓' : '✗'} Git repository: ${gitStatus.isGitRepo ? 'yes' : 'no'}`);
  console.log(`  ${!gitStatus.isDirty ? '✓' : '✗'} Working tree: ${gitStatus.isDirty ? 'dirty (has uncommitted changes)' : 'clean'}`);
  console.log(`  ✓ Git version: ${gitStatus.gitVersion}`);
  console.log(`  ✓ Base branch: ${baseBranch}`);

  if (!validation.valid) {
    console.log(`\n⚠️ WARNING: Git checks failed. Real execution would abort.`);
    validation.errors.forEach(e => console.log(`  - ${e}`));
  }

  // Show batch validation results (already printed in runParallel if issues found)
  if (batchValidation && !batchValidation.valid) {
    console.log(`\n⚠️ WARNING: Feature validation failed. Real execution would abort.`);
  }

  console.log(`\nConfiguration:`);
  console.log(`  Max concurrency: ${config.maxConcurrency}`);
  console.log(`  Max features: ${parallelCfg.maxFeatures}`);
  console.log(`  Timeout: ${parallelCfg.timeout} min per pipeline`);
  console.log(`  Min disk space: ${parallelCfg.minDiskSpaceMB} MB`);
  console.log(`  CLI: ${parallelCfg.cli}`);
  console.log(`  Skill: ${parallelCfg.skill}`);
  console.log(`  Flags: ${parallelCfg.skillFlags || '(none)'}`);
  console.log(`  Worktree dir: ${parallelCfg.worktreeDir}`);
  console.log(`  Total features: ${slugs.length}`);

  console.log(`\nInitial batch (${active.length} features):`);
  active.forEach(slug => {
    // Hoisted: the worktree path was previously computed twice per feature
    // (once for display, once inside buildPipelineCommand).
    const worktreePath = buildWorktreePath(slug, parallelCfg);
    console.log(`  → ${slug}`);
    console.log(`    Worktree: ${worktreePath}`);
    console.log(`    Branch: ${buildBranchName(slug)}`);
    console.log(`    Command: ${buildPipelineCommand(slug, worktreePath, parallelCfg)}`);
  });

  if (queued.length > 0) {
    console.log(`\nQueued (${queued.length} features, will start as slots free):`);
    queued.forEach(slug => {
      console.log(`  ⏳ ${slug}`);
    });
  }

  console.log(`\nExecution plan:`);
  console.log(`  1. Create ${active.length} git worktrees`);
  console.log(`  2. Spawn ${active.length} parallel pipeline processes`);
  console.log(`  3. As each completes: merge to ${baseBranch}, cleanup worktree`);
  if (queued.length > 0) {
    console.log(`  4. Promote queued features as slots free`);
  }
  console.log(`  5. Report final summary`);

  console.log(`\nTo execute for real, run without --dry-run`);
  console.log('===================\n');

  return { success: true, dryRun: true };
}
|
|
1090
|
+
|
|
1091
|
+
/**
 * Orchestrate parallel feature pipelines end to end: validate, lock, confirm,
 * run up to maxConcurrency pipelines in worktrees, merge each completed
 * branch back to the base branch, promote queued features as slots free,
 * and report a final summary.
 *
 * Fixes over the previous revision:
 *  - Promoted (queued) features now receive `options`, so flags like
 *    --verbose apply to them exactly as they did to the initial batch.
 *  - `git merge --abort` after a conflict is guarded; its failure no longer
 *    kills the whole orchestration loop.
 *
 * @param {string[]} slugs - Feature slugs to run.
 * @param {Object} [options] - dryRun, yes, force, strict, skipPreflight,
 *   verbose, plus overrides merged over getDefaultConfig().
 * @returns {Promise<Object>} Result object; `success` is true only when no
 *   feature failed and no merge conflicted.
 */
async function runParallel(slugs, options = {}) {
  const config = { ...getDefaultConfig(), ...options };
  const baseBranch = getCurrentBranch();

  // Pre-flight validation
  const gitStatus = checkGitStatus();
  const validation = validatePreFlight(gitStatus);

  // Batch validation (unless skipped)
  let batchValidation = null;
  if (!options.skipPreflight) {
    batchValidation = validateParallelBatch(slugs);

    // Show pre-flight results in dry-run or if there are issues
    if (options.dryRun || !batchValidation.valid || batchValidation.fileOverlaps.length > 0 || batchValidation.dependencies.length > 0) {
      console.log(formatPreflightResults(batchValidation));
    }

    // Block if there are invalid features
    if (!batchValidation.valid && !options.dryRun) {
      console.error('\nCannot proceed. Fix issues above or use --skip-preflight to override.\n');
      console.error('Suggested commands:');
      for (const inv of batchValidation.invalidFeatures) {
        if (!inv.specExists) {
          console.error(`  /implement-feature "${inv.slug}" --pause-after=alex`);
        } else if (!inv.storiesExist) {
          console.error(`  /implement-feature "${inv.slug}" --pause-after=cass`);
        }
      }
      return { success: false, error: 'preflight-failed', validation: batchValidation };
    }

    // Warn about conflicts but allow proceeding with confirmation
    if (batchValidation.fileOverlaps.length > 0 && !options.dryRun && !options.yes) {
      console.warn('\n⚠ File overlaps detected - merge conflicts are likely.\n');
    }
  }

  // Dry run mode - show what would happen without executing
  if (options.dryRun) {
    return dryRun(slugs, config, baseBranch, gitStatus, validation, batchValidation);
  }

  if (!validation.valid) {
    console.error('Pre-flight validation failed:');
    validation.errors.forEach(e => console.error(`  - ${e}`));
    return { success: false, errors: validation.errors };
  }

  // Check feature limit
  const limitCheck = validateFeatureLimit(slugs);
  if (!limitCheck.valid) {
    console.error(`\nError: ${limitCheck.error}`);
    console.error(`\nTo increase limit: orchestr8 parallel-config set maxFeatures <N>\n`);
    return { success: false, error: 'feature-limit-exceeded' };
  }

  // Check disk space (warn but don't block unless --strict)
  const diskCheck = validateDiskSpace();
  if (!diskCheck.valid) {
    console.warn(`\nWarning: ${diskCheck.error}`);
    if (options.strict) {
      console.error('Use --skip-disk-check to proceed anyway.\n');
      return { success: false, error: 'low-disk-space' };
    }
    console.warn('Proceeding anyway...\n');
  }

  // Check lock (unless forcing)
  if (!options.force) {
    const lockResult = acquireLock(slugs);
    if (!lockResult.acquired) {
      const lock = lockResult.existingLock;
      console.error('\nError: Another parallel execution is in progress');
      console.error(`  PID: ${lock.pid}`);
      console.error(`  Started: ${lock.startedAt}`);
      console.error(`  Features: ${lock.features.join(', ')}`);
      console.error('\nOptions:');
      console.error('  • Wait for it to complete');
      console.error('  • Run: orchestr8 parallel status');
      console.error('  • Force override: orchestr8 parallel ... --force\n');
      return { success: false, error: 'locked' };
    }
  } else {
    // Force mode - acquire lock anyway
    const lock = getLockInfo();
    if (lock) {
      console.log(`Warning: Overriding existing lock (PID: ${lock.pid})\n`);
    }
    acquireLock(slugs);
  }

  // Confirmation prompt (unless --yes flag)
  if (!options.yes) {
    const confirmMsg = buildConfirmMessage(slugs, config);
    const confirmed = await promptConfirm(confirmMsg);
    if (!confirmed) {
      releaseLock();
      console.log('\nAborted.\n');
      return { success: true, aborted: true };
    }
  }

  console.log(`\nStarting parallel pipelines for ${slugs.length} features`);
  console.log(`Base branch: ${baseBranch}`);
  console.log(`Max concurrency: ${config.maxConcurrency}\n`);

  // Initialize queue (persisted so status/abort commands can inspect it)
  const queue = {
    features: slugs.map(slug => ({
      slug,
      status: 'parallel_queued',
      worktreePath: null,
      branchName: null,
      startedAt: null,
      completedAt: null,
      logPath: null
    })),
    startedAt: new Date().toISOString(),
    baseBranch,
    maxConcurrency: config.maxConcurrency
  };
  saveQueue(queue);

  // Setup abort handler for Ctrl+C
  setupAbortHandler(queue);

  const { active, queued } = splitByLimit(slugs, config.maxConcurrency);
  const running = new Map();
  const completed = [];
  let remaining = [...queued];

  try {
    // Start initial batch
    for (const slug of active) {
      await startFeature(slug, queue, running, options);
    }

    // Process until all complete
    while (running.size > 0 || remaining.length > 0) {
      // Wait for any running pipeline to complete
      if (running.size > 0) {
        const result = await Promise.race(running.values());
        running.delete(result.slug);

        // Update feature state
        const feature = queue.features.find(f => f.slug === result.slug);
        feature.completedAt = new Date().toISOString();

        // Update log path from result
        if (result.logPath) {
          feature.logPath = result.logPath;
        }

        const timestamp = new Date().toISOString().slice(11, 19);

        if (result.success) {
          feature.status = 'merge_pending';
          console.log(`[${timestamp}] ${result.slug}: Completed ✓`);

          // Attempt merge
          const mergeResult = mergeBranch(result.slug);
          if (mergeResult.success) {
            feature.status = 'parallel_complete';
            console.log(`[${timestamp}] ${result.slug}: Merged to ${baseBranch} ✓`);
            removeWorktree(result.slug);
          } else if (mergeResult.conflict) {
            feature.status = 'merge_conflict';
            feature.conflictDetails = mergeResult.output;
            console.log(`[${timestamp}] ${result.slug}: Merge conflict ⚠ (branch preserved)`);
            // Guarded: if the abort itself fails (e.g. no merge in progress),
            // keep processing the remaining features instead of crashing.
            try {
              execSync('git merge --abort', { stdio: 'pipe' });
            } catch {
              // Nothing to abort or abort failed; state already recorded.
            }
          } else {
            feature.status = 'parallel_failed';
            console.log(`[${timestamp}] ${result.slug}: Merge failed ✗`);
          }
        } else {
          feature.status = 'parallel_failed';
          if (result.timedOut) {
            console.log(`[${timestamp}] ${result.slug}: Timed out ⏱ (see log: ${feature.logPath})`);
            feature.timedOut = true;
          } else {
            console.log(`[${timestamp}] ${result.slug}: Failed ✗ (see log: ${feature.logPath})`);
          }
          // Preserve worktree for debugging
        }

        completed.push(feature);
        saveQueue(queue);

        // Promote from queue if slots available. Pass `options` through so
        // promoted features honor the same flags (e.g. --verbose) as the
        // initial batch — previously options were silently dropped here.
        if (remaining.length > 0 && running.size < config.maxConcurrency) {
          const nextSlug = remaining.shift();
          await startFeature(nextSlug, queue, running, options);
        }
      }
    }

    // Final summary
    const summary = summarizeFinal(queue.features);
    console.log('\n--- Parallel Execution Complete ---');
    console.log(`Completed: ${summary.completed}`);
    console.log(`Failed: ${summary.failed}`);
    console.log(`Conflicts: ${summary.conflicts}`);

    if (summary.conflicts > 0) {
      console.log('\nFeatures with conflicts (branches preserved):');
      queue.features
        .filter(f => f.status === 'merge_conflict')
        .forEach(f => console.log(`  - ${f.branchName}`));
    }

    if (summary.failed > 0) {
      console.log('\nFailed features (worktrees preserved for debugging):');
      queue.features
        .filter(f => f.status === 'parallel_failed')
        .forEach(f => {
          console.log(`  - ${f.worktreePath}`);
          if (f.logPath) {
            console.log(`    Log: ${f.logPath}`);
          }
        });
    }

    return { success: summary.failed === 0 && summary.conflicts === 0, summary };
  } finally {
    // Always release lock when done
    releaseLock();
  }
}
|
|
1320
|
+
|
|
1321
|
+
/**
 * Create a worktree for one feature, launch its pipeline with a timeout, and
 * register the in-flight promise in `running`.
 *
 * The queue is persisted twice on purpose: once after the worktree exists
 * (`worktree_created`) and again once the pipeline is launched
 * (`parallel_running`), so a crash between the two steps leaves an accurate
 * on-disk state for recovery/cleanup.
 *
 * @param {string} slug - Feature identifier.
 * @param {Object} queue - Live queue object; the matching feature entry is mutated.
 * @param {Map<string, Promise>} running - Map of slug -> in-flight pipeline promise.
 * @param {Object} [options] - Forwarded to runPipelineInWorktree (e.g. verbose).
 */
async function startFeature(slug, queue, running, options = {}) {
  const feature = queue.features.find(f => f.slug === slug);
  const parallelCfg = readParallelConfig();

  console.log(`[${new Date().toISOString().slice(11, 19)}] ${slug}: Creating worktree...`);
  const { worktreePath, branchName } = createWorktree(slug);

  feature.worktreePath = worktreePath;
  feature.branchName = branchName;
  feature.status = 'worktree_created';
  feature.startedAt = new Date().toISOString();

  // Set log path (the pipeline writes its log inside its own worktree)
  feature.logPath = path.join(worktreePath, 'pipeline.log');

  saveQueue(queue);

  const timeoutMs = getTimeoutMs();
  const timeoutMin = timeoutMs / 60000;
  console.log(`[${new Date().toISOString().slice(11, 19)}] ${slug}: Started (log: ${feature.logPath}, timeout: ${timeoutMin}min)`);
  feature.status = 'parallel_running';
  saveQueue(queue);

  // Wrap the pipeline promise so a hung pipeline resolves as timed out
  // instead of blocking Promise.race forever.
  const pipelinePromise = runPipelineInWorktree(slug, worktreePath, parallelCfg, options);
  const promise = withTimeout(pipelinePromise, timeoutMs, slug);
  running.set(slug, promise);
}
|
|
1348
|
+
|
|
1349
|
+
// --- Rollback ---
|
|
1350
|
+
|
|
1351
|
+
/**
 * Roll back the results of a parallel run: revert the merge commits of
 * completed features, remove the worktrees of failed/conflicted ones, and
 * (by default) clear the persisted queue.
 *
 * Fix: the previous revision called `.catch()` on the return value of
 * `execSync('git revert --abort')` — execSync returns a Buffer, not a
 * Promise, so the error path itself threw a TypeError. The abort is now
 * wrapped in try/catch.
 *
 * @param {Object} [options]
 * @param {boolean} [options.dryRun] - Report actions without performing them.
 * @param {boolean} [options.force] - Skip `git revert --abort` on failure.
 * @param {boolean} [options.preserveQueue] - Keep the queue file afterwards.
 * @returns {Promise<{success: boolean, rolledBack: number}>}
 */
async function rollbackParallel(options = {}) {
  const queue = loadQueue();

  if (!queue.features || queue.features.length === 0) {
    console.log('No parallel run to rollback.');
    return { success: true, rolledBack: 0 };
  }

  const completedFeatures = queue.features.filter(f => f.status === 'parallel_complete');
  const failedFeatures = queue.features.filter(f =>
    f.status === 'parallel_failed' || f.status === 'merge_conflict'
  );

  if (completedFeatures.length === 0 && failedFeatures.length === 0) {
    console.log('No completed or failed features to rollback.');
    return { success: true, rolledBack: 0 };
  }

  console.log('\nParallel Run Rollback\n');

  if (options.dryRun) {
    console.log('DRY RUN - No changes will be made\n');
  }

  let rolledBack = 0;

  // Rollback completed features (revert merges)
  for (const f of completedFeatures) {
    console.log(`Rolling back ${f.slug}...`);
    if (!options.dryRun) {
      try {
        // Find the merge commit by grepping the log for the slug.
        // NOTE(review): the slug is interpolated into a shell command; slugs
        // are locally-configured identifiers, but sanitize if they can ever
        // come from untrusted input.
        const logOutput = execSync(
          `git log --oneline --grep="${f.slug}" -n 1`,
          { encoding: 'utf8' }
        ).trim();

        if (logOutput) {
          const commitHash = logOutput.split(' ')[0];
          execSync(`git revert --no-commit ${commitHash}`, { stdio: 'pipe' });
          execSync(`git commit -m "Revert: ${f.slug} (parallel rollback)"`, { stdio: 'pipe' });
          console.log(`  ✓ Reverted commit ${commitHash}`);
          rolledBack++;
        } else {
          console.log(`  ⚠ Could not find merge commit for ${f.slug}`);
        }
      } catch (err) {
        console.log(`  ✗ Failed to rollback: ${err.message}`);
        if (!options.force) {
          // Best effort: leave the tree clean if a revert was in progress.
          // (execSync is synchronous — errors must be caught, not .catch()ed.)
          try {
            execSync('git revert --abort', { stdio: 'pipe' });
          } catch {
            // No revert in progress; nothing to abort.
          }
        }
      }
    } else {
      console.log(`  Would revert merge for ${f.slug}`);
      rolledBack++;
    }
  }

  // Clean up failed/conflict worktrees
  for (const f of failedFeatures) {
    if (f.worktreePath) {
      console.log(`Cleaning up ${f.slug}...`);
      if (!options.dryRun) {
        try {
          removeWorktree(f.slug);
          console.log(`  ✓ Removed worktree`);
          rolledBack++;
        } catch {
          console.log(`  ⚠ Could not remove worktree`);
        }
      } else {
        console.log(`  Would remove worktree: ${f.worktreePath}`);
        rolledBack++;
      }
    }
  }

  // Clear the queue
  if (!options.dryRun && !options.preserveQueue) {
    saveQueue({ features: [], startedAt: null });
    console.log('\n✓ Queue cleared');
  }

  console.log(`\nRollback complete: ${rolledBack} item(s) processed`);

  return { success: true, rolledBack };
}
|
|
1439
|
+
|
|
1440
|
+
/**
 * Remove the worktrees of all features the cleanup policy says are safe to
 * delete (per shouldCleanupWorktree), reporting each result.
 *
 * @returns {Promise<number>} Count of worktrees successfully removed.
 */
async function cleanupWorktrees() {
  const queue = loadQueue();
  let cleaned = 0;

  for (const feature of queue.features) {
    // Skip features the policy keeps (e.g. failed runs kept for debugging)
    // and features that never got a worktree.
    if (!shouldCleanupWorktree(feature) || !feature.worktreePath) {
      continue;
    }
    try {
      removeWorktree(feature.slug);
      console.log(`Cleaned up: ${feature.worktreePath}`);
      cleaned += 1;
    } catch {
      console.log(`Could not clean: ${feature.worktreePath}`);
    }
  }

  if (cleaned === 0) {
    console.log('No worktrees to clean up.');
  }

  return cleaned;
}
|
|
1462
|
+
|
|
1463
|
+
// Public API surface of the parallel-execution module, grouped by concern.
// Many helpers are exported primarily so the CLI and tests can exercise them
// directly.
module.exports = {
  // Configuration
  CONFIG_FILE,
  LOCK_FILE,
  getDefaultParallelConfig,
  readParallelConfig,
  writeParallelConfig,
  getQueueFile,
  // Utility functions
  buildWorktreePath,
  buildBranchName,
  getDefaultConfig,
  getQueuePath,
  shouldCleanupWorktree,
  validatePreFlight,
  isGitVersionSupported,
  splitByLimit,
  promoteFromQueue,
  buildPipelineCommand,
  canFastForward,
  hasMergeConflict,
  handleMergeConflict,
  orderByCompletion,
  transition,
  formatStatus,
  formatFeatureStatus,
  summarizeFinal,
  aggregateResults,
  abortFeature,
  abortAll,
  // Confirmation & Lock
  promptConfirm,
  buildConfirmMessage,
  acquireLock,
  releaseLock,
  getLockInfo,
  // Logging
  createLogStream,
  logWithTimestamp,
  // Feature limit
  validateFeatureLimit,
  // Disk space
  checkDiskSpace,
  validateDiskSpace,
  // Pre-flight batch validation
  validateFeatureSpec,
  extractFilesToModify,
  detectFileOverlap,
  detectDependencies,
  estimateScope,
  validateParallelBatch,
  formatPreflightResults,
  // Timeout
  withTimeout,
  getTimeoutMs,
  // Progress tracking
  getProgressFromLog,
  getDetailedStatus,
  formatDetailedStatus,
  progressBar,
  // Abort handling
  abortParallel,
  setupAbortHandler,
  // Rollback
  rollbackParallel,
  // Git operations
  checkGitStatus,
  createWorktree,
  removeWorktree,
  mergeBranch,
  getCurrentBranch,
  // Queue management
  loadQueue,
  saveQueue,
  QUEUE_FILE,
  // Execution
  dryRun,
  runPipelineInWorktree,
  runParallel,
  startFeature,
  cleanupWorktrees
};
|