@jojonax/codex-copilot 1.5.3 → 1.5.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/commands/fix.js +49 -1
- package/src/commands/retry.js +38 -1
- package/src/commands/run.js +57 -4
- package/src/utils/checkpoint.js +99 -13
- package/src/utils/git.js +262 -7
- package/src/utils/github.js +58 -1
- package/src/utils/json.js +24 -1
- package/src/utils/self-heal.js +330 -0
package/package.json
CHANGED
package/src/commands/fix.js
CHANGED
|
@@ -288,13 +288,61 @@ export async function fix(projectDir) {
|
|
|
288
288
|
process.exit(1);
|
|
289
289
|
}
|
|
290
290
|
|
|
291
|
-
|
|
291
|
+
// Phase 1: JSON file repair
|
|
292
|
+
log.info('Scanning .codex-copilot/ for JSON issues...');
|
|
292
293
|
log.blank();
|
|
293
294
|
|
|
294
295
|
const results = runRepairs(copilotDir);
|
|
295
296
|
printReport(results);
|
|
297
|
+
|
|
298
|
+
// Phase 2: Git state self-healing
|
|
299
|
+
log.info('Checking git state...');
|
|
300
|
+
log.blank();
|
|
301
|
+
|
|
302
|
+
try {
|
|
303
|
+
const { git } = await import('../utils/git.js');
|
|
304
|
+
|
|
305
|
+
// Lock files
|
|
306
|
+
const locks = git.clearStaleLocks(projectDir);
|
|
307
|
+
if (locks.length > 0) {
|
|
308
|
+
log.info(`🔧 Removed ${locks.length} stale lock file(s)`);
|
|
309
|
+
} else {
|
|
310
|
+
log.info('✅ No stale lock files');
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
// Index state (merge/rebase/cherry-pick)
|
|
314
|
+
git.resolveIndex(projectDir);
|
|
315
|
+
|
|
316
|
+
// Detached HEAD
|
|
317
|
+
const recovered = git.recoverDetachedHead(projectDir);
|
|
318
|
+
if (recovered) {
|
|
319
|
+
log.info(`🔧 Recovered from detached HEAD → ${recovered}`);
|
|
320
|
+
} else {
|
|
321
|
+
log.info('✅ HEAD is attached to a branch');
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
// Stash check
|
|
325
|
+
const stash = git.checkOrphanedStash(projectDir);
|
|
326
|
+
if (stash.found) {
|
|
327
|
+
log.warn(`⚠ Found ${stash.count} stash entry(s) — run 'git stash list' to review`);
|
|
328
|
+
} else {
|
|
329
|
+
log.info('✅ No orphaned stashes');
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
// Repo integrity
|
|
333
|
+
const integrity = git.verifyRepo(projectDir);
|
|
334
|
+
if (integrity.ok) {
|
|
335
|
+
log.info('✅ Git repository integrity OK');
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
log.blank();
|
|
339
|
+
} catch (err) {
|
|
340
|
+
log.warn(`Git state check failed: ${err.message}`);
|
|
341
|
+
log.blank();
|
|
342
|
+
}
|
|
296
343
|
}
|
|
297
344
|
|
|
345
|
+
|
|
298
346
|
/**
|
|
299
347
|
* Auto-fix — called programmatically by other commands when errors are detected.
|
|
300
348
|
* Returns true if all files are healthy (either clean or successfully repaired).
|
package/src/commands/retry.js
CHANGED
|
@@ -14,6 +14,7 @@ import { git } from '../utils/git.js';
|
|
|
14
14
|
import { github } from '../utils/github.js';
|
|
15
15
|
import { createCheckpoint } from '../utils/checkpoint.js';
|
|
16
16
|
import { readJSON, writeJSON } from '../utils/json.js';
|
|
17
|
+
import { preFlightCheck } from '../utils/self-heal.js';
|
|
17
18
|
|
|
18
19
|
|
|
19
20
|
|
|
@@ -45,6 +46,11 @@ export async function retry(projectDir) {
|
|
|
45
46
|
// Ensure retry context directory exists
|
|
46
47
|
mkdirSync(retryDir, { recursive: true });
|
|
47
48
|
|
|
49
|
+
// Pre-flight: resolve any git/environment issues before retrying
|
|
50
|
+
log.blank();
|
|
51
|
+
preFlightCheck(projectDir, 'main', {});
|
|
52
|
+
log.blank();
|
|
53
|
+
|
|
48
54
|
for (const task of blocked) {
|
|
49
55
|
const reason = task.block_reason || 'unknown';
|
|
50
56
|
log.info(` #${task.id}: ${task.title}`);
|
|
@@ -54,8 +60,14 @@ export async function retry(projectDir) {
|
|
|
54
60
|
await recoverReviewBlocked(projectDir, task, checkpoint, retryDir);
|
|
55
61
|
} else if (reason === 'merge_failed') {
|
|
56
62
|
recoverMergeBlocked(task, checkpoint);
|
|
63
|
+
} else if (reason === 'git_checkout_failed') {
|
|
64
|
+
recoverGitBlocked(projectDir, task, checkpoint, retryDir);
|
|
65
|
+
} else if (reason === 'ai_timeout') {
|
|
66
|
+
// AI provider timed out — clear checkpoint so it retries from prompt step
|
|
67
|
+
checkpoint.clearTask(task.id);
|
|
68
|
+
log.dim(' Cleared checkpoint — will retry AI execution');
|
|
57
69
|
} else {
|
|
58
|
-
// dev_failed or unknown
|
|
70
|
+
// dev_failed, rate_limited, or unknown
|
|
59
71
|
recoverDevBlocked(projectDir, task, checkpoint, retryDir);
|
|
60
72
|
}
|
|
61
73
|
|
|
@@ -151,3 +163,28 @@ function recoverMergeBlocked(task, checkpoint) {
|
|
|
151
163
|
checkpoint.clearStep(task.id, 'merge', 'merged');
|
|
152
164
|
log.dim(' Reset merge step \u2014 will retry merge on next run');
|
|
153
165
|
}
|
|
166
|
+
|
|
167
|
+
/**
|
|
168
|
+
* Git-checkout-blocked: resolve index issues and clear develop checkpoint.
|
|
169
|
+
* The pre-flight check already resolved most git issues;
|
|
170
|
+
* here we just reset the checkpoint so the task retries from branch creation.
|
|
171
|
+
*/
|
|
172
|
+
function recoverGitBlocked(projectDir, task, checkpoint, retryDir) {
|
|
173
|
+
// Resolve any lingering index issues
|
|
174
|
+
git.resolveIndex(projectDir);
|
|
175
|
+
git.clearStaleLocks(projectDir);
|
|
176
|
+
|
|
177
|
+
// Save context for retry prompt
|
|
178
|
+
const contextPath = resolve(retryDir, `${task.id}.md`);
|
|
179
|
+
writeFileSync(contextPath, [
|
|
180
|
+
`# Retry Context for Task #${task.id}`,
|
|
181
|
+
'',
|
|
182
|
+
'## Previous Attempt',
|
|
183
|
+
'The task failed due to a git checkout error (corrupted index, stuck merge/rebase, or lock file).',
|
|
184
|
+
'The git state has been automatically repaired. Try again with the same approach.',
|
|
185
|
+
].join('\n'));
|
|
186
|
+
log.dim(' Resolved git state and saved retry context');
|
|
187
|
+
|
|
188
|
+
// Clear all checkpoints — restart from scratch
|
|
189
|
+
checkpoint.clearTask(task.id);
|
|
190
|
+
}
|
package/src/commands/run.js
CHANGED
|
@@ -14,6 +14,7 @@ import { closePrompt } from '../utils/prompt.js';
|
|
|
14
14
|
import { createCheckpoint } from '../utils/checkpoint.js';
|
|
15
15
|
import { provider } from '../utils/provider.js';
|
|
16
16
|
import { readJSON, writeJSON } from '../utils/json.js';
|
|
17
|
+
import { preFlightCheck, releaseLock } from '../utils/self-heal.js';
|
|
17
18
|
|
|
18
19
|
const maxRateLimitRetries = 3;
|
|
19
20
|
|
|
@@ -105,6 +106,7 @@ export async function run(projectDir) {
|
|
|
105
106
|
log.warn('Interrupt received — saving checkpoint...');
|
|
106
107
|
// State is already saved at each step, just need to save tasks.json
|
|
107
108
|
writeJSON(tasksPath, tasks);
|
|
109
|
+
releaseLock(projectDir);
|
|
108
110
|
log.info('✅ Checkpoint saved. Run `codex-copilot run` to resume.');
|
|
109
111
|
log.blank();
|
|
110
112
|
closePrompt();
|
|
@@ -119,6 +121,21 @@ export async function run(projectDir) {
|
|
|
119
121
|
log.info(`Total tasks: ${tasks.total}`);
|
|
120
122
|
log.info(`Base branch: ${baseBranch}`);
|
|
121
123
|
|
|
124
|
+
// ===== Pre-flight self-healing =====
|
|
125
|
+
const health = preFlightCheck(projectDir, baseBranch, { checkpoint, tasks });
|
|
126
|
+
if (!health.ok) {
|
|
127
|
+
log.blank();
|
|
128
|
+
log.error('Pre-flight check failed — cannot proceed:');
|
|
129
|
+
for (const b of health.blockers) {
|
|
130
|
+
log.error(` 🛑 ${b}`);
|
|
131
|
+
}
|
|
132
|
+
log.blank();
|
|
133
|
+
releaseLock(projectDir);
|
|
134
|
+
closePrompt();
|
|
135
|
+
process.exit(1);
|
|
136
|
+
}
|
|
137
|
+
log.blank();
|
|
138
|
+
|
|
122
139
|
// ===== Pre-flight: ensure base branch is committed & pushed =====
|
|
123
140
|
await ensureBaseReady(projectDir, baseBranch, isPrivate);
|
|
124
141
|
|
|
@@ -329,6 +346,7 @@ export async function run(projectDir) {
|
|
|
329
346
|
log.blank();
|
|
330
347
|
process.removeListener('SIGINT', gracefulShutdown);
|
|
331
348
|
process.removeListener('SIGTERM', gracefulShutdown);
|
|
349
|
+
releaseLock(projectDir);
|
|
332
350
|
|
|
333
351
|
// ===== Auto-evolve: trigger next round if enabled =====
|
|
334
352
|
if (config.auto_evolve !== false) {
|
|
@@ -351,14 +369,28 @@ async function developPhase(projectDir, task, baseBranch, checkpoint, providerId
|
|
|
351
369
|
|
|
352
370
|
// Step 1: Switch to feature branch
|
|
353
371
|
if (!checkpoint.isStepDone(task.id, 'develop', 'branch_created')) {
|
|
354
|
-
|
|
372
|
+
try {
|
|
373
|
+
git.checkoutBranch(projectDir, task.branch, baseBranch);
|
|
374
|
+
} catch (err) {
|
|
375
|
+
log.error(`Failed to switch to branch ${task.branch}: ${err.message}`);
|
|
376
|
+
task.status = 'blocked';
|
|
377
|
+
task.block_reason = 'git_checkout_failed';
|
|
378
|
+
return;
|
|
379
|
+
}
|
|
355
380
|
log.info(`Switched to branch: ${task.branch}`);
|
|
356
381
|
checkpoint.saveStep(task.id, 'develop', 'branch_created', { branch: task.branch });
|
|
357
382
|
} else {
|
|
358
383
|
// Ensure we're on the right branch
|
|
359
384
|
const current = git.currentBranch(projectDir);
|
|
360
385
|
if (current !== task.branch) {
|
|
361
|
-
|
|
386
|
+
try {
|
|
387
|
+
git.checkoutBranch(projectDir, task.branch, baseBranch);
|
|
388
|
+
} catch (err) {
|
|
389
|
+
log.error(`Failed to switch to branch ${task.branch}: ${err.message}`);
|
|
390
|
+
task.status = 'blocked';
|
|
391
|
+
task.block_reason = 'git_checkout_failed';
|
|
392
|
+
return;
|
|
393
|
+
}
|
|
362
394
|
}
|
|
363
395
|
log.dim('⏩ Branch already created');
|
|
364
396
|
}
|
|
@@ -374,13 +406,34 @@ async function developPhase(projectDir, task, baseBranch, checkpoint, providerId
|
|
|
374
406
|
log.dim('⏩ Prompt already generated');
|
|
375
407
|
}
|
|
376
408
|
|
|
377
|
-
// Step 3: Execute via AI Provider (with rate limit auto-retry)
|
|
409
|
+
// Step 3: Execute via AI Provider (with rate limit auto-retry + timeout protection)
|
|
378
410
|
if (!checkpoint.isStepDone(task.id, 'develop', 'codex_complete')) {
|
|
379
411
|
const promptPath = resolve(projectDir, '.codex-copilot/_current_prompt.md');
|
|
380
412
|
let rateLimitRetries = 0;
|
|
413
|
+
const AI_TIMEOUT_MS = 30 * 60 * 1000; // D4: 30 minute timeout for AI execution
|
|
381
414
|
|
|
382
415
|
while (rateLimitRetries < maxRateLimitRetries) {
|
|
383
|
-
|
|
416
|
+
// D4: Race the AI execution against a timeout to prevent infinite hangs
|
|
417
|
+
const timeoutPromise = new Promise((_, reject) =>
|
|
418
|
+
setTimeout(() => reject(new Error('AI_TIMEOUT')), AI_TIMEOUT_MS)
|
|
419
|
+
);
|
|
420
|
+
|
|
421
|
+
let result;
|
|
422
|
+
try {
|
|
423
|
+
result = await Promise.race([
|
|
424
|
+
provider.executePrompt(providerId, promptPath, projectDir),
|
|
425
|
+
timeoutPromise,
|
|
426
|
+
]);
|
|
427
|
+
} catch (timeoutErr) {
|
|
428
|
+
if (timeoutErr.message === 'AI_TIMEOUT') {
|
|
429
|
+
log.error(`AI provider timed out after ${AI_TIMEOUT_MS / 60000} minutes — marking task as blocked`);
|
|
430
|
+
task.status = 'blocked';
|
|
431
|
+
task.block_reason = 'ai_timeout';
|
|
432
|
+
return;
|
|
433
|
+
}
|
|
434
|
+
throw timeoutErr;
|
|
435
|
+
}
|
|
436
|
+
|
|
384
437
|
if (result.ok) {
|
|
385
438
|
log.info('Development complete');
|
|
386
439
|
checkpoint.saveStep(task.id, 'develop', 'codex_complete', { branch: task.branch });
|
package/src/utils/checkpoint.js
CHANGED
|
@@ -8,6 +8,14 @@
|
|
|
8
8
|
import { readFileSync, writeFileSync, renameSync, existsSync } from 'fs';
|
|
9
9
|
import { resolve } from 'path';
|
|
10
10
|
|
|
11
|
+
const PHASE_ORDER = ['develop', 'pr', 'review', 'merge'];
|
|
12
|
+
const STEP_ORDERS = {
|
|
13
|
+
develop: ['branch_created', 'prompt_ready', 'codex_complete'],
|
|
14
|
+
pr: ['committed', 'pushed', 'pr_created'],
|
|
15
|
+
review: ['waiting_review', 'feedback_received', 'fix_applied'],
|
|
16
|
+
merge: ['merged'],
|
|
17
|
+
};
|
|
18
|
+
|
|
11
19
|
/**
|
|
12
20
|
* Create a checkpoint manager for a project
|
|
13
21
|
* @param {string} projectDir - Project root directory
|
|
@@ -45,9 +53,34 @@ export function createCheckpoint(projectDir) {
|
|
|
45
53
|
}
|
|
46
54
|
|
|
47
55
|
/**
|
|
48
|
-
* Mark a specific phase & step as reached
|
|
56
|
+
* Mark a specific phase & step as reached.
|
|
57
|
+
* Includes monotonic progress guard — prevents regression from a later
|
|
58
|
+
* phase back to an earlier one (e.g., 'pr' → 'develop' is blocked).
|
|
49
59
|
*/
|
|
50
60
|
function saveStep(taskId, phase, phaseStep, extra = {}) {
|
|
61
|
+
const current = load();
|
|
62
|
+
|
|
63
|
+
// Monotonic progress guard: only allow forward movement within the same task
|
|
64
|
+
if (current.current_task === taskId && current.phase) {
|
|
65
|
+
const currentPhaseIdx = PHASE_ORDER.indexOf(current.phase);
|
|
66
|
+
const newPhaseIdx = PHASE_ORDER.indexOf(phase);
|
|
67
|
+
|
|
68
|
+
if (newPhaseIdx < currentPhaseIdx) {
|
|
69
|
+
// Attempting to go backward — ignore silently (likely a stale call)
|
|
70
|
+
return current;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// Same phase: ensure step is not going backward
|
|
74
|
+
if (newPhaseIdx === currentPhaseIdx) {
|
|
75
|
+
const steps = STEP_ORDERS[phase] || [];
|
|
76
|
+
const currentStepIdx = steps.indexOf(current.phase_step);
|
|
77
|
+
const newStepIdx = steps.indexOf(phaseStep);
|
|
78
|
+
if (newStepIdx < currentStepIdx) {
|
|
79
|
+
return current; // Step regression — ignore
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
51
84
|
return save({
|
|
52
85
|
current_task: taskId,
|
|
53
86
|
phase,
|
|
@@ -79,9 +112,8 @@ export function createCheckpoint(projectDir) {
|
|
|
79
112
|
if (state.current_task !== taskId) return false;
|
|
80
113
|
if (!state.phase) return false;
|
|
81
114
|
|
|
82
|
-
const
|
|
83
|
-
const
|
|
84
|
-
const targetPhaseIdx = phaseOrder.indexOf(phase);
|
|
115
|
+
const savedPhaseIdx = PHASE_ORDER.indexOf(state.phase);
|
|
116
|
+
const targetPhaseIdx = PHASE_ORDER.indexOf(phase);
|
|
85
117
|
|
|
86
118
|
// If saved phase is ahead, this step is done
|
|
87
119
|
if (savedPhaseIdx > targetPhaseIdx) return true;
|
|
@@ -89,14 +121,7 @@ export function createCheckpoint(projectDir) {
|
|
|
89
121
|
if (savedPhaseIdx < targetPhaseIdx) return false;
|
|
90
122
|
|
|
91
123
|
// Same phase: compare steps within phase
|
|
92
|
-
const
|
|
93
|
-
develop: ['branch_created', 'prompt_ready', 'codex_complete'],
|
|
94
|
-
pr: ['committed', 'pushed', 'pr_created'],
|
|
95
|
-
review: ['waiting_review', 'feedback_received', 'fix_applied'],
|
|
96
|
-
merge: ['merged'],
|
|
97
|
-
};
|
|
98
|
-
|
|
99
|
-
const steps = stepOrders[phase] || [];
|
|
124
|
+
const steps = STEP_ORDERS[phase] || [];
|
|
100
125
|
const savedStepIdx = steps.indexOf(state.phase_step);
|
|
101
126
|
const targetStepIdx = steps.indexOf(phaseStep);
|
|
102
127
|
|
|
@@ -143,7 +168,67 @@ export function createCheckpoint(projectDir) {
|
|
|
143
168
|
return state;
|
|
144
169
|
}
|
|
145
170
|
|
|
146
|
-
|
|
171
|
+
/**
|
|
172
|
+
* E1/E4: Validate consistency between checkpoint state and tasks status.
|
|
173
|
+
* Detects and repairs mismatches such as:
|
|
174
|
+
* - Task marked 'in_progress' but no checkpoint data for it
|
|
175
|
+
* - Checkpoint points to a task that's already completed
|
|
176
|
+
* - Multiple tasks marked 'in_progress' simultaneously
|
|
177
|
+
* @param {object} tasks - The tasks.json data structure
|
|
178
|
+
* @returns {{ ok: boolean, repairs: string[] }}
|
|
179
|
+
*/
|
|
180
|
+
function validateConsistency(tasks) {
|
|
181
|
+
const state = load();
|
|
182
|
+
const repairs = [];
|
|
183
|
+
|
|
184
|
+
if (!tasks?.tasks || !Array.isArray(tasks.tasks)) {
|
|
185
|
+
return { ok: true, repairs: [] };
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
// Check 1: Multiple in_progress tasks (should only be one at a time)
|
|
189
|
+
const inProgress = tasks.tasks.filter(t => t.status === 'in_progress');
|
|
190
|
+
if (inProgress.length > 1) {
|
|
191
|
+
// Keep the one matching checkpoint, reset others to pending
|
|
192
|
+
for (const task of inProgress) {
|
|
193
|
+
if (task.id !== state.current_task) {
|
|
194
|
+
task.status = 'pending';
|
|
195
|
+
repairs.push(`Task #${task.id}: was 'in_progress' but not current — reset to 'pending'`);
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
// Check 2: Task is in_progress but checkpoint has no data for it
|
|
201
|
+
if (inProgress.length === 1 && state.current_task !== inProgress[0].id) {
|
|
202
|
+
if (!state.phase) {
|
|
203
|
+
// Checkpoint is empty — task was marked in_progress but never started
|
|
204
|
+
inProgress[0].status = 'pending';
|
|
205
|
+
repairs.push(`Task #${inProgress[0].id}: marked 'in_progress' but no checkpoint data — reset to 'pending'`);
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
// Check 3: Checkpoint points to a completed task (stale checkpoint)
|
|
210
|
+
if (state.current_task > 0 && state.phase) {
|
|
211
|
+
const checkpointTask = tasks.tasks.find(t => t.id === state.current_task);
|
|
212
|
+
if (checkpointTask && checkpointTask.status === 'completed') {
|
|
213
|
+
// Checkpoint is stale — clear it
|
|
214
|
+
save({ phase: null, phase_step: null, current_pr: null, review_round: 0, branch: null });
|
|
215
|
+
repairs.push(`Checkpoint for task #${state.current_task} cleared (task already completed)`);
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
// Check 4: Checkpoint task ID doesn't exist in tasks list
|
|
220
|
+
if (state.current_task > 0) {
|
|
221
|
+
const exists = tasks.tasks.find(t => t.id === state.current_task);
|
|
222
|
+
if (!exists) {
|
|
223
|
+
save({ current_task: 0, phase: null, phase_step: null, current_pr: null, review_round: 0, branch: null });
|
|
224
|
+
repairs.push(`Checkpoint referenced non-existent task #${state.current_task} — reset`);
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
return { ok: repairs.length === 0, repairs };
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
return { load, save, saveStep, completeTask, isStepDone, reset, clearTask, clearStep, validateConsistency };
|
|
147
232
|
}
|
|
148
233
|
|
|
149
234
|
function getDefaultState() {
|
|
@@ -158,3 +243,4 @@ function getDefaultState() {
|
|
|
158
243
|
last_updated: new Date().toISOString(),
|
|
159
244
|
};
|
|
160
245
|
}
|
|
246
|
+
|
package/src/utils/git.js
CHANGED
|
@@ -3,6 +3,8 @@
|
|
|
3
3
|
*/
|
|
4
4
|
|
|
5
5
|
import { execSync } from 'child_process';
|
|
6
|
+
import { existsSync, statSync, unlinkSync } from 'fs';
|
|
7
|
+
import { resolve } from 'path';
|
|
6
8
|
import { log } from './logger.js';
|
|
7
9
|
|
|
8
10
|
function exec(cmd, cwd) {
|
|
@@ -26,6 +28,103 @@ function validateBranch(name) {
|
|
|
26
28
|
return name;
|
|
27
29
|
}
|
|
28
30
|
|
|
31
|
+
/**
|
|
32
|
+
* Resolve a corrupted git index by aborting any in-progress merge/rebase/cherry-pick.
|
|
33
|
+
*
|
|
34
|
+
* Common causes:
|
|
35
|
+
* - A prior `git rebase` left conflict markers in the index
|
|
36
|
+
* - A `git merge` was interrupted (SIGKILL, power loss, etc.)
|
|
37
|
+
* - A `git cherry-pick` left unresolved conflicts
|
|
38
|
+
*
|
|
39
|
+
* After aborting, the index is reset to HEAD so checkout can proceed.
|
|
40
|
+
*
|
|
41
|
+
* @returns {boolean} true if index was resolved, false if no action needed
|
|
42
|
+
*/
|
|
43
|
+
function resolveIndex(cwd) {
|
|
44
|
+
const gitDir = resolve(cwd, '.git');
|
|
45
|
+
|
|
46
|
+
let resolved = false;
|
|
47
|
+
|
|
48
|
+
// 1. Abort in-progress rebase (interactive or non-interactive)
|
|
49
|
+
const rebaseDir = resolve(gitDir, 'rebase-merge');
|
|
50
|
+
const rebaseApplyDir = resolve(gitDir, 'rebase-apply');
|
|
51
|
+
if (existsSync(rebaseDir) || existsSync(rebaseApplyDir)) {
|
|
52
|
+
log.warn('Detected stuck rebase — aborting...');
|
|
53
|
+
execSafe('git rebase --abort', cwd);
|
|
54
|
+
resolved = true;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// 2. Abort in-progress merge
|
|
58
|
+
const mergeHeadFile = resolve(gitDir, 'MERGE_HEAD');
|
|
59
|
+
if (existsSync(mergeHeadFile)) {
|
|
60
|
+
log.warn('Detected stuck merge — aborting...');
|
|
61
|
+
execSafe('git merge --abort', cwd);
|
|
62
|
+
resolved = true;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
// 3. Abort in-progress cherry-pick
|
|
66
|
+
const cherryPickFile = resolve(gitDir, 'CHERRY_PICK_HEAD');
|
|
67
|
+
if (existsSync(cherryPickFile)) {
|
|
68
|
+
log.warn('Detected stuck cherry-pick — aborting...');
|
|
69
|
+
execSafe('git cherry-pick --abort', cwd);
|
|
70
|
+
resolved = true;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// 4. If nothing specific was detected but index is still bad, force-reset index to HEAD
|
|
74
|
+
if (!resolved) {
|
|
75
|
+
log.warn('Resetting index to HEAD...');
|
|
76
|
+
execSafe('git reset HEAD', cwd);
|
|
77
|
+
resolved = true;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
return resolved;
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
/**
|
|
84
|
+
* Safely execute a git checkout with auto-recovery for index issues.
|
|
85
|
+
* If checkout fails with "resolve your current index", automatically resolves
|
|
86
|
+
* the index and retries once.
|
|
87
|
+
* @param {string} cmd - The full git checkout command to execute
|
|
88
|
+
* @param {string} cwd - Working directory
|
|
89
|
+
*/
|
|
90
|
+
function safeCheckout(cmd, cwd) {
|
|
91
|
+
const result = execSafe(cmd, cwd);
|
|
92
|
+
if (result.ok) return;
|
|
93
|
+
|
|
94
|
+
const errMsg = result.output || '';
|
|
95
|
+
if (errMsg.includes('resolve your current index') ||
|
|
96
|
+
errMsg.includes('needs merge') ||
|
|
97
|
+
errMsg.includes('not possible because you have unmerged files') ||
|
|
98
|
+
errMsg.includes('overwritten by checkout')) {
|
|
99
|
+
log.warn(`Checkout failed: ${errMsg.split('\n')[0]}`);
|
|
100
|
+
resolveIndex(cwd);
|
|
101
|
+
|
|
102
|
+
// After resolving, stash any leftover dirty files to ensure clean state
|
|
103
|
+
const stillDirty = !isClean(cwd);
|
|
104
|
+
if (stillDirty) {
|
|
105
|
+
log.dim('Stashing leftover changes after index resolution...');
|
|
106
|
+
execSafe('git stash --include-untracked', cwd);
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
// Retry checkout
|
|
110
|
+
const retry = execSafe(cmd, cwd);
|
|
111
|
+
if (!retry.ok) {
|
|
112
|
+
// Last resort: hard reset to HEAD and try once more
|
|
113
|
+
log.warn('Retry failed — hard resetting to HEAD...');
|
|
114
|
+
execSafe('git reset --hard HEAD', cwd);
|
|
115
|
+
exec(cmd, cwd); // This will throw if it still fails — unrecoverable
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Restore stashed changes
|
|
119
|
+
if (stillDirty) {
|
|
120
|
+
execSafe('git stash pop', cwd);
|
|
121
|
+
}
|
|
122
|
+
} else {
|
|
123
|
+
// Non-index-related error — re-throw
|
|
124
|
+
throw new Error(`Command failed: ${cmd}\n${errMsg}`);
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
|
|
29
128
|
/**
|
|
30
129
|
* Check if the git working tree is clean
|
|
31
130
|
*/
|
|
@@ -58,10 +157,23 @@ export function checkoutBranch(cwd, branch, baseBranch = 'main') {
|
|
|
58
157
|
const current = currentBranch(cwd);
|
|
59
158
|
if (current === branch) return;
|
|
60
159
|
|
|
160
|
+
// Pre-flight: resolve any stuck index state before attempting anything
|
|
161
|
+
const indexCheck = execSafe('git diff --check', cwd);
|
|
162
|
+
if (!indexCheck.ok && indexCheck.output && indexCheck.output.includes('conflict')) {
|
|
163
|
+
log.warn('Pre-flight: detected conflict markers in index');
|
|
164
|
+
resolveIndex(cwd);
|
|
165
|
+
}
|
|
166
|
+
|
|
61
167
|
// Stash any uncommitted changes to allow safe branch switching
|
|
62
168
|
const hasChanges = !isClean(cwd);
|
|
63
169
|
if (hasChanges) {
|
|
64
|
-
execSafe('git stash --include-untracked', cwd);
|
|
170
|
+
const stashResult = execSafe('git stash --include-untracked', cwd);
|
|
171
|
+
if (!stashResult.ok) {
|
|
172
|
+
// Stash itself can fail if the index is in a bad state
|
|
173
|
+
log.warn('Stash failed — resolving index first...');
|
|
174
|
+
resolveIndex(cwd);
|
|
175
|
+
execSafe('git stash --include-untracked', cwd);
|
|
176
|
+
}
|
|
65
177
|
}
|
|
66
178
|
|
|
67
179
|
// Check if the branch already exists locally
|
|
@@ -69,16 +181,19 @@ export function checkoutBranch(cwd, branch, baseBranch = 'main') {
|
|
|
69
181
|
|
|
70
182
|
if (branchExists) {
|
|
71
183
|
// Branch exists — just switch to it (preserving all previous work)
|
|
72
|
-
|
|
184
|
+
safeCheckout(`git checkout ${branch}`, cwd);
|
|
73
185
|
|
|
74
186
|
// Try to rebase onto latest base to pick up any new changes
|
|
75
187
|
execSafe(`git fetch origin ${baseBranch}`, cwd);
|
|
76
|
-
execSafe(`git rebase origin/${baseBranch}`, cwd);
|
|
188
|
+
const rebaseResult = execSafe(`git rebase origin/${baseBranch}`, cwd);
|
|
77
189
|
// If rebase fails (conflicts), abort and continue with existing state
|
|
78
|
-
|
|
190
|
+
if (!rebaseResult.ok) {
|
|
191
|
+
log.dim('Rebase had conflicts — aborting to preserve current state');
|
|
192
|
+
execSafe('git rebase --abort', cwd);
|
|
193
|
+
}
|
|
79
194
|
} else {
|
|
80
195
|
// Branch doesn't exist — create from latest base
|
|
81
|
-
|
|
196
|
+
safeCheckout(`git checkout ${baseBranch}`, cwd);
|
|
82
197
|
execSafe(`git pull origin ${baseBranch}`, cwd);
|
|
83
198
|
exec(`git checkout -b ${branch}`, cwd);
|
|
84
199
|
}
|
|
@@ -120,14 +235,154 @@ export function pushBranch(cwd, branch) {
|
|
|
120
235
|
}
|
|
121
236
|
|
|
122
237
|
/**
|
|
123
|
-
* Switch back to main branch
|
|
238
|
+
* Switch back to main branch (with safe checkout recovery)
|
|
124
239
|
*/
|
|
125
240
|
export function checkoutMain(cwd, baseBranch = 'main') {
|
|
126
241
|
validateBranch(baseBranch);
|
|
127
|
-
|
|
242
|
+
safeCheckout(`git checkout ${baseBranch}`, cwd);
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
// ──────────────────────────────────────────────
|
|
246
|
+
// Self-Healing Utilities (Layer 2)
|
|
247
|
+
// ──────────────────────────────────────────────
|
|
248
|
+
|
|
249
|
+
/**
|
|
250
|
+
* A2: Clear stale git lock files left behind by crashed processes.
|
|
251
|
+
* Only removes lock files older than `maxAgeMs` to avoid deleting
|
|
252
|
+
* locks from concurrent, legitimate operations.
|
|
253
|
+
* @param {string} cwd - Working directory
|
|
254
|
+
* @param {number} maxAgeMs - Max age in ms before lock is considered stale (default 5s)
|
|
255
|
+
* @returns {string[]} list of removed lock files
|
|
256
|
+
*/
|
|
257
|
+
export function clearStaleLocks(cwd, maxAgeMs = 5000) {
|
|
258
|
+
const gitDir = resolve(cwd, '.git');
|
|
259
|
+
const removed = [];
|
|
260
|
+
|
|
261
|
+
for (const lockName of ['index.lock', 'HEAD.lock', 'config.lock']) {
|
|
262
|
+
const lockPath = resolve(gitDir, lockName);
|
|
263
|
+
if (existsSync(lockPath)) {
|
|
264
|
+
try {
|
|
265
|
+
const stats = statSync(lockPath);
|
|
266
|
+
const age = Date.now() - stats.mtimeMs;
|
|
267
|
+
if (age > maxAgeMs) {
|
|
268
|
+
unlinkSync(lockPath);
|
|
269
|
+
removed.push(lockName);
|
|
270
|
+
log.warn(`Removed stale lock: ${lockName} (age: ${Math.round(age / 1000)}s)`);
|
|
271
|
+
}
|
|
272
|
+
} catch {
|
|
273
|
+
// If we can't stat, try to remove anyway (it's stale if the process is gone)
|
|
274
|
+
try {
|
|
275
|
+
unlinkSync(lockPath);
|
|
276
|
+
removed.push(lockName);
|
|
277
|
+
log.warn(`Removed lock file: ${lockName}`);
|
|
278
|
+
} catch { /* locked by active process — leave it */ }
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
return removed;
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
/**
|
|
287
|
+
* A3: Recover from detached HEAD state.
|
|
288
|
+
* When HEAD is detached (e.g., after interrupted rebase), find
|
|
289
|
+
* the most recent branch that points to HEAD and checkout.
|
|
290
|
+
* @returns {string|null} branch name restored to, or null if not detached
|
|
291
|
+
*/
|
|
292
|
+
export function recoverDetachedHead(cwd) {
|
|
293
|
+
const branch = currentBranch(cwd);
|
|
294
|
+
if (branch) return null; // Not detached — nothing to do
|
|
295
|
+
|
|
296
|
+
log.warn('Detected detached HEAD state');
|
|
297
|
+
|
|
298
|
+
// Find branches containing the current commit
|
|
299
|
+
const result = execSafe('git branch --contains HEAD', cwd);
|
|
300
|
+
if (!result.ok) {
|
|
301
|
+
log.warn('Cannot determine branches containing HEAD');
|
|
302
|
+
return null;
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
// Parse branch list, filter out detached indicator
|
|
306
|
+
const branches = result.output
|
|
307
|
+
.split('\n')
|
|
308
|
+
.map(b => b.replace(/^\*?\s+/, '').trim())
|
|
309
|
+
.filter(b => b && !b.startsWith('(') && !b.includes('HEAD detached'));
|
|
310
|
+
|
|
311
|
+
if (branches.length === 0) {
|
|
312
|
+
log.warn('No branch contains current HEAD — manual intervention required');
|
|
313
|
+
return null;
|
|
314
|
+
}
|
|
315
|
+
|
|
316
|
+
// Prefer main/master, then the first branch found
|
|
317
|
+
const preferred = branches.find(b => b === 'main' || b === 'master') || branches[0];
|
|
318
|
+
log.warn(`Re-attaching HEAD to branch: ${preferred}`);
|
|
319
|
+
execSafe(`git checkout ${preferred}`, cwd);
|
|
320
|
+
return preferred;
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
/**
|
|
324
|
+
* A4: Check for orphaned stash entries from interrupted codex-copilot operations.
|
|
325
|
+
* Stash entries created by checkoutBranch() may leak if the process is killed
|
|
326
|
+
* between stash push and stash pop.
|
|
327
|
+
* @returns {{ found: boolean, count: number }} stash status
|
|
328
|
+
*/
|
|
329
|
+
export function checkOrphanedStash(cwd) {
|
|
330
|
+
const result = execSafe('git stash list', cwd);
|
|
331
|
+
if (!result.ok || !result.output.trim()) {
|
|
332
|
+
return { found: false, count: 0 };
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
const entries = result.output.trim().split('\n');
|
|
336
|
+
if (entries.length > 0) {
|
|
337
|
+
log.warn(`Found ${entries.length} stash entry(s) — recent stash(es):`);
|
|
338
|
+
for (const entry of entries.slice(0, 3)) {
|
|
339
|
+
log.dim(` ${entry}`);
|
|
340
|
+
}
|
|
341
|
+
// Only warn — automatic pop is risky (may cause merge conflicts)
|
|
342
|
+
// User can decide to pop or drop
|
|
343
|
+
if (entries.length > 3) {
|
|
344
|
+
log.dim(` ... and ${entries.length - 3} more`);
|
|
345
|
+
}
|
|
346
|
+
return { found: true, count: entries.length };
|
|
347
|
+
}
|
|
348
|
+
|
|
349
|
+
return { found: false, count: 0 };
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
/**
|
|
353
|
+
* A7: Quick git repository integrity check.
|
|
354
|
+
* Runs `git fsck --no-full --no-dangling` for a fast check,
|
|
355
|
+
* then `git gc --auto` if issues are found.
|
|
356
|
+
* @returns {{ ok: boolean, issues: string[] }}
|
|
357
|
+
*/
|
|
358
|
+
export function verifyRepo(cwd) {
|
|
359
|
+
const result = execSafe('git fsck --no-full --no-dangling --connectivity-only 2>&1', cwd);
|
|
360
|
+
if (result.ok) {
|
|
361
|
+
return { ok: true, issues: [] };
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
const issues = (result.output || '')
|
|
365
|
+
.split('\n')
|
|
366
|
+
.filter(l => l.trim() && !l.includes('notice') && !l.includes('Checking'))
|
|
367
|
+
.slice(0, 10);
|
|
368
|
+
|
|
369
|
+
if (issues.length > 0) {
|
|
370
|
+
log.warn(`Git integrity issues detected (${issues.length}):`);
|
|
371
|
+
for (const issue of issues.slice(0, 3)) {
|
|
372
|
+
log.dim(` ${issue}`);
|
|
373
|
+
}
|
|
374
|
+
// Attempt auto-repair
|
|
375
|
+
log.info('Running git gc to repair...');
|
|
376
|
+
execSafe('git gc --auto', cwd);
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
return { ok: issues.length === 0, issues };
|
|
128
380
|
}
|
|
129
381
|
|
|
130
382
|
export const git = {
|
|
131
383
|
isClean, currentBranch, getRepoInfo, checkoutBranch,
|
|
132
384
|
commitAll, pushBranch, checkoutMain, exec, execSafe,
|
|
385
|
+
resolveIndex, clearStaleLocks, recoverDetachedHead,
|
|
386
|
+
checkOrphanedStash, verifyRepo,
|
|
133
387
|
};
|
|
388
|
+
|
package/src/utils/github.js
CHANGED
|
@@ -6,7 +6,12 @@ import { execSync } from 'child_process';
|
|
|
6
6
|
import { log } from './logger.js';
|
|
7
7
|
|
|
8
8
|
/**
 * Run a GitHub CLI subcommand and return its trimmed stdout.
 * Throws (execSync) on non-zero exit or timeout.
 * NOTE(review): `cmd` is interpolated into a shell string — callers must not
 * pass untrusted input; verify all call sites build `cmd` internally.
 */
function gh(cmd, cwd) {
  const execOptions = {
    cwd,
    encoding: 'utf-8',
    stdio: ['pipe', 'pipe', 'pipe'],
    timeout: 30000, // 30s timeout to prevent hanging on network issues
  };
  return execSync(`gh ${cmd}`, execOptions).trim();
}
|
|
11
16
|
|
|
12
17
|
function ghSafe(cmd, cwd) {
|
|
@@ -378,12 +383,64 @@ export function requestReReview(cwd, prNumber) {
|
|
|
378
383
|
}
|
|
379
384
|
}
|
|
380
385
|
|
|
386
|
+
/**
 * C4: Verify GitHub CLI authentication is valid.
 * Checks both auth status and token expiry.
 * @param {string} cwd - Directory to run `gh` in
 * @returns {{ ok: boolean, user: string|null, error: string|null }}
 */
export function ensureAuth(cwd) {
  try {
    const output = execSync('gh auth status 2>&1', {
      cwd, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'], timeout: 10000,
    });
    const userMatch = output.match(/Logged in to .+ as (\S+)/);
    return { ok: true, user: userMatch?.[1] || 'unknown', error: null };
  } catch (err) {
    // The command redirects stderr into stdout (`2>&1`), so when `gh` exits
    // non-zero the diagnostic text is on err.stdout — err.stderr is typically
    // empty and err.message is just "Command failed: ...". Check all three so
    // the specific classifications below can actually match.
    const msg = err.stdout || err.stderr || err.message || '';
    if (msg.includes('not logged in') || msg.includes('authentication')) {
      return { ok: false, user: null, error: 'Not authenticated. Run: gh auth login' };
    }
    if (msg.includes('token') && msg.includes('expir')) {
      return { ok: false, user: null, error: 'Token expired. Run: gh auth refresh' };
    }
    return { ok: false, user: null, error: `Auth check failed: ${msg.substring(0, 100)}` };
  }
}
|
|
409
|
+
|
|
410
|
+
/**
 * C2/C5: Check network connectivity to the git remote.
 * Verifies that we can reach the origin remote.
 * @param {string} cwd - Repository working directory
 * @returns {{ ok: boolean, error: string|null }}
 */
export function checkConnectivity(cwd) {
  try {
    execSync('git ls-remote --exit-code origin HEAD', {
      cwd, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'], timeout: 15000,
    });
    return { ok: true, error: null };
  } catch (err) {
    const msg = err.stderr || err.message || '';
    // Classify well-known git transport failures; order matters (first match wins).
    const knownFailures = [
      { needles: ['Could not resolve host', 'Network is unreachable'],
        error: 'Network unreachable — check internet connection' },
      { needles: ['Permission denied', 'Authentication failed'],
        error: 'Authentication failed — check SSH keys or GH token' },
      { needles: ['Repository not found'],
        error: 'Remote repository not found — verify origin URL' },
    ];
    for (const { needles, error } of knownFailures) {
      if (needles.some((needle) => msg.includes(needle))) {
        return { ok: false, error };
      }
    }
    // Timeout or other error
    return { ok: false, error: `Cannot reach remote: ${msg.substring(0, 100)}` };
  }
}
|
|
436
|
+
|
|
381
437
|
// Public API surface for GitHub operations (PR lifecycle, reviews,
// auth verification, and remote connectivity checks).
export const github = {
  checkGhAuth, createPR, createPRWithRecovery, findExistingPR,
  ensureRemoteBranch, hasCommitsBetween,
  getReviews, getReviewComments, getIssueComments,
  getLatestReviewState, mergePR, collectReviewFeedback,
  isPrivateRepo, requestReReview, closePR, deleteBranch, getPRState,
  ensureAuth, checkConnectivity,
};
|
|
388
445
|
|
|
389
446
|
/**
|
package/src/utils/json.js
CHANGED
|
@@ -156,8 +156,31 @@ function tryParse(s) {
|
|
|
156
156
|
*/
|
|
157
157
|
export function writeJSON(filePath, data) {
  const tempPath = filePath + '.tmp';
  const content = JSON.stringify(data, null, 2);
  // JSON.stringify returns undefined for undefined/function/symbol roots;
  // fail loudly rather than letting writeFileSync throw an opaque TypeError.
  if (content === undefined) {
    throw new Error(`Cannot write ${filePath}: data is not JSON-serializable`);
  }

  // B4: Disk space protection — catch ENOSPC/EDQUOT errors with clear message.
  // Write to a temp sibling first so a crash mid-write can't truncate the target.
  try {
    writeFileSync(tempPath, content);
  } catch (writeErr) {
    if (writeErr.code === 'ENOSPC' || writeErr.code === 'EDQUOT') {
      throw new Error(`Disk full — cannot write ${filePath}. Free up space and retry.`, { cause: writeErr });
    }
    if (writeErr.code === 'EACCES' || writeErr.code === 'EPERM') {
      throw new Error(`Permission denied — cannot write ${filePath}. Check file/directory permissions.`, { cause: writeErr });
    }
    throw writeErr;
  }

  // Atomic swap: rename replaces the target in one step.
  renameSync(tempPath, filePath);

  // Write-back verification: ensure what we wrote is valid and readable
  try {
    const readBack = readFileSync(filePath, 'utf-8');
    JSON.parse(readBack);
  } catch (verifyErr) {
    // Write succeeded but verification failed — attempt direct overwrite
    writeFileSync(filePath, content);
  }
}
|
|
162
185
|
|
|
163
186
|
/**
|
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Self-Healing Module — Unified recovery and health check entry point
|
|
3
|
+
*
|
|
4
|
+
* Layer 3-4 of the self-healing architecture:
|
|
5
|
+
* - preFlightCheck(): Full environment validation before run
|
|
6
|
+
* - validatePhaseEntry(): Phase-specific guard before each phase starts
|
|
7
|
+
*
|
|
8
|
+
* Uses Layer 2 building blocks from git.js and github.js.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { existsSync, writeFileSync, readFileSync, unlinkSync, accessSync, constants } from 'fs';
|
|
12
|
+
import { resolve } from 'path';
|
|
13
|
+
import { git } from './git.js';
|
|
14
|
+
import { github } from './github.js';
|
|
15
|
+
import { log } from './logger.js';
|
|
16
|
+
import { readJSON, writeJSON } from './json.js';
|
|
17
|
+
|
|
18
|
+
/**
 * Layer 4: Pre-flight health check — run before starting the automation loop.
 *
 * Performs comprehensive environment validation and auto-repairs:
 * 1. Concurrent execution lock (PID file)
 * 2. Git lock file cleanup
 * 3. Detached HEAD recovery
 * 4. Orphaned stash detection
 * 5. Git index health (stuck rebase/merge/cherry-pick)
 * 6. GitHub auth verification
 * 7. Temp file cleanup
 * 8. Disk space probe
 * 9. File permissions on critical state files
 * 10. Task/Checkpoint consistency validation
 *
 * Findings are split into three buckets: `repairs` (fixed automatically),
 * `warnings` (non-fatal, reported only), `blockers` (ok=false, run must stop).
 *
 * @param {string} projectDir - Project root directory
 * @param {string} baseBranch - Base branch name (e.g., 'main') — NOTE(review): currently unused in this function; confirm whether a branch check was intended
 * @param {object} [options]
 * @param {object} [options.checkpoint] - Checkpoint manager instance
 * @param {object} [options.tasks] - Tasks data from tasks.json
 * @returns {{ ok: boolean, repairs: string[], warnings: string[], blockers: string[] }}
 */
export function preFlightCheck(projectDir, baseBranch, options = {}) {
  const copilotDir = resolve(projectDir, '.codex-copilot');
  const repairs = [];
  const warnings = [];
  const blockers = [];

  log.dim('Running pre-flight health check...');

  // ── 1. Concurrent execution lock (D3) ──
  // PID file format: "<pid>:<startTimeMs>".
  const pidPath = resolve(copilotDir, '.pid');
  if (existsSync(pidPath)) {
    try {
      const pidContent = readFileSync(pidPath, 'utf-8').trim();
      const [pid, startTime] = pidContent.split(':');
      const pidNum = parseInt(pid, 10);

      // Check if the process is still running
      let isRunning = false;
      try {
        process.kill(pidNum, 0); // Signal 0: check if process exists
        isRunning = true;
      } catch {
        isRunning = false; // Process doesn't exist
      }

      if (isRunning) {
        const age = startTime ? Math.round((Date.now() - parseInt(startTime, 10)) / 1000) : 0;
        blockers.push(`Another codex-copilot instance is running (PID: ${pidNum}, age: ${age}s). Kill it first or wait.`);
      } else {
        // Stale PID file — remove it
        unlinkSync(pidPath);
        repairs.push('Removed stale PID lock file from crashed process');
      }
    } catch {
      // Can't read PID file — remove it
      try { unlinkSync(pidPath); } catch {}
      repairs.push('Removed unreadable PID lock file');
    }
  }

  // Claim the lock for this run — only when nothing blocks startup
  // (a live concurrent instance keeps ownership of its own PID file).
  if (blockers.length === 0) {
    try {
      writeFileSync(pidPath, `${process.pid}:${Date.now()}`);
    } catch { /* non-critical */ }
  }

  // ── 2. Git lock file cleanup (A2) ──
  try {
    const removedLocks = git.clearStaleLocks(projectDir);
    if (removedLocks.length > 0) {
      repairs.push(`Cleared ${removedLocks.length} stale git lock file(s): ${removedLocks.join(', ')}`);
    }
  } catch (err) {
    warnings.push(`Lock file check failed: ${err.message}`);
  }

  // ── 3. Detached HEAD recovery (A3) ──
  try {
    const recovered = git.recoverDetachedHead(projectDir);
    if (recovered) {
      repairs.push(`Recovered from detached HEAD → ${recovered}`);
    }
  } catch (err) {
    warnings.push(`Detached HEAD check failed: ${err.message}`);
  }

  // ── 4. Orphaned stash detection (A4) ──
  // Warn-only: popping a stash automatically could cause merge conflicts.
  try {
    const stashStatus = git.checkOrphanedStash(projectDir);
    if (stashStatus.found) {
      warnings.push(`${stashStatus.count} stash entry(s) found — run 'git stash list' to inspect`);
    }
  } catch (err) {
    // Non-critical
  }

  // ── 5. Git index health (A1) ──
  // Marker files under .git/ indicate an interrupted rebase/merge/cherry-pick.
  try {
    const gitDir = resolve(projectDir, '.git');
    const hasRebase = existsSync(resolve(gitDir, 'rebase-merge')) || existsSync(resolve(gitDir, 'rebase-apply'));
    const hasMerge = existsSync(resolve(gitDir, 'MERGE_HEAD'));
    const hasCherryPick = existsSync(resolve(gitDir, 'CHERRY_PICK_HEAD'));

    if (hasRebase || hasMerge || hasCherryPick) {
      git.resolveIndex(projectDir);
      const operations = [];
      if (hasRebase) operations.push('rebase');
      if (hasMerge) operations.push('merge');
      if (hasCherryPick) operations.push('cherry-pick');
      repairs.push(`Resolved stuck git operation(s): ${operations.join(', ')}`);
    }
  } catch (err) {
    warnings.push(`Index health check failed: ${err.message}`);
  }

  // ── 6. GitHub auth verification (C4) ──
  // Auth failure is a blocker: PR/review phases cannot work without it.
  try {
    const authStatus = github.ensureAuth(projectDir);
    if (!authStatus.ok) {
      blockers.push(`GitHub auth: ${authStatus.error}`);
    }
  } catch (err) {
    warnings.push(`GitHub auth check failed: ${err.message}`);
  }

  // ── 7. Temp file cleanup (B3) ──
  // Leftover .tmp files mean an earlier atomic write was interrupted.
  try {
    const tempFiles = [
      '_current_prompt.md.tmp',
      'state.json.tmp',
      'tasks.json.tmp',
    ];
    for (const tmpName of tempFiles) {
      const tmpPath = resolve(copilotDir, tmpName);
      if (existsSync(tmpPath)) {
        unlinkSync(tmpPath);
        repairs.push(`Cleaned up temp file: ${tmpName}`);
      }
    }
  } catch { /* non-critical */ }

  // ── 8. Disk space check (B4) ──
  // Probe by writing and deleting a tiny file; classify the error code.
  try {
    const probePath = resolve(copilotDir, '.disk_probe');
    writeFileSync(probePath, 'probe');
    unlinkSync(probePath);
  } catch (err) {
    if (err.code === 'ENOSPC' || err.code === 'EDQUOT') {
      blockers.push('Disk full — cannot write to project directory. Free up space before running.');
    } else if (err.code === 'EACCES' || err.code === 'EPERM') {
      blockers.push(`Permission denied writing to ${copilotDir}. Check directory permissions.`);
    }
    // Other errors are non-critical (e.g., copilotDir doesn't exist yet)
  }

  // ── 9. File permissions check (B5) ──
  try {
    const criticalFiles = ['tasks.json', 'state.json', 'config.json'];
    for (const fileName of criticalFiles) {
      const filePath = resolve(copilotDir, fileName);
      if (existsSync(filePath)) {
        try {
          // Verify write access
          accessSync(filePath, constants.R_OK | constants.W_OK);
        } catch (permErr) {
          if (permErr.code === 'EACCES' || permErr.code === 'EPERM') {
            blockers.push(`No read/write permission on ${fileName}. Fix with: chmod 644 ${filePath}`);
          }
        }
      }
    }
  } catch { /* non-critical */ }

  // ── 10. Task/Checkpoint consistency (E1, E4) ──
  // Only runs when the caller supplies both a checkpoint manager and tasks data.
  if (options.checkpoint && options.tasks) {
    try {
      const consistency = options.checkpoint.validateConsistency(options.tasks);
      if (!consistency.ok) {
        repairs.push(...consistency.repairs);
        // Write back repaired tasks
        const tasksPath = resolve(copilotDir, 'tasks.json');
        writeJSON(tasksPath, options.tasks);
      }
    } catch (err) {
      warnings.push(`Consistency check failed: ${err.message}`);
    }
  }

  // ── Report ──
  if (repairs.length > 0) {
    log.info(`🔧 Pre-flight: ${repairs.length} issue(s) auto-repaired`);
    for (const r of repairs) {
      log.dim(` ✅ ${r}`);
    }
  }

  if (warnings.length > 0) {
    for (const w of warnings) {
      log.warn(` ⚠ ${w}`);
    }
  }

  if (blockers.length > 0) {
    for (const b of blockers) {
      log.error(` 🛑 ${b}`);
    }
  }

  if (repairs.length === 0 && warnings.length === 0 && blockers.length === 0) {
    log.dim('Pre-flight: all checks passed ✓');
  }

  return { ok: blockers.length === 0, repairs, warnings, blockers };
}
|
|
233
|
+
|
|
234
|
+
/**
 * Layer 3: Phase entry validation — called before each automation phase.
 *
 * Ensures the environment is in the correct state for the target phase:
 * - develop: a current branch can be identified (auto-recovers detached HEAD)
 * - pr: we are on the task's feature branch
 * - review: the PR exists and is still open
 * - merge: the PR is open or already merged (a closed PR blocks the merge)
 *
 * @param {string} projectDir - Project root
 * @param {object} task - Current task object
 * @param {object} checkpoint - Checkpoint manager
 * @param {string} targetPhase - 'develop' | 'pr' | 'review' | 'merge'
 * @param {object} [extra] - Extra context (e.g., prInfo for review/merge)
 * @returns {{ ok: boolean, error: string|null }}
 */
export function validatePhaseEntry(projectDir, task, checkpoint, targetPhase, extra = {}) {
  const pass = () => ({ ok: true, error: null });
  const fail = (error) => ({ ok: false, error });

  try {
    if (targetPhase === 'develop') {
      // A detached HEAD yields no branch name; try one recovery pass first.
      let branch = git.currentBranch(projectDir);
      if (!branch) {
        git.recoverDetachedHead(projectDir);
        branch = git.currentBranch(projectDir);
        if (!branch) {
          return fail('Cannot determine current branch (detached HEAD, recovery failed)');
        }
      }
      return pass();
    }

    if (targetPhase === 'pr') {
      // Verify we're on the task's feature branch
      const branch = git.currentBranch(projectDir);
      if (branch !== task.branch) {
        return fail(`Expected branch '${task.branch}' but on '${branch}'`);
      }
      return pass();
    }

    if (targetPhase === 'review') {
      if (!extra.prNumber) {
        return fail('No PR number available for review phase');
      }
      const state = github.getPRState(projectDir, extra.prNumber);
      if (state === 'merged') {
        return fail(`PR #${extra.prNumber} is already merged`);
      }
      if (state === 'closed') {
        return fail(`PR #${extra.prNumber} was closed — needs re-creation`);
      }
      return pass();
    }

    if (targetPhase === 'merge') {
      if (!extra.prNumber) {
        return fail('No PR number available for merge phase');
      }
      const state = github.getPRState(projectDir, extra.prNumber);
      if (state === 'closed') {
        return fail(`PR #${extra.prNumber} is closed — cannot merge`);
      }
      // 'merged' is fine here: the merge step becomes a no-op.
      return pass();
    }

    // Unknown phases are not gated.
    return pass();
  } catch (err) {
    return fail(`Phase validation error: ${err.message}`);
  }
}
|
|
313
|
+
|
|
314
|
+
/**
 * Remove the PID lock file (called on clean exit).
 * Best effort: a missing file or an unlink failure is silently ignored.
 * @param {string} projectDir - Project root directory
 */
export function releaseLock(projectDir) {
  const lockFile = resolve(projectDir, '.codex-copilot/.pid');
  try {
    if (!existsSync(lockFile)) return;
    unlinkSync(lockFile);
  } catch { /* best effort */ }
}
|
|
325
|
+
|
|
326
|
+
// Aggregate export so callers can use `selfHeal` as a namespace object
// in addition to the individual named exports above.
export const selfHeal = {
  preFlightCheck,
  validatePhaseEntry,
  releaseLock,
};
|