@anyi61/codex-claude-delegate-mcp 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/LICENSE +21 -0
  2. package/README.md +262 -0
  3. package/dist/claude-cli.d.ts +84 -0
  4. package/dist/claude-cli.js +3123 -0
  5. package/dist/claude-cli.js.map +1 -0
  6. package/dist/cli.d.ts +61 -0
  7. package/dist/cli.js +334 -0
  8. package/dist/cli.js.map +1 -0
  9. package/dist/codex-config.d.ts +104 -0
  10. package/dist/codex-config.js +446 -0
  11. package/dist/codex-config.js.map +1 -0
  12. package/dist/guard.d.ts +27 -0
  13. package/dist/guard.js +229 -0
  14. package/dist/guard.js.map +1 -0
  15. package/dist/job-runner.d.ts +13 -0
  16. package/dist/job-runner.js +75 -0
  17. package/dist/job-runner.js.map +1 -0
  18. package/dist/jobs.d.ts +46 -0
  19. package/dist/jobs.js +175 -0
  20. package/dist/jobs.js.map +1 -0
  21. package/dist/package-info.d.ts +4 -0
  22. package/dist/package-info.js +14 -0
  23. package/dist/package-info.js.map +1 -0
  24. package/dist/schema.d.ts +779 -0
  25. package/dist/schema.js +325 -0
  26. package/dist/schema.js.map +1 -0
  27. package/dist/server.d.ts +1142 -0
  28. package/dist/server.js +693 -0
  29. package/dist/server.js.map +1 -0
  30. package/dist/session.d.ts +35 -0
  31. package/dist/session.js +109 -0
  32. package/dist/session.js.map +1 -0
  33. package/package.json +49 -0
  34. package/plugins/codex-claude-delegate/.codex-plugin/plugin.json +36 -0
  35. package/plugins/codex-claude-delegate/.mcp.json +9 -0
  36. package/plugins/codex-claude-delegate/hooks/hooks.json +16 -0
  37. package/plugins/codex-claude-delegate/hooks/review-gate-stop.mjs +66 -0
  38. package/plugins/codex-claude-delegate/server/job-runner.js +16999 -0
  39. package/plugins/codex-claude-delegate/server/server.js +28048 -0
  40. package/plugins/codex-claude-delegate/skills/claude-delegate.md +30 -0
  41. package/plugins/codex-claude-delegate/skills/claude-rescue.md +52 -0
  42. package/plugins/codex-claude-delegate/skills/claude-review.md +25 -0
@@ -0,0 +1,3123 @@
1
+ import { spawn } from "node:child_process";
2
+ import { cp, rm, writeFile, mkdir, readFile, readdir, stat } from "node:fs/promises";
3
+ import { existsSync } from "node:fs";
4
+ import { createHash, randomUUID } from "node:crypto";
5
+ import { fileURLToPath } from "node:url";
6
+ import path from "node:path";
7
+ import { execCapture, sanitizeEnv, resolveRepoLocalPath } from "./guard.js";
8
+ import { JobStore } from "./jobs.js";
9
+ import { SessionStore, computeRepoKey, RECENT_WINDOW_MINUTES } from "./session.js";
10
+ import { QUERY_SCHEMA, REVIEW_SCHEMA, IMPLEMENT_SCHEMA, buildImplementPrompt, buildQueryPrompt, buildReviewPrompt, StructuredToolError, } from "./schema.js";
11
// Claude CLI executable to spawn; overridable via the CLAUDE_BIN env var.
const CLAUDE_BIN = process.env.CLAUDE_BIN ?? "claude";
12
// Resolve the directory where per-run JSON logs are stored.
// An explicit CODEX_CLAUDE_RUN_LOG_DIR env override wins (resolved to an
// absolute path); otherwise logs live under <cwd>/.codex-claude-delegate/runs.
function getRunLogDir(cwd) {
    const override = process.env.CODEX_CLAUDE_RUN_LOG_DIR;
    if (override) {
        return path.resolve(override);
    }
    return path.join(cwd ?? process.cwd(), ".codex-claude-delegate", "runs");
}
19
// Env var naming the directory that holds background-job state.
const JOB_STATE_DIR_ENV = "CODEX_CLAUDE_BACKGROUND_STATE_DIR";
// Review-gate state file, relative to the workspace root.
const REVIEW_GATE_RELATIVE_PATH = path.join(".codex-claude-delegate", "review-gate.json");
// Literal hook command written into hooks.json; ${CLAUDE_PLUGIN_ROOT} is
// expanded by the hook runner, not by this process.
const REVIEW_GATE_HOOK_COMMAND = "node '${CLAUDE_PLUGIN_ROOT}/hooks/review-gate-stop.mjs'";
// How often a running job refreshes its heartbeat timestamp.
const JOB_HEARTBEAT_INTERVAL_MS = 15000;
// Heartbeat older than this marks a job as a stale *candidate* (90 s).
const STALE_CANDIDATE_HEARTBEAT_MS = 90_000;
// Heartbeat older than this marks a job as definitively stale (5 min).
const STALE_HEARTBEAT_MS = 300_000;
25
// ---- Session store (cwd-scoped, lazy init) ----
// Cache of SessionStore instances keyed by workspace cwd.
const stores = new Map();
// Currently spawned Claude CLI child process, if any (used for cancellation).
let activeClaudeChild = null;
28
// Return the SessionStore for a workspace, creating and initializing it on
// first use. Instances are cached per cwd in the module-level `stores` map.
async function getStore(cwd) {
    const cached = stores.get(cwd);
    if (cached) {
        return cached;
    }
    const store = new SessionStore(path.join(cwd, ".codex-claude-delegate"));
    await store.init();
    stores.set(cwd, store);
    return store;
}
38
// Pick the directory for background-job state, in priority order:
// 1. explicit CODEX_CLAUDE_BACKGROUND_STATE_DIR override,
// 2. parent of the run-log override directory,
// 3. <process.cwd()>/.codex-claude-delegate.
function getBackgroundStateDir() {
    const explicit = process.env[JOB_STATE_DIR_ENV];
    if (explicit) {
        return path.resolve(explicit);
    }
    const runLogDir = process.env.CODEX_CLAUDE_RUN_LOG_DIR;
    if (runLogDir) {
        return path.dirname(path.resolve(runLogDir));
    }
    return path.join(process.cwd(), ".codex-claude-delegate");
}
47
// Construct and initialize a JobStore rooted at the background state dir.
// A fresh instance is created per call; JobStore persistence lives on disk.
async function getJobStore() {
    const jobStore = new JobStore(getBackgroundStateDir());
    await jobStore.init();
    return jobStore;
}
52
// Repository root = parent directory of the directory containing this module
// (dist/ lives one level below the package root).
function getRepoRootFromModule() {
    const moduleDir = path.dirname(fileURLToPath(import.meta.url));
    return path.resolve(moduleDir, "..");
}
55
// Locate the plugin root by probing candidates for hook assets, in order:
// CLAUDE_PLUGIN_ROOT env override, the module's parent directory, then the
// in-repo plugins/codex-claude-delegate layout. A candidate qualifies when
// its hooks/ directory contains hooks.json or review-gate-stop.mjs.
function resolvePluginRootFromModule() {
    const moduleDir = path.dirname(fileURLToPath(import.meta.url));
    const repoLayoutRoot = path.join(getRepoRootFromModule(), "plugins", "codex-claude-delegate");
    const candidates = [];
    if (process.env.CLAUDE_PLUGIN_ROOT) {
        candidates.push(path.resolve(process.env.CLAUDE_PLUGIN_ROOT));
    }
    candidates.push(path.resolve(moduleDir, ".."), repoLayoutRoot);
    for (const candidate of candidates) {
        const hooksDir = path.join(candidate, "hooks");
        const hasHookAssets = existsSync(path.join(hooksDir, "hooks.json")) ||
            existsSync(path.join(hooksDir, "review-gate-stop.mjs"));
        if (hasHookAssets) {
            return candidate;
        }
    }
    // Fallback to the repository layout used during source development.
    return repoLayoutRoot;
}
72
// Absolute path of the plugin's hook manifest (hooks/hooks.json).
function getHookManifestPath() {
    return path.join(resolvePluginRootFromModule(), "hooks", "hooks.json");
}
// Absolute path of the stop-hook script shipped with the plugin.
function getHookScriptPath() {
    return path.join(resolvePluginRootFromModule(), "hooks", "review-gate-stop.mjs");
}
78
// Absolute path of the per-workspace review-gate state file.
function getReviewGateStatePath(cwd) {
    return path.join(cwd, REVIEW_GATE_RELATIVE_PATH);
}
81
// Best-effort read of the on-disk review-gate state.
// Returns the parsed JSON object, or null when the file is absent or corrupt.
async function readReviewGateState(cwd) {
    const stateFile = getReviewGateStatePath(cwd);
    if (!existsSync(stateFile)) {
        return null;
    }
    try {
        const raw = await readFile(stateFile, "utf8");
        return JSON.parse(raw);
    }
    catch {
        return null;
    }
}
92
// Persist a fresh review-gate state file for the workspace with the given
// enabled flag. Resets pending_review to false and stamps updated_at.
// Throws when the target path escapes the repo (resolveRepoLocalPath guard).
// Returns the state object that was written.
async function writeReviewGateState(cwd, enabled) {
    // Guard against path traversal: the state file must stay inside the repo.
    const pathCheck = resolveRepoLocalPath(cwd, REVIEW_GATE_RELATIVE_PATH);
    if (!pathCheck.ok) {
        throw new Error(pathCheck.error);
    }
    await mkdir(path.dirname(pathCheck.resolved), { recursive: true });
    const next = {
        workspace_root: cwd,
        config_path: pathCheck.resolved,
        hook_manifest_path: getHookManifestPath(),
        hook_script_path: getHookScriptPath(),
        // Installed only when both the manifest and the script exist on disk.
        hook_installed: existsSync(getHookManifestPath()) && existsSync(getHookScriptPath()),
        enabled,
        mode: "soft-stop",
        pending_review: false,
        updated_at: new Date().toISOString(),
    };
    await writeFile(pathCheck.resolved, JSON.stringify(next, null, 2), "utf8");
    return next;
}
112
// Update the pending_review flag on an existing, enabled review-gate state.
// No-op when the gate is disabled or the state file is missing/corrupt.
// `activity` ("write" | "review") stamps last_write_at / last_review_at.
// Throws when the state path escapes the repo.
async function markReviewGatePending(cwd, pending, activity) {
    const current = await readReviewGateState(cwd);
    // Only track pending reviews for workspaces that opted into the gate.
    if (!current?.enabled)
        return;
    const pathCheck = resolveRepoLocalPath(cwd, REVIEW_GATE_RELATIVE_PATH);
    if (!pathCheck.ok) {
        throw new Error(pathCheck.error);
    }
    const now = new Date().toISOString();
    const next = {
        ...current,
        config_path: pathCheck.resolved,
        hook_manifest_path: getHookManifestPath(),
        hook_script_path: getHookScriptPath(),
        hook_installed: existsSync(getHookManifestPath()) && existsSync(getHookScriptPath()),
        pending_review: pending,
        updated_at: now,
        // Only the timestamp matching the reported activity advances.
        last_write_at: activity === "write" ? now : current.last_write_at,
        last_review_at: activity === "review" ? now : current.last_review_at,
    };
    await mkdir(path.dirname(pathCheck.resolved), { recursive: true });
    await writeFile(pathCheck.resolved, JSON.stringify(next, null, 2), "utf8");
}
135
// Idempotently install the review-gate stop hook into the plugin's
// hooks.json manifest. Existing hook entries are preserved; the stop hook is
// appended only when no entry already carries the exact gate command.
// Malformed/missing manifests are treated as empty and rewritten.
async function ensureReviewGateHookManifest() {
    const manifestPath = getHookManifestPath();
    await mkdir(path.dirname(manifestPath), { recursive: true });
    // Read leniently: a vanished or unreadable manifest becomes "".
    const existingRaw = existsSync(manifestPath) ? await readFile(manifestPath, "utf8").catch(() => "") : "";
    let parsed = {};
    if (existingRaw.trim()) {
        try {
            parsed = JSON.parse(existingRaw);
        }
        catch {
            // Corrupt JSON: start from an empty manifest rather than failing.
            parsed = {};
        }
    }
    const hooksRoot = parsed.hooks && typeof parsed.hooks === "object"
        ? parsed.hooks
        : {};
    // Copy so we never mutate arrays shared with the parsed object.
    const stopEntries = Array.isArray(hooksRoot.Stop) ? [...hooksRoot.Stop] : [];
    // Detect a prior install by exact command match.
    const alreadyInstalled = stopEntries.some((entry) => {
        const hookEntries = Array.isArray(entry.hooks) ? entry.hooks : [];
        return hookEntries.some((hook) => hook.type === "command" && hook.command === REVIEW_GATE_HOOK_COMMAND);
    });
    if (!alreadyInstalled) {
        stopEntries.push({
            matcher: ".*",
            hooks: [
                {
                    type: "command",
                    command: REVIEW_GATE_HOOK_COMMAND,
                    async: false,
                },
            ],
        });
    }
    hooksRoot.Stop = stopEntries;
    await writeFile(manifestPath, JSON.stringify({ hooks: hooksRoot }, null, 2), "utf8");
}
171
// Human-readable guidance lines for the current review-gate situation.
// Missing hook assets short-circuit everything; otherwise the message set
// depends on whether the gate is enabled and a review is pending.
function getReviewGateNextSteps(enabled, hookInstallable, pendingReview = false) {
    if (!hookInstallable) {
        return ["Review gate hook assets are missing. Restore the plugin hook files before enabling the gate."];
    }
    if (!enabled) {
        return [
            "Review gate is disabled for this workspace.",
            "Call claude_review_gate with action=enable to persist the local gate state and install/update the stop-hook manifest.",
        ];
    }
    const pendingLine = pendingReview
        ? "A review is pending for the latest write-oriented workflow in this workspace."
        : "No pending review is currently tracked for this workspace.";
    return [
        "Review gate is enabled for this workspace.",
        pendingLine,
        "Verify the plugin loads hooks/hooks.json and that the stop hook script is reachable.",
        "Before finishing a coding session, expect a stop-time reminder to run claude_review or claude_task with mode=review.",
    ];
}
190
// Build the review-gate state payload reported to clients, merging the
// persisted state (may be null) with freshly resolved hook asset paths.
function buildReviewGateState(cwd, state, hookInstalled) {
    const manifestPath = getHookManifestPath();
    const scriptPath = getHookScriptPath();
    return {
        workspace_root: cwd,
        config_path: getReviewGateStatePath(cwd),
        hook_manifest_path: manifestPath,
        hook_script_path: scriptPath,
        hook_installed: hookInstalled,
        enabled: state?.enabled === true,
        mode: "soft-stop",
        pending_review: state?.pending_review === true,
        updated_at: state?.updated_at,
        last_write_at: state?.last_write_at,
        last_review_at: state?.last_review_at,
    };
}
205
// Narrow an arbitrary value to one of the known workflow session types;
// anything else becomes undefined.
function normalizeSessionType(value) {
    switch (value) {
        case "query":
        case "review":
        case "implement":
            return value;
        default:
            return undefined;
    }
}
210
// Project a stored session record onto the wire-level summary shape,
// tagging its provenance as "store".
function toWorkflowSessionSummaryFromStore({ session_id, type, repo_path, last_used, use_count, summary }) {
    return {
        session_id,
        type,
        repo_path,
        last_used,
        use_count,
        summary,
        source: "store",
    };
}
221
// Project a persisted job record onto the summary shape returned to clients.
// result_status prefers the stored value, falling back to a status extracted
// from the raw result payload.
function toJobSummary(record) {
    const {
        job_id,
        type,
        status,
        cwd,
        created_at,
        updated_at,
        heartbeat_at,
        last_wait_at,
        last_wait_recommended_delay_ms,
        fingerprint,
        pid,
        run_id,
        worktree_name,
        summary,
        error,
    } = record;
    return {
        job_id,
        type,
        status,
        result_status: record.result_status ?? extractBackgroundResultStatus(record.result),
        cwd,
        created_at,
        updated_at,
        heartbeat_at,
        last_wait_at,
        last_wait_recommended_delay_ms,
        fingerprint,
        pid,
        run_id,
        worktree_name,
        summary,
        error,
    };
}
241
// Pull a run-log status out of a background result payload: the top-level
// `status` wins, then `claude_report.status`; undefined when neither holds a
// recognized status value.
function extractBackgroundResultStatus(result) {
    if (!result) {
        return undefined;
    }
    const knownStatuses = new Set(["success", "failed", "partial", "needs_user", "unknown"]);
    if (knownStatuses.has(result.status)) {
        return result.status;
    }
    const report = result.claude_report;
    if (report && typeof report === "object" && knownStatuses.has(report.status)) {
        return report.status;
    }
    return undefined;
}
254
// True when the value is one of the recognized run-log status strings.
function isRunLogStatus(value) {
    switch (value) {
        case "success":
        case "failed":
        case "partial":
        case "needs_user":
        case "unknown":
            return true;
        default:
            return false;
    }
}
257
// Deterministic JSON serialization: object keys are emitted in locale-sorted
// order and undefined-valued entries are dropped, so equal structures always
// produce identical strings (used for fingerprinting).
function stableJson(value) {
    if (Array.isArray(value)) {
        const items = value.map(stableJson);
        return `[${items.join(",")}]`;
    }
    if (value !== null && typeof value === "object") {
        const parts = Object.entries(value)
            .filter(([, entryValue]) => entryValue !== undefined)
            .sort((a, b) => a[0].localeCompare(b[0]))
            .map(([key, entryValue]) => `${JSON.stringify(key)}:${stableJson(entryValue)}`);
        return `{${parts.join(",")}}`;
    }
    return JSON.stringify(value);
}
269
// Normalize a value into a sorted array of trimmed, non-empty strings.
// Non-array input yields undefined; non-string entries are dropped.
function normalizeStringArray(value) {
    if (!Array.isArray(value)) {
        return undefined;
    }
    const cleaned = [];
    for (const entry of value) {
        if (typeof entry !== "string") {
            continue;
        }
        const trimmed = entry.trim();
        if (trimmed.length > 0) {
            cleaned.push(trimmed);
        }
    }
    return cleaned.sort((a, b) => a.localeCompare(b));
}
278
// Assemble the canonical payload that identifies a task for deduplication.
// Paths are resolved, free-text is trimmed, file lists are normalized, and
// implement tasks default dirty_policy to "ask".
function buildFingerprintPayload(input) {
    const { payload, type } = input;
    const trimmedTask = typeof payload.task === "string" ? payload.task.trim() : undefined;
    return {
        cwd: path.resolve(input.cwd),
        tool: type,
        mode: payload.mode,
        task: trimmedTask,
        files: normalizeStringArray(payload.files),
        instruction_files: normalizeStringArray(payload.instruction_files),
        dirty_policy: payload.dirty_policy ?? (type === "implement" ? "ask" : undefined),
        session_key: payload.session_key,
        resume_latest: payload.resume_latest,
        fork_session: payload.fork_session,
        max_changed_files: payload.max_changed_files,
        max_cost_usd: payload.max_cost_usd,
    };
}
294
// SHA-256 hex fingerprint of the normalized task payload; equivalent task
// submissions hash identically so duplicates can be detected.
export function createTaskFingerprint(input) {
    const canonical = stableJson(buildFingerprintPayload(input));
    return createHash("sha256").update(canonical).digest("hex");
}
298
// Message returned when a fingerprint-identical job already exists.
function buildDuplicateJobMessage(job) {
    const { type, status, job_id } = job;
    return `An equivalent ${type} job is already ${status}: ${job_id}. Continue polling claude_job_wait for this job_id; do not restart or duplicate the task.`;
}
301
// Begin refreshing the job's heartbeat: one immediate touch, then one per
// interval. The timer is unref'd so it never keeps the process alive.
// Returns a stop function that halts future touches.
function startJobHeartbeat(jobStore, jobId, intervalMs = JOB_HEARTBEAT_INTERVAL_MS) {
    let active = true;
    const beat = () => {
        if (!active) {
            return;
        }
        // Best-effort: a failed heartbeat write must never crash the job.
        void jobStore.touchHeartbeat(jobId).catch(() => { });
    };
    beat();
    const timer = setInterval(beat, intervalMs);
    timer.unref?.();
    return function stopHeartbeat() {
        active = false;
        clearInterval(timer);
    };
}
316
// Milliseconds elapsed since an ISO timestamp, clamped at zero for future
// timestamps. Returns undefined for missing or unparseable input.
function ageMsSince(timestamp, nowMs = Date.now()) {
    if (!timestamp) {
        return undefined;
    }
    const parsedMs = Date.parse(timestamp);
    if (!Number.isFinite(parsedMs)) {
        return undefined;
    }
    return Math.max(0, nowMs - parsedMs);
}
324
// Probe whether a process exists. Signal 0 performs the existence/permission
// check without delivering a signal; EPERM means it exists but is owned by
// someone else, so it still counts as alive. Falsy pid yields undefined.
function isPidAlive(pid) {
    if (!pid) {
        return undefined;
    }
    try {
        process.kill(pid, 0);
        return true;
    }
    catch (err) {
        return err.code === "EPERM";
    }
}
336
// Classify how trustworthy a queued/running job's liveness signals are:
// "stale" when its pid is confirmed dead or the heartbeat exceeds the hard
// threshold, "stale_candidate" past the soft threshold, otherwise "fresh".
// Terminal jobs and jobs with no heartbeat info are always "fresh".
function classifyJobStaleState(input) {
    const { job, pidAlive, heartbeatAgeMs } = input;
    const isActive = job.status === "queued" || job.status === "running";
    if (!isActive) {
        return "fresh";
    }
    if (pidAlive === false) {
        return "stale";
    }
    if (heartbeatAgeMs === undefined) {
        return "fresh";
    }
    if (heartbeatAgeMs > STALE_HEARTBEAT_MS) {
        return "stale";
    }
    if (heartbeatAgeMs > STALE_CANDIDATE_HEARTBEAT_MS) {
        return "stale_candidate";
    }
    return "fresh";
}
355
// Suggest how long a client should wait before polling the job again.
// Stale jobs get no delay (polling is pointless); stale candidates get a
// fixed 30 s; otherwise the delay backs off with job age via a threshold
// table, topping out at 90 s.
function recommendedDelayMs(input) {
    const { staleState, ageMs } = input;
    if (staleState === "stale") {
        return undefined;
    }
    if (staleState === "stale_candidate") {
        return 30_000;
    }
    const backoffSchedule = [
        [30_000, 10_000],
        [120_000, 20_000],
        [600_000, 45_000],
        [1_800_000, 60_000],
    ];
    for (const [ageThreshold, delay] of backoffSchedule) {
        if (ageMs < ageThreshold) {
            return delay;
        }
    }
    return 90_000;
}
370
// Build the suggested follow-up tool calls for a job that is still in
// flight. Stale jobs get a cancel/inspect trio; otherwise a single
// claude_job_wait action whose reason and args reflect poll gating and the
// stale-candidate warning.
function buildActiveWaitActions(input) {
    const { cwd, job, staleState, nextAllowedPollAt } = input;
    const jobArgs = { cwd, job_id: job.job_id };
    if (staleState === "stale") {
        return [
            {
                tool: "claude_job_cancel",
                reason: "Job appears stale because heartbeat is too old or its pid is gone. Cancel only after inspecting or when intentionally abandoning this run.",
                args: { ...jobArgs },
            },
            {
                tool: "claude_workspace_status",
                reason: "Inspect workspace-level state before deciding whether to cancel, apply, cleanup, or retry.",
                args: { cwd },
            },
            {
                tool: "claude_job_result",
                reason: "Inspect persisted job details before starting a replacement job.",
                args: { ...jobArgs },
            },
        ];
    }
    let reason;
    if (nextAllowedPollAt) {
        reason = "Poll again only after next_allowed_poll_at. Do not start a duplicate task.";
    }
    else if (staleState === "stale_candidate") {
        reason = "Job heartbeat is delayed but not stale yet. Wait once more before inspecting or cancelling; do not start a duplicate job.";
    }
    else {
        reason = "Job is active. Poll this same job_id again after the recommended delay; do not start a duplicate job.";
    }
    const waitArgs = { ...jobArgs };
    if (nextAllowedPollAt) {
        waitArgs.not_before = nextAllowedPollAt;
    }
    return [{ tool: "claude_job_wait", reason, args: waitArgs }];
}
406
// Prefix a summary with its tool label and non-success outcome status
// (e.g. "Implement failed: ..."); successful or status-less results pass
// through unchanged.
function summarizeWithOutcome(type, result, summary) {
    const status = extractBackgroundResultStatus(result);
    if (!status || status === "success") {
        return summary;
    }
    const labels = {
        query: "Query",
        review: "Review",
        implement: "Implement",
        apply: "Apply",
    };
    const label = Object.hasOwn(labels, type) ? labels[type] : "Cleanup";
    return `${label} ${status}: ${summary}`;
}
416
// One-line summary of a completed background job's result payload, chosen by
// job type. Query/implement/review summaries are run through
// summarizeWithOutcome so non-success outcomes are surfaced; apply/cleanup
// report simple counts.
function summarizeBackgroundResult(type, result) {
    switch (type) {
        case "query": {
            const data = result.data;
            const text = data && typeof data === "object" && typeof data.answer === "string"
                ? `Query completed: ${String(data.answer).slice(0, 80)}`
                : "Query completed";
            return summarizeWithOutcome(type, result, text);
        }
        case "implement": {
            const claudeReport = result.claude_report;
            const text = claudeReport && typeof claudeReport === "object" && typeof claudeReport.summary === "string"
                ? claudeReport.summary
                : "Implement job completed";
            return summarizeWithOutcome(type, result, text);
        }
        case "apply": {
            const data = result.data;
            if (data && typeof data === "object" && Array.isArray(data.applied_files)) {
                return `Apply completed (${data.applied_files.length} files)`;
            }
            return "Apply completed";
        }
        case "cleanup": {
            const data = result.data;
            if (data && typeof data === "object" && typeof data.removed_count === "number") {
                return `Cleanup completed (${data.removed_count} removed)`;
            }
            return "Cleanup completed";
        }
        default: {
            // Review (and any unrecognized type) falls through to here.
            const data = result.data;
            const text = data && typeof data === "object" && typeof data.severity === "string"
                ? `Review completed (${data.severity})`
                : "Review completed";
            return summarizeWithOutcome(type, result, text);
        }
    }
}
451
// Derive the worktree name for a background job: apply jobs take the
// basename of their payload's worktree_path; everything else reads
// result.server_observed.worktree_name when it is a string.
function getBackgroundWorktreeName(type, payload, result) {
    if (type === "apply" && typeof payload.worktree_path === "string") {
        return path.basename(payload.worktree_path);
    }
    const observed = result.server_observed;
    if (observed && typeof observed === "object" && typeof observed.worktree_name === "string") {
        return observed.worktree_name;
    }
    return undefined;
}
462
// argv tail used to spawn the detached job-runner for a job id. When this
// module itself runs as TypeScript under tsx, route through tsx and the .ts
// runner; otherwise point node at the compiled job-runner.js sibling.
function getJobRunnerArgs(jobId) {
    const currentFile = fileURLToPath(import.meta.url);
    const currentDir = path.dirname(currentFile);
    const runningTsUnderTsx = currentFile.endsWith(".ts") && process.argv[1]?.includes("tsx");
    if (runningTsUnderTsx) {
        return [process.argv[1], path.join(currentDir, "job-runner.ts"), jobId];
    }
    return [path.join(currentDir, "job-runner.js"), jobId];
}
470
// ---- Logging (stderr only, never stdout) ----
// stdout carries the MCP protocol stream, so diagnostics go to stderr.
function log(msg) {
    const line = `[claude-delegate] ${msg}\n`;
    process.stderr.write(line);
}
474
// Write (or overwrite) the JSON run log for a run id under the run-log dir.
// started_at/updated_at default to now but are overridden by matching keys
// in `data`. All failures are swallowed: logging must never break a tool call.
async function logRun(runId, data, cwd) {
    const logDir = getRunLogDir(cwd);
    try {
        await mkdir(logDir, { recursive: true });
        const timestamp = new Date().toISOString();
        const record = { started_at: timestamp, updated_at: timestamp, ...data };
        await writeFile(path.join(logDir, `${runId}.json`), JSON.stringify(record, null, 2));
    }
    catch {
        // best-effort logging
    }
}
485
// Convert a file reference into a repo-relative, forward-slash path.
// Absolute paths are made relative to cwd; a leading "./" is stripped.
function normalizeRepoPath(cwd, file) {
    let repoRelative = file;
    if (path.isAbsolute(file)) {
        repoRelative = path.relative(cwd, file);
    }
    const forwardSlashed = repoRelative.replaceAll(path.sep, "/");
    return forwardSlashed.replace(/^\.\//, "");
}
489
// True when `file` equals the requested path or lives inside it as a
// directory (a trailing slash on `requested` is tolerated).
function isUnderRequestedFile(file, requested) {
    if (file === requested) {
        return true;
    }
    const directoryPrefix = `${requested.replace(/\/$/, "")}/`;
    return file.startsWith(directoryPrefix);
}
492
// Normalize a requested-files list into sorted, deduplicated repo-relative
// paths with trailing slashes stripped. Empty/missing input yields [].
function normalizeRequestedFiles(cwd, files) {
    if (!files?.length) {
        return [];
    }
    const cleaned = files
        .map((file) => normalizeRepoPath(cwd, file).replace(/\/+$/g, ""))
        .filter((repoPath) => repoPath.length > 0);
    return [...new Set(cleaned)].sort();
}
503
// Find a finished (succeeded/failed/cancelled) implement job whose worktree
// name matches the basename of the given worktree path; null when none of
// the most recent 100 implement jobs for this cwd matches.
async function findImplementJobForWorktree(worktreePath, cwd) {
    const targetName = path.basename(worktreePath);
    const jobStore = await getJobStore();
    const jobs = await jobStore.list({ cwd, limit: 100, type: "implement" });
    const isTerminal = (status) => status === "succeeded" || status === "failed" || status === "cancelled";
    const match = jobs.find((job) => job.worktree_name === targetName && isTerminal(job.status));
    return match ?? null;
}
514
// Find the parsed run log of the most recent implement run that targeted
// the given worktree path, or null when none exists (including when the log
// directory is missing or unreadable).
//
// Fix: this function previously duplicated the entire mtime-sorted scan loop
// of findImplementLogRecordForWorktree verbatim; it now delegates to that
// helper and returns only the parsed payload, preserving behavior (both
// variants returned null on every failure path).
async function findImplementLogForWorktree(worktreePath, cwd) {
    const record = await findImplementLogRecordForWorktree(worktreePath, cwd);
    return record ? record.parsed : null;
}
548
// Scan the run-log directory (newest mtime first) for the latest implement
// run whose observed worktree_path matches. Returns { file, parsed } so the
// caller can rewrite the log in place, or null when nothing matches or the
// log directory is unreadable. Malformed log files are skipped silently.
async function findImplementLogRecordForWorktree(worktreePath, cwd) {
    const logDir = getRunLogDir(cwd);
    try {
        const entries = await readdir(logDir);
        // Stat every .json log in parallel; unreadable entries become null.
        const candidates = await Promise.all(entries
            .filter((name) => name.endsWith(".json"))
            .map(async (name) => {
            const file = path.join(logDir, name);
            try {
                return { file, mtimeMs: (await stat(file)).mtimeMs };
            }
            catch {
                return null;
            }
        }));
        // Newest logs first so the most recent matching run wins.
        for (const entry of candidates
            .filter((item) => item !== null)
            .sort((a, b) => b.mtimeMs - a.mtimeMs)) {
            try {
                const parsed = JSON.parse(await readFile(entry.file, "utf8"));
                if (parsed.type === "implement" && parsed.observed?.worktree_path === worktreePath) {
                    return { file: entry.file, parsed };
                }
            }
            catch {
                // Skip malformed or concurrently written logs.
                continue;
            }
        }
    }
    catch {
        // Missing log directory: nothing to find.
        return null;
    }
    return null;
}
582
// Merge lifecycle fields into the `downstream` section of the implement run
// log that produced the given worktree, bumping updated_at. Silently does
// nothing when no matching log is found.
async function updateImplementLifecycleForWorktree(worktreePath, update, cwd) {
    const record = await findImplementLogRecordForWorktree(worktreePath, cwd);
    if (!record) {
        return;
    }
    const { file, parsed } = record;
    const merged = {
        ...parsed,
        updated_at: new Date().toISOString(),
        downstream: {
            ...(parsed.downstream ?? {}),
            ...update,
        },
    };
    await writeFile(file, JSON.stringify(merged, null, 2));
}
598
// Derive a run status from a raw run log. Precedence:
// 1. a recorded error string => "failed";
// 2. a timeout or non-zero exit => "partial" if any files changed, else "failed";
// 3. the report's own status when it is a recognized value;
// 4. "unknown".
function parseRunStatus(raw) {
    if (typeof raw.error === "string" && raw.error.length > 0) {
        return "failed";
    }
    const exitCode = raw.execution?.exit_code;
    const crashed = raw.execution?.timed_out === true ||
        (typeof exitCode === "number" && exitCode !== 0);
    if (crashed) {
        const changedCount = Array.isArray(raw.observed?.changed_files)
            ? raw.observed.changed_files.length
            : 0;
        return changedCount > 0 ? "partial" : "failed";
    }
    const reported = raw.report?.status;
    switch (reported) {
        case "success":
        case "failed":
        case "partial":
        case "needs_user":
            return reported;
        default:
            return "unknown";
    }
}
613
// Derive a lifecycle label for a run log. A valid downstream.current_lifecycle
// wins outright; otherwise apply and cleanup runs get type-specific rules and
// everything else maps the parsed status (needs_user collapses to "partial").
function parseRunLifecycle(raw) {
    const validLifecycles = new Set([
        "queued",
        "running",
        "success",
        "partial",
        "failed",
        "apply_blocked",
        "applied",
        "cleaned",
        "unknown",
    ]);
    const downstreamLifecycle = raw.downstream?.current_lifecycle;
    if (validLifecycles.has(downstreamLifecycle)) {
        return downstreamLifecycle;
    }
    const type = typeof raw.type === "string" ? raw.type : "unknown";
    const status = parseRunStatus(raw);
    if (type === "apply") {
        if (typeof raw.error === "string" && raw.error.length > 0) {
            return "apply_blocked";
        }
        const appliedCount = Array.isArray(raw.applied_files) ? raw.applied_files.length : 0;
        if (appliedCount > 0) {
            return "applied";
        }
        if (raw.preview === true) {
            return "success";
        }
        // A non-preview apply with nothing applied falls through to the
        // status-based mapping below.
    }
    if (type === "cleanup") {
        const removedCount = typeof raw.removed_count === "number" ? raw.removed_count : 0;
        const failedCount = typeof raw.failed_count === "number" ? raw.failed_count : 0;
        if (removedCount > 0 && failedCount === 0) {
            return "cleaned";
        }
        return failedCount > 0 ? "partial" : "success";
    }
    if (status === "needs_user") {
        return "partial";
    }
    if (status === "success" || status === "partial" || status === "failed") {
        return status;
    }
    return "unknown";
}
654
// Flatten a raw run log into the summary entry shape used by listings.
// `updatedAt` is an optional fallback for started_at/updated_at when the log
// itself lacks them; NOTE(review): getRunLogById calls this without the third
// argument, so those fields can come back undefined there — confirm intended.
function summarizeRunLog(runId, raw, updatedAt) {
    // Prefer the observed worktree path; fall back to the requested input.
    const worktreePath = typeof raw.observed?.worktree_path === "string"
        ? raw.observed.worktree_path
        : typeof raw.input?.worktree_path === "string"
            ? raw.input.worktree_path
            : undefined;
    return {
        run_id: runId,
        type: typeof raw.type === "string" ? raw.type : "unknown",
        status: parseRunStatus(raw),
        lifecycle: parseRunLifecycle(raw),
        cwd: typeof raw.input?.cwd === "string" ? raw.input.cwd : undefined,
        summary: typeof raw.report?.summary === "string" ? raw.report.summary : undefined,
        error: typeof raw.error === "string" ? raw.error : undefined,
        worktree_path: worktreePath,
        // Derive the name from the path when the log did not record one.
        worktree_name: typeof raw.observed?.worktree_name === "string"
            ? raw.observed.worktree_name
            : worktreePath
                ? path.basename(worktreePath)
                : undefined,
        // Session ids pass through when they are strings or explicit null.
        requested_session_id: typeof raw.session?.requested_session_id === "string" || raw.session?.requested_session_id === null
            ? raw.session.requested_session_id
            : undefined,
        returned_session_id: typeof raw.session?.returned_session_id === "string" || raw.session?.returned_session_id === null
            ? raw.session.returned_session_id
            : undefined,
        retried_after_session_expired: raw.retried_after_session_expired === true,
        started_at: typeof raw.started_at === "string" ? raw.started_at : updatedAt,
        updated_at: typeof raw.updated_at === "string" ? raw.updated_at : updatedAt,
    };
}
685
// Tally the run entries by lifecycle and return both the entries and the
// per-lifecycle counts.
function summarizeRecentRuns(entries) {
    const lifecycleCounts = {};
    for (const { lifecycle } of entries) {
        lifecycleCounts[lifecycle] = (lifecycleCounts[lifecycle] ?? 0) + 1;
    }
    return { entries, lifecycle_counts: lifecycleCounts };
}
692
// Read and parse a single run log by run id; null when the file is missing
// or contains invalid JSON.
async function readRunLogFile(runId, cwd) {
    const file = path.join(getRunLogDir(cwd), `${runId}.json`);
    try {
        const contents = await readFile(file, "utf8");
        return JSON.parse(contents);
    }
    catch {
        return null;
    }
}
702
// List run-log summaries for a workspace, newest first, filtered by the
// optional type/status/worktree_name fields of `input` and capped at
// input.limit (default 20). Logs recorded for a different cwd are skipped;
// logs with no recorded cwd pass the filter. Returns empty results when the
// log directory is unreadable.
export async function listRunLogs(input) {
    const limit = input.limit ?? 20;
    const logDir = getRunLogDir(input.cwd);
    try {
        const entries = await readdir(logDir);
        // Stat all .json logs in parallel to sort by modification time.
        const candidates = await Promise.all(entries
            .filter((name) => name.endsWith(".json"))
            .map(async (name) => {
            const file = path.join(logDir, name);
            try {
                const stats = await stat(file);
                return { file, runId: name.replace(/\.json$/, ""), mtimeMs: stats.mtimeMs, updatedAt: new Date(stats.mtimeMs).toISOString() };
            }
            catch {
                // File disappeared between readdir and stat; ignore it.
                return null;
            }
        }));
        const summaries = [];
        for (const candidate of candidates
            .filter((item) => item !== null)
            .sort((a, b) => b.mtimeMs - a.mtimeMs)) {
            try {
                const raw = JSON.parse(await readFile(candidate.file, "utf8"));
                const summary = summarizeRunLog(candidate.runId, raw, candidate.updatedAt);
                if (summary.cwd && summary.cwd !== input.cwd)
                    continue;
                if (input.type && summary.type !== input.type)
                    continue;
                if (input.status && summary.status !== input.status)
                    continue;
                if (input.worktree_name && summary.worktree_name !== input.worktree_name)
                    continue;
                summaries.push(summary);
                if (summaries.length >= limit)
                    break;
            }
            catch {
                // Skip malformed or concurrently written logs.
                continue;
            }
        }
        return { entries: summaries, total_entries: summaries.length };
    }
    catch {
        // No log directory yet: report an empty listing.
        return { entries: [], total_entries: 0 };
    }
}
748
// Fetch the most recent run logs for a workspace (default 5) and roll them
// up into entries plus per-lifecycle counts.
export async function getRecentRunsSummary(cwd, limit = 5) {
    const { entries } = await listRunLogs({ cwd, limit });
    return summarizeRecentRuns(entries);
}
752
// Load a single run log by id, scoped to the workspace: returns null when
// the log is missing or was recorded for a different cwd. The response pairs
// the summary entry with the raw log and any downstream apply/cleanup run ids.
export async function getRunLogById(input) {
    const raw = await readRunLogFile(input.run_id, input.cwd);
    if (!raw)
        return null;
    // NOTE(review): called without the updatedAt fallback, so started_at /
    // updated_at are undefined when the log lacks them — confirm intended.
    const summary = summarizeRunLog(input.run_id, raw);
    // A recorded cwd that differs means this run belongs to another workspace.
    if (summary.cwd && summary.cwd !== input.cwd)
        return null;
    return {
        entry: summary,
        raw: raw,
        related_runs: {
            apply_run_id: typeof raw.downstream?.last_apply_run_id === "string" ? raw.downstream.last_apply_run_id : undefined,
            cleanup_run_id: typeof raw.downstream?.last_cleanup_run_id === "string" ? raw.downstream.last_cleanup_run_id : undefined,
        },
    };
}
768
// Project the interesting, well-typed fields of a raw run log onto a result
// payload: only fields of the expected type are copied, and `error` only
// when non-empty.
function buildRunResultPayload(raw) {
    const isObjectLike = (value) => Boolean(value) && typeof value === "object";
    const payload = {
        type: typeof raw.type === "string" ? raw.type : "unknown",
    };
    if (isObjectLike(raw.report)) {
        payload.report = raw.report;
    }
    if (typeof raw.error === "string" && raw.error.length > 0) {
        payload.error = raw.error;
    }
    if (typeof raw.preview === "boolean") {
        payload.preview = raw.preview;
    }
    if (Array.isArray(raw.applied_files)) {
        payload.applied_files = raw.applied_files;
    }
    if (typeof raw.removed_count === "number") {
        payload.removed_count = raw.removed_count;
    }
    if (typeof raw.failed_count === "number") {
        payload.failed_count = raw.failed_count;
    }
    if (isObjectLike(raw.observed)) {
        payload.observed = raw.observed;
    }
    if (isObjectLike(raw.downstream)) {
        payload.downstream = raw.downstream;
    }
    return payload;
}
790
/**
 * Produce a one-line human summary for a run entry: prefer the recorded
 * summary, then an error message, then "<type> <lifecycle>".
 */
function buildResultSummaryFromRun(entry) {
    const { summary, error, type, lifecycle } = entry;
    if (summary) {
        return summary;
    }
    return error ? `${type} failed: ${error}` : `${type} ${lifecycle}`;
}
797
// Resolve the workflow session summary that best matches a run.
// Preference order:
//   1. the session the run recorded, enriched from the session store;
//   2. a synthesized minimal summary when the store no longer has it;
//   3. the newest non-expired session of the run's type in this repo.
// Returns undefined when no session can be resolved.
async function resolveWorkflowSessionSummary(input) {
    const store = await getStore(input.cwd);
    const repoKey = await computeRepoKey(input.cwd);
    const run = input.run;
    if (run?.returned_session_id) {
        const stored = store.getById(run.returned_session_id);
        if (stored) {
            // Store still knows this session: enrich its summary with the
            // run's requested/returned ids so callers can see resume lineage.
            return {
                ...toWorkflowSessionSummaryFromStore(stored),
                requested_session_id: run.requested_session_id,
                returned_session_id: run.returned_session_id,
                resumed: !!run.requested_session_id,
                source: "run",
            };
        }
        const type = normalizeSessionType(run.type);
        if (type) {
            // Session id known from the run but absent from the store
            // (e.g. pruned): synthesize a minimal summary from run data.
            return {
                session_id: run.returned_session_id,
                type,
                requested_session_id: run.requested_session_id,
                returned_session_id: run.returned_session_id,
                resumed: !!run.requested_session_id,
                source: "run",
            };
        }
    }
    // No usable session id on the run: fall back to the most recent live
    // session of the same type for this repository, if any.
    const type = normalizeSessionType(run?.type);
    if (!type)
        return undefined;
    const recent = store.listByRepo(repoKey, 20).find((session) => session.type === type && !session.expired);
    return recent ? toWorkflowSessionSummaryFromStore(recent) : undefined;
}
830
// Build the ordered list of suggested follow-up tool calls for a finished
// (or active) job/run. The ordering and reason strings are part of the
// tool's user-facing contract; an active job short-circuits everything
// else to a single "keep polling" action.
function buildNextActions(input) {
    const actions = [];
    const run = input.run;
    const job = input.job;
    // Run metadata wins over job metadata when both are present.
    const type = run?.type ?? job?.type;
    const worktreePath = run?.worktree_path;
    if (job && (job.status === "queued" || job.status === "running")) {
        // Active job: the only sensible action is to keep waiting on it.
        return [
            {
                tool: "claude_job_wait",
                reason: "This job is still active. Continue polling this job_id and do not start another job for the same task.",
                args: { cwd: input.cwd, job_id: job.job_id },
            },
        ];
    }
    if (type === "implement") {
        if (worktreePath) {
            // Implementations land in a worktree; suggest a preview-only apply.
            actions.push({
                tool: "claude_apply",
                reason: "Preview the delegated worktree diff before modifying the main workspace. After preview, ask the user for explicit approval before any non-preview apply.",
                args: { cwd: input.cwd, worktree_path: worktreePath, preview: true },
            });
        }
        if (input.session?.session_id) {
            // A resumable Claude session exists for this implementation.
            actions.push({
                tool: "claude_implement",
                reason: "This implementation has a resumable Claude session.",
                args: { cwd: input.cwd, resume_latest: true },
            });
        }
    }
    if (type === "review") {
        actions.push({
            tool: "claude_review",
            reason: "Run another review pass or adjust review instructions if follow-up validation is needed.",
            args: { cwd: input.cwd },
        });
    }
    if (run?.lifecycle === "apply_blocked") {
        actions.push({
            tool: "claude_run_inspect",
            reason: "The apply step was blocked and usually needs a closer look at the underlying run details.",
            args: { cwd: input.cwd, run_id: run.run_id },
        });
    }
    if (run?.status === "needs_user") {
        // Claude paused for user input: inspect first, optionally clean up,
        // then offer a fresh implementation as a last resort.
        actions.push({
            tool: "claude_run_inspect",
            reason: "Claude stopped for user input; inspect the run before deciding whether to resume, apply, or discard it.",
            args: { cwd: input.cwd, run_id: run.run_id },
        });
        if (worktreePath) {
            actions.push({
                tool: "claude_cleanup",
                reason: "If the needs_user worktree is not useful, clean delegated worktrees after inspection.",
                args: { cwd: input.cwd, dry_run: true },
            });
        }
        actions.push({
            tool: "claude_implement",
            reason: "If the Claude session cannot be resumed, start a fresh implementation with the same task.",
            args: { cwd: input.cwd },
        });
    }
    if (input.related_runs?.cleanup_run_id) {
        actions.push({
            tool: "claude_run_inspect",
            reason: "A related cleanup run exists for this workflow.",
            args: { cwd: input.cwd, run_id: input.related_runs.cleanup_run_id },
        });
    }
    return actions;
}
903
/**
 * Compare two ISO timestamp strings for recency-based sorting.
 * Missing or unparseable timestamps are treated as epoch (0) so the
 * comparator never returns NaN — `Date.parse` of a malformed string
 * yields NaN, and a NaN comparator result makes `Array.prototype.sort`
 * ordering unspecified and non-deterministic.
 * @param {string|undefined} a - First timestamp (may be undefined).
 * @param {string|undefined} b - Second timestamp (may be undefined).
 * @returns {number} Negative when a is older, positive when newer, 0 on tie.
 */
function compareRecency(a, b) {
    const toMillis = (value) => {
        if (!value)
            return 0;
        const parsed = Date.parse(value);
        return Number.isFinite(parsed) ? parsed : 0;
    };
    return toMillis(a) - toMillis(b);
}
908
/**
 * List delegated worktrees under <cwd>/.claude/worktrees, newest first.
 * A worktree is flagged stale when its mtime is older than 24 hours.
 * Returns [] when the directory is missing or unreadable.
 */
async function listDelegatedWorktrees(cwd) {
    const worktreeDir = path.join(cwd, ".claude", "worktrees");
    const staleCutoffMs = Date.now() - 24 * 60 * 60 * 1000;
    let dirEntries;
    try {
        dirEntries = await readdir(worktreeDir, { withFileTypes: true });
    }
    catch {
        // No worktree directory yet (or unreadable): nothing delegated.
        return [];
    }
    const delegated = dirEntries.filter((entry) => entry.isDirectory() && entry.name.startsWith("codex-delegated-"));
    const summarized = await Promise.all(delegated.map(async (entry) => {
        const worktreePath = path.join(worktreeDir, entry.name);
        try {
            const details = await stat(worktreePath);
            return {
                worktree_name: entry.name,
                worktree_path: worktreePath,
                updated_at: new Date(details.mtimeMs).toISOString(),
                stale: details.mtimeMs < staleCutoffMs,
            };
        }
        catch {
            // stat raced with deletion: keep the entry but without timing info.
            return {
                worktree_name: entry.name,
                worktree_path: worktreePath,
                stale: false,
            };
        }
    }));
    return summarized.sort((a, b) => compareRecency(b.updated_at, a.updated_at));
}
940
// Resolve "the result the caller most likely wants": an explicit job_id or
// run_id when given, otherwise the latest terminal job and/or finished run
// according to the `prefer` strategy ("latest-job" by default). Throws when
// the referenced job/run does not exist or nothing matching is found.
export async function getClaudeResult(input) {
    const prefer = input.prefer ?? "latest-job";
    let resolvedJob;
    let resolvedRun = null;
    let resultPayload;
    if (input.job_id) {
        // Explicit job id: load it and, if it recorded a run, that run too.
        const jobResult = await getBackgroundJobResult({ cwd: input.cwd, job_id: input.job_id });
        if (!jobResult) {
            throw new Error(`Job not found: ${input.job_id}`);
        }
        resolvedJob = jobResult.job;
        resultPayload = jobResult.result;
        if (jobResult.job.run_id) {
            resolvedRun = await getRunLogById({ cwd: input.cwd, run_id: jobResult.job.run_id });
        }
    }
    else if (input.run_id) {
        // Explicit run id: payload is rebuilt from the raw run-log file.
        resolvedRun = await getRunLogById({ cwd: input.cwd, run_id: input.run_id });
        if (!resolvedRun) {
            throw new Error(`Run not found: ${input.run_id}`);
        }
        resultPayload = buildRunResultPayload((await readRunLogFile(input.run_id, input.cwd)) ?? {});
    }
    else {
        // No explicit id: find the newest terminal job and newest usable run,
        // optionally filtered to implement/review by the prefer strategy.
        const runTypeFilter = prefer === "latest-implement" ? "implement" : prefer === "latest-review" ? "review" : undefined;
        const jobTypeFilter = runTypeFilter;
        const terminalJobLists = await Promise.all([
            listBackgroundJobs({ cwd: input.cwd, limit: 20, status: "succeeded", type: jobTypeFilter }),
            listBackgroundJobs({ cwd: input.cwd, limit: 20, status: "failed", type: jobTypeFilter }),
            listBackgroundJobs({ cwd: input.cwd, limit: 20, status: "cancelled", type: jobTypeFilter }),
        ]);
        const latestJob = terminalJobLists
            .flatMap((result) => result.entries)
            .sort((a, b) => compareRecency(b.updated_at, a.updated_at))[0];
        const latestRun = (await listRunLogs({ cwd: input.cwd, limit: 20, type: runTypeFilter })).entries
            .find((entry) => entry.status !== "unknown" || entry.lifecycle !== "unknown");
        if (prefer === "latest-run" || (!latestJob && latestRun)) {
            if (latestRun) {
                resolvedRun = await getRunLogById({ cwd: input.cwd, run_id: latestRun.run_id });
                resultPayload = buildRunResultPayload((await readRunLogFile(latestRun.run_id, input.cwd)) ?? {});
            }
        }
        else if (prefer === "latest-job" || !latestRun) {
            if (latestJob) {
                const jobResult = await getBackgroundJobResult({ cwd: input.cwd, job_id: latestJob.job_id });
                resolvedJob = latestJob;
                resultPayload = jobResult?.result;
                if (latestJob.run_id) {
                    resolvedRun = await getRunLogById({ cwd: input.cwd, run_id: latestJob.run_id });
                }
            }
        }
        else if (latestJob && latestRun) {
            // Both exist and no explicit preference applies: pick whichever
            // was updated more recently (ties go to the job).
            if (compareRecency(latestJob.updated_at, latestRun.updated_at) >= 0) {
                const jobResult = await getBackgroundJobResult({ cwd: input.cwd, job_id: latestJob.job_id });
                resolvedJob = latestJob;
                resultPayload = jobResult?.result;
                if (latestJob.run_id) {
                    resolvedRun = await getRunLogById({ cwd: input.cwd, run_id: latestJob.run_id });
                }
            }
            else {
                resolvedRun = await getRunLogById({ cwd: input.cwd, run_id: latestRun.run_id });
                resultPayload = buildRunResultPayload((await readRunLogFile(latestRun.run_id, input.cwd)) ?? {});
            }
        }
    }
    if (!resolvedJob && !resolvedRun) {
        throw new Error("No matching finished job or run found for this workspace.");
    }
    const runEntry = resolvedRun?.entry;
    const session = await resolveWorkflowSessionSummary({ cwd: input.cwd, run: runEntry });
    const summary = resolvedJob?.summary ?? (runEntry ? buildResultSummaryFromRun(runEntry) : "Background job resolved");
    const jobIsActive = resolvedJob?.status === "queued" || resolvedJob?.status === "running";
    return {
        source_type: resolvedJob ? "job" : "run",
        summary,
        job: resolvedJob,
        run: runEntry,
        session,
        result: resultPayload,
        related_runs: resolvedRun?.related_runs,
        // Flag is only emitted (true) while the resolved job is still active.
        do_not_start_duplicate_job: jobIsActive ? true : undefined,
        next_actions: buildNextActions({
            cwd: input.cwd,
            job: resolvedJob,
            run: runEntry,
            related_runs: resolvedRun?.related_runs,
            session,
        }),
    };
}
1032
// Aggregate a full workspace snapshot: active/terminal jobs, recent runs,
// live sessions, delegated worktrees (with stale/orphan flags), attention
// items, and next-action suggestions. Terminal jobs are only fetched when
// input.include_terminal is truthy.
export async function getWorkspaceStatus(input) {
    const limit = input.limit ?? 10;
    const [runningJobs, queuedJobs, succeededJobs, failedJobs, cancelledJobs, recentRuns, worktrees] = await Promise.all([
        listBackgroundJobs({ cwd: input.cwd, limit, status: "running" }),
        listBackgroundJobs({ cwd: input.cwd, limit, status: "queued" }),
        input.include_terminal ? listBackgroundJobs({ cwd: input.cwd, limit, status: "succeeded" }) : Promise.resolve({ entries: [] }),
        input.include_terminal ? listBackgroundJobs({ cwd: input.cwd, limit, status: "failed" }) : Promise.resolve({ entries: [] }),
        input.include_terminal ? listBackgroundJobs({ cwd: input.cwd, limit, status: "cancelled" }) : Promise.resolve({ entries: [] }),
        listRunLogs({ cwd: input.cwd, limit }),
        listDelegatedWorktrees(input.cwd),
    ]);
    // Merge the three terminal statuses and keep only the newest `limit`.
    const terminalJobs = [...succeededJobs.entries, ...failedJobs.entries, ...cancelledJobs.entries]
        .sort((a, b) => compareRecency(b.updated_at, a.updated_at))
        .slice(0, limit);
    // A worktree is "orphaned" when no recent run or job references it.
    const referencedWorktreeNames = new Set();
    for (const run of recentRuns.entries) {
        if (run.worktree_name)
            referencedWorktreeNames.add(run.worktree_name);
    }
    for (const job of [...runningJobs.entries, ...queuedJobs.entries, ...terminalJobs]) {
        if (job.worktree_name)
            referencedWorktreeNames.add(job.worktree_name);
    }
    const summarizedWorktrees = worktrees.map((worktree) => ({
        ...worktree,
        orphaned: !referencedWorktreeNames.has(worktree.worktree_name),
    }));
    const store = await getStore(input.cwd);
    const repoKey = await computeRepoKey(input.cwd);
    const latestSessions = store.listByRepo(repoKey, limit)
        .filter((session) => !session.expired)
        .map(toWorkflowSessionSummaryFromStore);
    const attentionItems = [];
    // Jobs queued for more than 10 minutes are surfaced as warnings.
    const queuedCutoff = Date.now() - 10 * 60 * 1000;
    for (const job of queuedJobs.entries) {
        const createdAt = Date.parse(job.created_at);
        if (Number.isFinite(createdAt) && createdAt <= queuedCutoff) {
            attentionItems.push({
                kind: "queued_job",
                severity: "warning",
                message: `Queued job ${job.job_id} has been waiting for more than 10 minutes.`,
            });
        }
    }
    for (const run of recentRuns.entries) {
        if (run.lifecycle === "apply_blocked") {
            attentionItems.push({
                kind: "apply_blocked",
                severity: "warning",
                message: `Run ${run.run_id} is apply_blocked and may need manual inspection before changes can land.`,
            });
        }
    }
    for (const worktree of summarizedWorktrees) {
        if (worktree.stale) {
            attentionItems.push({
                kind: "stale_worktree",
                severity: "info",
                message: `Delegated worktree ${worktree.worktree_name} looks stale and may be ready for cleanup.`,
            });
        }
        if (worktree.orphaned) {
            attentionItems.push({
                kind: "orphan_worktree",
                severity: "info",
                message: `Delegated worktree ${worktree.worktree_name} is not referenced by recent runs or jobs.`,
            });
        }
    }
    const activeJobs = [...runningJobs.entries, ...queuedJobs.entries]
        .sort((a, b) => compareRecency(b.updated_at, a.updated_at));
    // Every active job gets a "keep polling" suggestion, capped at `limit`.
    const workspaceNextActions = activeJobs.slice(0, limit).map((job) => ({
        tool: "claude_job_wait",
        reason: "Workspace has an active delegated job. Poll this job_id instead of starting a duplicate task.",
        args: { cwd: input.cwd, job_id: job.job_id },
    }));
    return {
        workspace_root: input.cwd,
        running_jobs: runningJobs.entries,
        queued_jobs: queuedJobs.entries,
        recent_terminal_jobs: terminalJobs,
        recent_runs: recentRuns.entries,
        latest_sessions: latestSessions,
        delegated_worktrees: summarizedWorktrees,
        counts: {
            running_jobs: runningJobs.entries.length,
            queued_jobs: queuedJobs.entries.length,
            terminal_jobs: terminalJobs.length,
            recent_runs: recentRuns.entries.length,
            delegated_worktrees: summarizedWorktrees.length,
            stale_worktrees: summarizedWorktrees.filter((worktree) => worktree.stale).length,
            orphan_worktrees: summarizedWorktrees.filter((worktree) => worktree.orphaned).length,
            apply_blocked_runs: recentRuns.entries.filter((run) => run.lifecycle === "apply_blocked").length,
        },
        // These two fields are omitted (undefined) rather than false/empty.
        do_not_start_duplicate_job: activeJobs.length > 0 ? true : undefined,
        next_actions: workspaceNextActions.length > 0 ? workspaceNextActions : undefined,
        attention_items: attentionItems,
    };
}
1131
/**
 * Report overall setup/readiness: hook install state, review-gate state,
 * Claude CLI/git/workspace checks, and recommended next steps.
 */
export async function runClaudeSetup(input) {
    const manifestPath = getHookManifestPath();
    const scriptPath = getHookScriptPath();
    const hookInstalled = existsSync(manifestPath) && existsSync(scriptPath);
    const gateState = await readReviewGateState(input.cwd);
    const status = await checkClaudeStatus(input.cwd);
    const reviewGate = buildReviewGateState(input.cwd, gateState, hookInstalled);
    // Map the CLI's auth strings onto the coarse ok/missing/unknown triad.
    let authStatus = "unknown";
    if (status.auth_status === "authenticated") {
        authStatus = "ok";
    }
    else if (status.auth_status === "not authenticated" || status.auth_status === "unauthenticated or unknown") {
        authStatus = "missing";
    }
    const nextSteps = [];
    if (!(status.claude_available && status.git_available && status.cwd_valid)) {
        nextSteps.push("Run claude_status and fix Claude CLI, git, or workspace readiness issues before using the review gate.");
    }
    nextSteps.push(...getReviewGateNextSteps(reviewGate.enabled, hookInstalled, reviewGate.pending_review));
    return {
        workspace_root: input.cwd,
        review_gate: reviewGate,
        claude_available: status.claude_available,
        claude_version: status.claude_version,
        auth_status: authStatus,
        git_available: status.git_available,
        worktree_capable: status.worktree_capable,
        cwd_valid: status.cwd_valid,
        cwd_is_git_repo: status.cwd_is_git_repo,
        errors: status.errors,
        next_steps: nextSteps,
    };
}
1162
// Inspect or toggle the per-workspace review gate.
// action "status" is read-only; "enable" also (re)writes the stop-hook
// manifest; "disable" flips the flag but leaves the hook asset installed.
// Throws when the bundled hook script file is missing and a toggle was
// requested.
export async function manageClaudeReviewGate(input) {
    const hookManifestPath = getHookManifestPath();
    const hookScriptPath = getHookScriptPath();
    // "installable" = the hook script shipped with this package exists.
    const hookInstallable = existsSync(hookScriptPath);
    const current = await readReviewGateState(input.cwd);
    const action = input.action ?? "status";
    const hookInstalled = existsSync(hookManifestPath) && hookInstallable;
    if (action === "status") {
        const reviewGate = buildReviewGateState(input.cwd, current, hookInstalled);
        return {
            ...reviewGate,
            action,
            changed: false,
            summary: reviewGate.enabled
                ? (reviewGate.pending_review ? "Review gate is enabled and a review is pending." : "Review gate is enabled for this workspace.")
                : "Review gate is disabled for this workspace.",
            next_steps: getReviewGateNextSteps(reviewGate.enabled, hookInstalled, reviewGate.pending_review),
        };
    }
    if (!hookInstallable) {
        throw new Error(`Review gate hook script is missing: ${hookScriptPath}`);
    }
    if (action === "enable") {
        // Ensure the stop-hook manifest exists before recording enablement.
        await ensureReviewGateHookManifest();
    }
    const nextState = await writeReviewGateState(input.cwd, action === "enable");
    // Re-check the manifest: "enable" may have just created it.
    const reviewGate = buildReviewGateState(input.cwd, nextState, existsSync(hookManifestPath) && hookInstallable);
    return {
        ...reviewGate,
        action,
        // changed is true when the flag flipped or no prior state existed.
        changed: current?.enabled !== nextState.enabled || !current,
        summary: nextState.enabled
            ? "Review gate enabled for this workspace and stop-hook manifest is ready."
            : "Review gate disabled for this workspace. Hook asset is left installed but locally inactive.",
        next_steps: getReviewGateNextSteps(nextState.enabled, existsSync(hookManifestPath) && hookInstallable, reviewGate.pending_review),
    };
}
1199
/**
 * Infer the delegated mode ("write" | "review" | "read") for a task.
 * Precedence: explicit non-auto mode > non-empty diff (review) >
 * constraints (write) > keyword hints (write, review, read in that order)
 * > instruction files (review) > default "read".
 */
function inferClaudeTaskMode(input) {
    if (input.mode && input.mode !== "auto") {
        return input.mode;
    }
    if (typeof input.diff === "string" && input.diff.trim().length > 0) {
        // A supplied diff strongly implies a review request.
        return "review";
    }
    if ((input.constraints?.length ?? 0) > 0) {
        // Constraints only make sense for write-style delegation.
        return "write";
    }
    const lowered = input.task.toLowerCase();
    const hintTable = [
        ["write", /\b(fix|change|implement|write|edit|modify|update|refactor|patch|add|create)\b/],
        ["review", /\b(review|audit|inspect|check|find bugs|look for issues|critique)\b/],
        ["read", /\b(explain|analyze|analyse|why|how|what|summarize|describe|read-only|understand)\b/],
    ];
    for (const [mode, pattern] of hintTable) {
        if (pattern.test(lowered)) {
            return mode;
        }
    }
    return (input.files?.length ?? 0) > 0 ? "review" : "read";
}
1227
/**
 * One-line summary of how a delegated task was dispatched.
 * @param {string} mode - Inferred delegation mode.
 * @param {boolean} background - Whether the task runs as a background job.
 */
function summarizeTaskDispatch(mode, background) {
    const suffix = background
        ? "task as a background job."
        : "task and returned the current result.";
    return `Delegated ${mode} ${suffix}`;
}
1233
// Warning surfaced whenever callers still pass the deprecated claude_task.files.
const CLAUDE_TASK_FILES_DEPRECATED_WARNING = "claude_task.files is deprecated and treated as instruction_files, not apply scope. Use advanced claude_implement allowed_files/scope options for strict file modification limits.";
/**
 * Merge instruction_files with the deprecated files field into one trimmed,
 * deduplicated, locale-sorted list; emit the deprecation warning when the
 * legacy field was used.
 */
function resolveTaskInstructionFiles(input) {
    const unique = new Set();
    for (const file of [...(input.instruction_files ?? []), ...(input.files ?? [])]) {
        const trimmed = file.trim();
        if (trimmed.length > 0) {
            unique.add(trimmed);
        }
    }
    const instructionFiles = [...unique].sort((a, b) => a.localeCompare(b));
    return {
        instructionFiles: instructionFiles.length > 0 ? instructionFiles : undefined,
        warnings: input.files?.length ? [CLAUDE_TASK_FILES_DEPRECATED_WARNING] : [],
    };
}
1244
// Dispatch a free-form task to the right background workflow based on the
// inferred mode: read -> query job, review -> review job, otherwise a
// write/implement job. Implement dispatch may instead return a
// dirty-workspace decision object (no `job` key) that the caller must
// resolve with the user first.
export async function runClaudeTask(input, _runId) {
    const delegatedMode = inferClaudeTaskMode(input);
    const { instructionFiles, warnings } = resolveTaskInstructionFiles(input);
    if (delegatedMode === "read") {
        const queued = await startBackgroundQuery({
            cwd: input.cwd,
            task: input.task,
            instruction_files: instructionFiles,
            timeout_sec: input.timeout_sec,
        });
        return {
            delegated_mode: delegatedMode,
            summary: queued.message ?? summarizeTaskDispatch(delegatedMode, true),
            job: queued.job,
            deduped: queued.deduped,
            do_not_start_duplicate_job: queued.do_not_start_duplicate_job,
            warnings,
            next_actions: queued.next_actions ?? buildNextActions({ cwd: input.cwd, job: queued.job }),
        };
    }
    if (delegatedMode === "review") {
        const queued = await startBackgroundReview({
            cwd: input.cwd,
            task: input.task,
            diff: input.diff,
            instruction_files: instructionFiles,
            timeout_sec: input.timeout_sec,
        });
        return {
            delegated_mode: delegatedMode,
            summary: queued.message ?? summarizeTaskDispatch(delegatedMode, true),
            job: queued.job,
            deduped: queued.deduped,
            do_not_start_duplicate_job: queued.do_not_start_duplicate_job,
            warnings,
            next_actions: queued.next_actions ?? buildNextActions({ cwd: input.cwd, job: queued.job }),
        };
    }
    // Default: delegate as a write/implement job.
    const queued = await startBackgroundImplement({
        cwd: input.cwd,
        task: input.task,
        instruction_files: instructionFiles,
        constraints: input.constraints,
        timeout_sec: input.timeout_sec,
        resume_latest: input.resume_latest,
        dirty_policy: input.dirty_policy,
    });
    if (!("job" in queued)) {
        // startBackgroundImplement returned a needs-user decision instead of
        // enqueuing: surface it without pretending a job exists.
        return {
            delegated_mode: delegatedMode,
            summary: "Write task needs a dirty-workspace decision before it can be delegated.",
            result: queued,
            warnings,
            next_actions: [],
        };
    }
    return {
        delegated_mode: delegatedMode,
        summary: queued.message ?? summarizeTaskDispatch(delegatedMode, true),
        job: queued.job,
        deduped: queued.deduped,
        do_not_start_duplicate_job: queued.do_not_start_duplicate_job,
        warnings,
        next_actions: queued.next_actions ?? buildNextActions({ cwd: input.cwd, job: queued.job }),
    };
}
1310
/**
 * Wrap a job summary in the standard enqueue/dedupe response envelope.
 * The duplicate-job flag is set for deduped hits and for still-active jobs.
 */
function buildBackgroundJobResponse(input) {
    const { cwd, job } = input;
    const deduped = input.deduped === true;
    const active = job.status === "queued" || job.status === "running";
    return {
        job,
        deduped,
        do_not_start_duplicate_job: deduped || active,
        message: deduped ? buildDuplicateJobMessage(job) : undefined,
        next_actions: buildNextActions({ cwd, job }),
    };
}
1320
// Persist a new background job and launch its detached runner process.
// When input.dedupe is true, an active job with the same fingerprint is
// returned instead of creating a duplicate. The runner is spawned detached
// with stdio ignored so the MCP server process never blocks on it.
export async function enqueueBackgroundJob(input) {
    const stateDir = getBackgroundStateDir();
    const jobStore = new JobStore(stateDir);
    await jobStore.init();
    // Fingerprint is only computed when dedupe was requested.
    const fingerprint = input.dedupe === true
        ? createTaskFingerprint({ cwd: input.cwd, type: input.type, payload: input.payload })
        : undefined;
    if (fingerprint) {
        const existing = await jobStore.findActiveByFingerprint({
            cwd: input.cwd,
            type: input.type,
            fingerprint,
        });
        if (existing) {
            // Same task already queued/running: hand back that job.
            return buildBackgroundJobResponse({
                cwd: input.cwd,
                job: toJobSummary(existing),
                deduped: true,
            });
        }
    }
    const now = new Date().toISOString();
    const jobId = `job-${randomUUID()}`;
    const record = {
        job_id: jobId,
        type: input.type,
        status: "queued",
        cwd: input.cwd,
        created_at: now,
        updated_at: now,
        fingerprint,
        payload: input.payload,
    };
    // The record must exist on disk before the runner starts, since the
    // runner reads it from the shared state dir.
    await jobStore.create(record);
    const child = spawn(process.execPath, getJobRunnerArgs(jobId), {
        cwd: input.cwd,
        detached: true,
        stdio: "ignore",
        env: {
            ...sanitizeEnv(),
            [JOB_STATE_DIR_ENV]: getBackgroundStateDir(),
        },
    });
    // Detach so the runner outlives this server process.
    child.unref();
    const updated = await jobStore.update(jobId, {
        pid: child.pid ?? undefined,
        updated_at: new Date().toISOString(),
    });
    return buildBackgroundJobResponse({
        cwd: input.cwd,
        // Fall back to the in-memory record if the store update raced.
        job: toJobSummary(updated ?? { ...record, pid: child.pid ?? undefined }),
    });
}
1373
/**
 * Queue a deduplicated background review job and clear any pending
 * review-gate flag (best effort; gate bookkeeping never blocks dispatch).
 */
export async function startBackgroundReview(input) {
    const queuedJob = await enqueueBackgroundJob({
        cwd: input.cwd,
        type: "review",
        payload: input,
        dedupe: true,
    });
    try {
        await markReviewGatePending(input.cwd, false, "review");
    }
    catch {
        // advisory bookkeeping only — ignore failures
    }
    return queuedJob;
}
1383
/**
 * Queue a deduplicated background read-only query job.
 * Queries never touch the review gate, so no gate bookkeeping happens here.
 */
export async function startBackgroundQuery(input) {
    const request = {
        cwd: input.cwd,
        type: "query",
        payload: input,
        dedupe: true,
    };
    return enqueueBackgroundJob(request);
}
1391
/**
 * Queue a deduplicated background implement job. Under the default "ask"
 * dirty policy, a dirty workspace short-circuits to a needs-user decision
 * object instead of enqueuing. Marks the review gate pending (best effort).
 */
export async function startBackgroundImplement(input) {
    const dirtyPolicy = input.dirty_policy ?? "ask";
    if (dirtyPolicy === "ask") {
        const { requestedFiles, dirtyFiles } = await preflightImplementDirtyState(input);
        if (dirtyFiles.length > 0) {
            // Let the user decide what to do with uncommitted changes first.
            return dirtyNeedsUserResult(input, dirtyFiles, requestedFiles);
        }
    }
    const queuedJob = await enqueueBackgroundJob({
        cwd: input.cwd,
        type: "implement",
        payload: input,
        dedupe: true,
    });
    try {
        await markReviewGatePending(input.cwd, true, "write");
    }
    catch {
        // advisory bookkeeping only — ignore failures
    }
    return queuedJob;
}
1407
/**
 * Queue a background apply job (never deduplicated — each apply is distinct)
 * and mark the review gate pending (best effort).
 */
export async function startBackgroundApply(input) {
    const queuedJob = await enqueueBackgroundJob({
        cwd: input.cwd,
        type: "apply",
        payload: input,
    });
    try {
        await markReviewGatePending(input.cwd, true, "write");
    }
    catch {
        // advisory bookkeeping only — ignore failures
    }
    return queuedJob;
}
1416
/**
 * Queue a background cleanup job (no dedupe, no review-gate bookkeeping).
 */
export async function startBackgroundCleanup(input) {
    const request = {
        cwd: input.cwd,
        type: "cleanup",
        payload: input,
    };
    return enqueueBackgroundJob(request);
}
1423
/**
 * List background jobs for a workspace as summaries, optionally filtered
 * by status/type; defaults to the 20 most recent.
 */
export async function listBackgroundJobs(input) {
    const jobStore = await getJobStore();
    const records = await jobStore.list({
        cwd: input.cwd,
        limit: input.limit ?? 20,
        status: input.status,
        type: input.type,
    });
    return { entries: records.map((record) => toJobSummary(record)) };
}
1433
/**
 * Fetch one background job plus its recorded result.
 * Returns null when the job is unknown or belongs to a different workspace.
 */
export async function getBackgroundJobResult(input) {
    const jobStore = await getJobStore();
    const record = await jobStore.get(input.job_id);
    if (!record || record.cwd !== input.cwd) {
        return null;
    }
    return {
        job: toJobSummary(record),
        result: record.result,
    };
}
1443
// Poll a background job once and return its state plus polling guidance.
// Implements client-side rate limiting: callers who poll again before the
// previously recommended delay get poll_too_soon plus the remaining wait,
// and their poll is NOT recorded (so the window does not slide). Stale jobs
// (dead pid / missed heartbeats) bypass the throttle so they can be acted on.
export async function waitForBackgroundJob(input) {
    const jobStore = await getJobStore();
    const record = await jobStore.get(input.job_id);
    if (!record || record.cwd !== input.cwd) {
        throw new Error(`Job not found: ${input.job_id}`);
    }
    const result = {
        job: toJobSummary(record),
        result: record.result,
    };
    const terminal = result.job.status === "succeeded" || result.job.status === "failed" || result.job.status === "cancelled";
    const nowMs = Date.now();
    const ageMs = ageMsSince(result.job.created_at, nowMs) ?? 0;
    // Heartbeat age only matters for active jobs; prefer explicit heartbeat,
    // fall back to the last store update.
    const heartbeatAgeMs = terminal
        ? undefined
        : ageMsSince(result.job.heartbeat_at ?? result.job.updated_at, nowMs);
    const staleState = classifyJobStaleState({
        job: result.job,
        heartbeatAgeMs,
        pidAlive: terminal ? undefined : isPidAlive(result.job.pid),
    });
    const delayMs = terminal ? undefined : recommendedDelayMs({ ageMs, staleState });
    // Throttle against the delay we handed out LAST time, not the fresh one.
    const previousDelayMs = typeof result.job.last_wait_recommended_delay_ms === "number"
        ? result.job.last_wait_recommended_delay_ms
        : delayMs;
    const lastWaitAgeMs = terminal ? undefined : ageMsSince(result.job.last_wait_at, nowMs);
    const pollTooSoon = !terminal && staleState !== "stale" && previousDelayMs !== undefined && lastWaitAgeMs !== undefined && lastWaitAgeMs < previousDelayMs;
    const remainingDelayMs = pollTooSoon && previousDelayMs !== undefined && lastWaitAgeMs !== undefined
        ? Math.max(0, previousDelayMs - lastWaitAgeMs)
        : undefined;
    const nextAllowedPollAt = !terminal && staleState !== "stale" && delayMs !== undefined
        ? new Date((result.job.last_wait_at ? Date.parse(result.job.last_wait_at) : nowMs) + (pollTooSoon ? previousDelayMs ?? delayMs : delayMs)).toISOString()
        : undefined;
    let job = result.job;
    if (!terminal && !pollTooSoon) {
        // Record this legitimate poll so the next call is throttled from now.
        const updated = await jobStore.touchWait(input.job_id, new Date(nowMs).toISOString(), delayMs);
        if (updated) {
            job = toJobSummary(updated);
        }
    }
    return {
        job,
        result: result.result,
        status: job.status,
        summary: terminal
            ? `Job ${result.job.job_id} is ${result.job.status}; use the returned result or claude_result for follow-up.`
            : staleState === "stale"
                ? `Job ${result.job.job_id} appears stale; inspect or cancel it before starting any replacement job.`
                : pollTooSoon
                    ? `Job ${result.job.job_id} was polled too soon. Do not call claude_job_wait again before ${nextAllowedPollAt}; wait ${remainingDelayMs}ms and poll the same job_id.`
                    : `Job ${result.job.job_id} is still ${result.job.status}; do not duplicate this task locally. Poll claude_job_wait again after the recommended delay.`,
        waiting: !terminal,
        timed_out: false,
        // Stale jobs drop the duplicate-suppression flag so callers may replace them.
        do_not_start_duplicate_job: !terminal && staleState !== "stale",
        poll_too_soon: pollTooSoon || undefined,
        recommended_delay_ms: delayMs,
        remaining_delay_ms: remainingDelayMs,
        next_allowed_poll_at: nextAllowedPollAt,
        age_ms: ageMs,
        heartbeat_age_ms: heartbeatAgeMs,
        stale_state: staleState,
        next_actions: terminal
            ? buildNextActions({ cwd: input.cwd, job })
            : buildActiveWaitActions({ cwd: input.cwd, job, staleState, nextAllowedPollAt }),
    };
}
1509
/**
 * Cancel a queued or running background job for this workspace.
 * Running jobs get SIGTERM first; a kill failure aborts the cancellation
 * and is reported. Terminal jobs are left untouched.
 */
export async function cancelBackgroundJob(input) {
    const jobStore = await getJobStore();
    const record = await jobStore.get(input.job_id);
    if (!record || record.cwd !== input.cwd) {
        return { cancelled: false, error: `Job not found: ${input.job_id}` };
    }
    const terminalStatuses = ["cancelled", "failed", "succeeded"];
    if (terminalStatuses.includes(record.status)) {
        return { cancelled: false, job: toJobSummary(record), error: `Job is already ${record.status}` };
    }
    if (record.status === "running" && record.pid) {
        try {
            process.kill(record.pid, "SIGTERM");
        }
        catch (err) {
            // Could not signal the runner: report and leave the record as-is.
            return {
                cancelled: false,
                job: toJobSummary(record),
                error: err instanceof Error ? err.message : String(err),
            };
        }
    }
    const updated = await jobStore.update(record.job_id, {
        status: "cancelled",
        updated_at: new Date().toISOString(),
        summary: record.summary ?? "Cancelled by user",
        error: undefined,
    });
    return { cancelled: true, job: updated ? toJobSummary(updated) : undefined };
}
1538
/**
 * Purge old background-job records for a workspace.
 * Defaults: records older than 24 hours, dry-run mode, at most 20 removals.
 */
export async function cleanupBackgroundJobs(input) {
    const jobStore = await getJobStore();
    const options = {
        cwd: input.cwd,
        older_than_hours: input.older_than_hours ?? 24,
        dry_run: input.dry_run ?? true,
        limit: input.limit ?? 20,
    };
    return jobStore.cleanup(options);
}
1547
/**
 * Find the most recent implement run whose log recorded a Claude session id.
 * Scans up to 50 runs newest-first; returns { run_id, session_id } or null.
 */
export async function resolveLatestImplementSession(input) {
    const { entries } = await listRunLogs({ cwd: input.cwd, type: "implement", limit: 50 });
    for (const entry of entries) {
        // Logs are read one at a time so we can stop at the first match.
        const raw = await readRunLogFile(entry.run_id, input.cwd);
        const returned = raw?.session?.returned_session_id;
        if (typeof returned === "string" && returned.length > 0) {
            return { run_id: entry.run_id, session_id: returned };
        }
    }
    return null;
}
1560
/**
 * Make sure the standard implement-workspace directories exist under the
 * worktree (src/, tests/, .github/workflows/), creating any that are missing.
 */
async function ensureImplementWorkspaceScaffold(worktreePath) {
    const scaffoldDirs = [
        ["src"],
        ["tests"],
        [".github", "workflows"],
    ];
    await Promise.all(scaffoldDirs.map((segments) => mkdir(path.join(worktreePath, ...segments), { recursive: true })));
}
1567
/**
 * Return the sorted subset of requestedFiles with uncommitted changes per
 * `git status --porcelain=v1 -z`. Any git failure is treated as "nothing
 * dirty" (best effort).
 */
async function findDirtyFiles(cwd, requestedFiles) {
    if (requestedFiles.length === 0) {
        return [];
    }
    const statusOutput = await execCapture("git", ["status", "--porcelain=v1", "-z", "--", ...requestedFiles], { cwd }).catch(() => "");
    const dirtySet = new Set(parseStatusPorcelainZ(statusOutput)
        .map((entry) => entry.file)
        .filter(Boolean));
    return [...dirtySet].sort();
}
1578
/**
 * True when a repo-relative path belongs to this tool's own metadata trees
 * (.claude/ or .codex-claude-delegate/), which are excluded from
 * main-workspace dirtiness checks.
 */
function isIgnoredMainWorkspaceDirtyFile(file) {
    const ignoredRoots = [".claude", ".codex-claude-delegate"];
    return ignoredRoots.some((root) => file === root || file.startsWith(`${root}/`));
}
1584
/**
 * List uncommitted repo-relative paths in the main workspace (sorted),
 * excluding this tool's own metadata directories. Git failures yield [].
 */
async function findDirtyMainWorkspaceFiles(cwd) {
    const output = await execCapture("git", ["status", "--porcelain=v1", "-z"], { cwd }).catch(() => "");
    const dirty = new Set();
    for (const entry of parseStatusPorcelainZ(output)) {
        // Normalize to a repo-relative path before applying the ignore filter.
        const file = normalizeRepoPath(cwd, entry.file);
        if (!file || isIgnoredMainWorkspaceDirtyFile(file))
            continue;
        dirty.add(file);
    }
    return [...dirty].sort();
}
1595
/**
 * Like findDirtyMainWorkspaceFiles but keeps each entry's status code, for
 * snapshot copying (which must distinguish deletions from edits/additions).
 */
async function listDirtyMainWorkspaceEntries(cwd) {
    const output = await execCapture("git", ["status", "--porcelain=v1", "-z"], { cwd }).catch(() => "");
    return parseStatusPorcelainZ(output)
        .map((entry) => ({ ...entry, file: normalizeRepoPath(cwd, entry.file) }))
        .filter((entry) => entry.file && !isIgnoredMainWorkspaceDirtyFile(entry.file))
        .sort((a, b) => a.file.localeCompare(b.file));
}
1602
/**
 * Dirty-file check used by implement preflight: scoped to the requested
 * files when any were given, otherwise covering the whole main workspace.
 */
async function findDirtyImplementFiles(cwd, requestedFiles) {
    if (requestedFiles.length > 0) {
        return findDirtyFiles(cwd, requestedFiles);
    }
    return findDirtyMainWorkspaceFiles(cwd);
}
1607
/**
 * Build the user-facing explanation for a blocked implement run when the
 * main workspace (or the requested subset of files) has uncommitted changes.
 */
function formatDirtyImplementMessage(dirtyFiles, requestedFiles) {
    const fileList = dirtyFiles.join(", ");
    if (requestedFiles.length > 0) {
        return `Requested files contain uncommitted changes in main workspace: ${fileList}. Choose dirty_policy=\"snapshot\" to include current uncommitted changes, dirty_policy=\"committed\" to use HEAD only, or commit/stash/clean them before retrying.`;
    }
    return `Main workspace contains uncommitted changes: ${fileList}. Choose dirty_policy=\"snapshot\" to include current uncommitted changes, dirty_policy=\"committed\" to use HEAD only, or commit/stash/clean them before retrying.`;
}
1612
/**
 * Build a "needs_user" envelope explaining that the workspace (or the
 * requested files) is dirty and the caller must pick a dirty_policy
 * (snapshot / committed) or clean up before retrying.
 *
 * @param startTime - used to report elapsed time in the execution block.
 */
function dirtyNeedsUserResult(input, dirtyFiles, requestedFiles, startTime = Date.now()) {
    const summary = requestedFiles.length > 0
        ? `Requested files have uncommitted changes: ${dirtyFiles.join(", ")}.`
        : `Main workspace has uncommitted changes: ${dirtyFiles.join(", ")}.`;
    // Shaped like a Claude report so downstream consumers can render it the
    // same way as a real run result.
    const report = {
        status: "needs_user",
        summary,
        changed_files: dirtyFiles,
        commands_run: ["git status --porcelain=v1 -z"],
        tests: { ran: false },
        risks: [
            "A delegated worktree created from HEAD will not include uncommitted main-workspace changes unless dirty_policy=\"snapshot\" is used.",
        ],
        next_steps: [
            "Commit or stash the current main-workspace changes, then rerun claude_task without dirty_policy.",
            "Use committed state only and intentionally ignore current uncommitted changes: dirty_policy=committed.",
            "Snapshot current uncommitted changes into the delegated worktree before Claude starts: dirty_policy=snapshot.",
        ],
    };
    return makeEnvelope("needs_user", undefined, successExecution(Date.now() - startTime), [], {
        claude_report: report,
        server_observed: {
            repo_root: input.cwd,
            changed_files: dirtyFiles,
            diff_stat: "",
            // Synthetic "dirty" markers — no diff was actually computed.
            diff_name_only: dirtyFiles.map((file) => `dirty\t${file}`).join("\n"),
            scope: {
                requested_files: requestedFiles.length > 0 ? requestedFiles : undefined,
                out_of_scope_files: [],
                scope_exceeded: false,
                warnings: [],
            },
        },
    });
}
1647
/**
 * Compute the implement preflight inputs: the normalized requested-file list
 * plus whichever of those (or the whole workspace) is currently dirty.
 */
async function preflightImplementDirtyState(input) {
    const requestedFiles = normalizeRequestedFiles(input.cwd, input.files);
    return {
        requestedFiles,
        dirtyFiles: await findDirtyImplementFiles(input.cwd, requestedFiles),
    };
}
1652
/**
 * Replicate the main workspace's uncommitted changes into a freshly created
 * worktree: deletions are removed, edits/additions are copied over.
 *
 * @returns the repo-relative paths that were applied (deletions included).
 */
async function applyDirtySnapshotToWorktree(cwd, worktreePath) {
    const entries = await listDirtyMainWorkspaceEntries(cwd);
    const copied = [];
    // Sequential on purpose: keeps filesystem side effects deterministic and
    // in sorted-path order.
    for (const entry of entries) {
        const source = path.join(cwd, entry.file);
        const destination = path.join(worktreePath, entry.file);
        if (entry.status === "D") {
            // Deleted in the main workspace: mirror the deletion in the worktree.
            await rm(destination, { recursive: true, force: true });
            copied.push(entry.file);
            continue;
        }
        // Source may have vanished between `git status` and now; skip quietly.
        if (!existsSync(source))
            continue;
        await mkdir(path.dirname(destination), { recursive: true });
        await cp(source, destination, { recursive: true, force: true });
        copied.push(entry.file);
    }
    return copied;
}
1671
// ---- Sensitive data redaction for stderr ----
/**
 * Mask credentials (env-style Anthropic tokens, Bearer headers, sk-/sk-ant-
 * API keys) before stderr text is logged or surfaced to callers.
 */
function redactSensitive(input) {
    const redactions = [
        [/(ANTHROPIC_AUTH_TOKEN=)[^\s]+/gi, "$1***"],
        [/(ANTHROPIC_API_KEY=)[^\s]+/gi, "$1***"],
        [/(Authorization:\s*Bearer\s+)[^\s]+/gi, "$1***"],
        [/\b(sk-ant-[a-zA-Z0-9]{20,})\b/g, "sk-ant-***"],
        [/\b(sk-[a-zA-Z0-9]{20,})\b/g, "sk-***"],
    ];
    return redactions.reduce((text, [pattern, replacement]) => text.replace(pattern, replacement), input);
}
1680
// ---- Git status/diff parsing helpers ----
/**
 * Parse `git status --porcelain=v1 -z` output into { status, file } pairs
 * with a simplified status: "A" (added or untracked), "M", "D",
 * "unsupported" (renames/copies), or the trimmed raw XY code otherwise.
 */
export function parseStatusPorcelainZ(output) {
    // -z output is NUL-separated; renames/copies emit an extra NUL-separated path.
    const entries = output.split("\0");
    const parsed = [];
    for (let i = 0; i < entries.length; i++) {
        const entry = entries[i];
        if (!entry)
            continue;
        // Prefer the standard two-character XY prefix; fall back to a single
        // status character for tolerant parsing of abbreviated entries.
        const match = entry.match(/^(.{2}) (.+)$/s) ?? entry.match(/^([ MADRCU?!]) (.+)$/s);
        if (!match)
            continue;
        // Pad single-character statuses so xy is always two characters wide.
        const xy = match[1].length === 1 ? `${match[1]} ` : match[1];
        const firstPath = match[2];
        if (!firstPath)
            continue;
        let status = "?";
        if (xy === "??") {
            // Untracked files are treated as additions.
            status = "A";
        }
        else if (xy.includes("R") || xy.includes("C")) {
            // Renames/copies: consume the extra NUL-separated path entry.
            // NOTE(review): in -z output the extra entry is the rename *source*
            // path; this code reports that path when present — confirm intended.
            status = "unsupported";
            const nextPath = entries[i + 1];
            const file = nextPath || firstPath;
            if (nextPath)
                i++;
            parsed.push({ status, file });
            continue;
        }
        else if (xy.includes("D")) {
            status = "D";
        }
        else if (xy.includes("A")) {
            status = "A";
        }
        else if (xy.includes("M")) {
            status = "M";
        }
        else {
            // Unknown XY combination: keep the trimmed raw code for visibility.
            status = xy.trim() || "?";
        }
        parsed.push({ status, file: firstPath });
    }
    return parsed;
}
1724
/**
 * Parse `git diff --name-status -z` style output into { status, file } pairs.
 *
 * Handles both token layouts git can produce: "STATUS\tpath" packed into a
 * single NUL-separated token, and a bare status token followed by the path
 * in the next token. Renames/copies (R/C, optionally with a similarity
 * score like R100) consume their extra path token and are reported as
 * status "unsupported" with the second path when present.
 */
export function parseNameStatusPorcelainZ(output) {
    const tokens = output.split("\0");
    const parsed = [];
    let idx = 0;
    while (idx < tokens.length) {
        const token = tokens[idx];
        idx++;
        if (!token) {
            continue;
        }
        let rawStatus;
        let filePath;
        let pathInNextToken = false;
        const tabAt = token.indexOf("\t");
        if (tabAt > 0) {
            // "STATUS\tpath" packed into one token.
            rawStatus = token.slice(0, tabAt);
            filePath = token.slice(tabAt + 1);
        }
        else if (/^[A-Z?][0-9]*$/.test(token)) {
            // Bare status token (possibly with a score); path is the next token.
            rawStatus = token;
            filePath = tokens[idx] ?? "";
            pathInNextToken = true;
        }
        else {
            continue;
        }
        if (!filePath) {
            continue;
        }
        const statusCode = rawStatus[0] ?? "?";
        if (statusCode === "R" || statusCode === "C") {
            // Renames/copies carry a second path token; prefer it when present.
            const secondPath = tokens[idx + (pathInNextToken ? 1 : 0)];
            if (pathInNextToken) {
                idx++;
            }
            if (secondPath) {
                idx++;
            }
            parsed.push({ status: "unsupported", file: secondPath || filePath });
            continue;
        }
        if (pathInNextToken) {
            idx++;
        }
        parsed.push({ status: statusCode, file: filePath });
    }
    return parsed;
}
1771
/**
 * Expand a change entry that points at a directory into one entry per file
 * beneath it (recursive). Deletions, plain files, and vanished paths pass
 * through unchanged as a single-element array.
 *
 * @returns expanded entries sorted by path, or [change] when no expansion applies.
 */
async function expandDirectoryChange(change, worktreeRoot) {
    if (change.status === "D")
        return [change];
    const sourcePath = path.join(worktreeRoot, change.file);
    let sourceStat;
    try {
        sourceStat = await stat(sourcePath);
    }
    catch {
        // Path no longer exists; let the caller handle the raw entry.
        return [change];
    }
    if (!sourceStat.isDirectory())
        return [change];
    const expanded = [];
    // Depth-first walk; only regular files are collected (dirents that are
    // neither directories nor files, e.g. symlinks, are skipped).
    const walk = async (relativeDir) => {
        const dirPath = path.join(worktreeRoot, relativeDir);
        const entries = await readdir(dirPath, { withFileTypes: true });
        for (const entry of entries) {
            const childRelative = path.join(relativeDir, entry.name);
            if (entry.isDirectory()) {
                await walk(childRelative);
            }
            else if (entry.isFile()) {
                expanded.push({
                    // Every expanded file inherits the directory's status code.
                    status: change.status,
                    file: normalizeRepoPath(worktreeRoot, childRelative),
                });
            }
        }
    };
    await walk(change.file);
    return expanded.sort((a, b) => a.file.localeCompare(b.file));
}
1804
// Claude CLI tool patterns disallowed in every delegated run: destructive
// filesystem/privilege commands, network transfer tools, remote shells,
// and `git push`. Write-capable runs extend this list further.
export const DANGEROUS_DISALLOWED_TOOLS = [
    "Bash(rm *)",
    "Bash(rm -rf *)",
    "Bash(rm -r *)",
    "Bash(sudo *)",
    "Bash(curl *)",
    "Bash(wget *)",
    "Bash(chmod *)",
    "Bash(chown *)",
    "Bash(git push *)",
    "Bash(ssh *)",
    "Bash(scp *)",
    "Bash(nc *)",
    "Bash(netcat *)",
];
1819
/**
 * Keep at most the last maxChars characters of a string (default 4000).
 * The tail is preserved because that is where process output errors end up.
 */
export function truncateTail(input, maxChars = 4000) {
    if (input.length <= maxChars) {
        return input;
    }
    return input.slice(-maxChars);
}
1822
/**
 * Environment used for spawned Claude processes; thin public alias for
 * sanitizeEnv (defined elsewhere in this module).
 */
export function buildSafeEnv() {
    return sanitizeEnv();
}
1825
/**
 * Send a signal (default SIGTERM) to the currently tracked Claude child
 * process, if there is one.
 *
 * @returns true when the signal was delivered; false when no child is
 *          active or process.kill threw (e.g. the process already exited).
 */
export function abortActiveClaudeRun(signal = "SIGTERM") {
    if (!activeClaudeChild?.pid)
        return false;
    try {
        process.kill(activeClaudeChild.pid, signal);
        return true;
    }
    catch {
        // Process already gone or not signalable; report failure quietly.
        return false;
    }
}
1836
/**
 * Assemble the argv for a non-interactive `claude -p` invocation.
 *
 * Flag order is load-bearing: --allowedTools / --disallowedTools must come
 * before --json-schema, and --json-schema must be the final flag before the
 * positional prompt — otherwise the CLI consumes subsequent flags as part
 * of the schema value.
 */
export function buildClaudeArgs(opts) {
    const args = ["-p"];
    if (opts.worktree) {
        args.push("-w", opts.worktree);
    }
    if (opts.resumeSessionId) {
        args.push("-r", opts.resumeSessionId);
    }
    if (opts.forkSession) {
        args.push("--fork-session");
    }
    if (opts.noSessionPersistence) {
        args.push("--no-session-persistence");
    }
    // Delegated runs never prompt interactively.
    args.push("--permission-mode", "dontAsk");
    if (opts.maxBudgetUsd !== undefined) {
        args.push("--max-budget-usd", String(opts.maxBudgetUsd));
    }
    args.push("--tools", opts.tools);
    if (opts.maxTurns !== undefined) {
        args.push("--max-turns", String(opts.maxTurns));
    }
    args.push("--output-format", "json");
    if (opts.allowedTools.length > 0) {
        args.push("--allowedTools", ...opts.allowedTools);
    }
    if (opts.disallowedTools.length > 0) {
        args.push("--disallowedTools", ...opts.disallowedTools);
    }
    // Schema last, then the positional prompt.
    args.push("--json-schema", JSON.stringify(opts.jsonSchema), opts.prompt);
    return args;
}
1879
// Argv for a read-only query run (policy defined by createQueryOptions).
export function buildQueryArgs(input) {
    return buildClaudeArgs(createQueryOptions(input));
}
1882
// Argv for a read-only review run (policy defined by createReviewOptions).
export function buildReviewArgs(input) {
    return buildClaudeArgs(createReviewOptions(input));
}
1885
// Argv for a write-capable implement run (policy defined by createImplementOptions).
export function buildImplementArgs(input) {
    return buildClaudeArgs(createImplementOptions(input));
}
1888
/**
 * Execution metadata for a step that completed without spawning a child
 * process: exit 0, no captured output, optional wall-clock duration.
 */
function successExecution(durationMs = 0) {
    return {
        exit_code: 0,
        duration_ms: durationMs,
        timed_out: false,
        stdout_tail: "",
        stderr_tail: "",
    };
}
1891
/**
 * Standard tool-result envelope: status/data/execution/warnings plus any
 * extra top-level fields; extra keys win on collision.
 */
function makeEnvelope(status, data, execution, warnings = [], extra = {}) {
    const envelope = { status, data, execution, warnings };
    return Object.assign(envelope, extra);
}
1894
/**
 * True when a run should count as failed: the process exited non-zero,
 * timed out, or the report itself carries is_error / status "failed".
 */
function reportIndicatesFailure(report, execution) {
    const processFailed = (execution.exit_code !== null && execution.exit_code !== 0) || execution.timed_out;
    const reportFailed = report.is_error === true || report.status === "failed";
    return processFailed || reportFailed;
}
1900
/**
 * Map a Claude implement report plus its process execution onto the
 * envelope status: pass partial/needs_user through on success, and
 * downgrade failures to "partial" when files were still changed.
 */
function implementEnvelopeStatus(report, execution, observed) {
    if (reportIndicatesFailure(report, execution)) {
        // A failed run that still touched files is worth surfacing as partial.
        return observed.changed_files.length > 0 ? "partial" : "failed";
    }
    if (report.status === "partial" || report.status === "needs_user") {
        return report.status;
    }
    return "success";
}
1908
/**
 * Structured error payload for the "Claude produced no output" case:
 * bundles process diagnostics with suggested recovery actions so the
 * calling agent can retry sensibly.
 */
function noOutputPayload(message, opts, code, signal, stdout, stderr, stderrTail, environmentDiagnostics) {
    return {
        error: message,
        diagnostics: {
            exit_code: code,
            signal: signal ?? "none",
            timeout_sec: opts.timeoutSec,
            // Lengths only; the (redacted) tail carries the actual text.
            stdout_len: stdout.length,
            stderr_len: stderr.length,
            stderr_tail: stderrTail,
            environment_diagnostics: environmentDiagnostics,
        },
        next_actions: [
            {
                tool: "claude_review",
                reason: "Retry with a higher timeout_sec if the review scope is broad or Claude was still starting.",
            },
            {
                tool: "claude_review",
                args: { background: true },
                reason: "Run broad reviews in the background so Codex can poll the job instead of timing out foreground execution.",
            },
            {
                tool: "claude_status",
                reason: "Check Claude CLI auth, PATH, proxy, and local environment diagnostics before retrying.",
            },
        ],
    };
}
1937
/**
 * Spawn the Claude CLI with the argv from buildClaudeArgs and a sanitized
 * environment, capture stdout/stderr, and resolve with the parsed report,
 * session id, and execution metadata.
 *
 * Rejects with StructuredToolError when Claude produced no output, and with
 * a plain Error when the binary is missing or stdout cannot be parsed.
 */
function spawnClaude(opts) {
    const args = buildClaudeArgs(opts);
    const safeEnv = sanitizeEnv();
    const startTime = Date.now();
    log(`spawning: ${CLAUDE_BIN} -p (${args.length} args, worktree=${opts.worktree ?? "none"}, maxTurns=${opts.maxTurns ?? "unlimited"})`);
    return new Promise((resolve, reject) => {
        const child = spawn(CLAUDE_BIN, args, {
            cwd: opts.cwd,
            env: safeEnv,
            // spawn() itself enforces the timeout (default kill signal: SIGTERM).
            timeout: opts.timeoutSec * 1000,
            stdio: ["ignore", "pipe", "pipe"],
        });
        // Track the child so abortActiveClaudeRun can signal it.
        activeClaudeChild = child;
        let stdout = "";
        let stderr = "";
        child.stdout?.on("data", (chunk) => {
            stdout += chunk.toString();
        });
        child.stderr?.on("data", (chunk) => {
            stderr += chunk.toString();
        });
        child.on("error", (err) => {
            activeClaudeChild = null;
            if (err.code === "ENOENT") {
                reject(new Error(`Claude CLI not found. Ensure "claude" is in PATH or set CLAUDE_BIN env var.`));
            }
            else {
                reject(err);
            }
        });
        child.on("close", async (code, signal) => {
            activeClaudeChild = null;
            if (stderr)
                log(`claude stderr: ${redactSensitive(stderr.slice(0, 2000))}`);
            // Try to parse stdout even when exit code is non-zero.
            // Claude may exit with code 1 on max_turns but still produce
            // valid structured_output in the result payload.
            try {
                const trimmed = stdout.trim();
                if (!trimmed) {
                    // No output at all: gather diagnostics (best effort) and
                    // reject with a structured, actionable payload.
                    const stderrTail = redactSensitive(stderr.slice(-1000));
                    let environmentDiagnostics;
                    let diagStr = "";
                    try {
                        environmentDiagnostics = await getEnvironmentDiagnostics(safeEnv);
                        diagStr = ` environment_diagnostics=${JSON.stringify(environmentDiagnostics)}`;
                    }
                    catch { }
                    const message = `Claude produced no output (exit ${code}, signal ${signal ?? "none"}, timeout_sec=${opts.timeoutSec}, stdoutLen=${stdout.length}, stderrLen=${stderr.length}, stderrTail=${JSON.stringify(stderrTail)})` +
                        diagStr;
                    reject(new StructuredToolError(message, noOutputPayload(message, opts, code, signal, stdout, stderr, stderrTail, environmentDiagnostics)));
                    return;
                }
                let parsed;
                try {
                    parsed = JSON.parse(trimmed);
                }
                catch {
                    // Mixed output: fall back to parsing the last non-empty line.
                    const lines = trimmed.split("\n").filter((l) => l.trim());
                    if (lines.length === 0) {
                        reject(new Error(`Claude produced unparseable output (exit ${code}): ${trimmed.slice(0, 500)}`));
                        return;
                    }
                    const lastLine = lines[lines.length - 1];
                    parsed = JSON.parse(lastLine);
                }
                // Extract structured_output if present, otherwise use the whole result
                const report = (parsed.structured_output ?? parsed);
                // Extract session_id for session management
                const sessionId = parsed.session_id ?? null;
                // If Claude hit max_turns with partial results, still return what we have.
                // The subtype field signals whether this was a clean completion or an early exit.
                if (code !== 0 && code !== null) {
                    log(`Claude exited ${code} (subtype=${parsed.subtype ?? "unknown"}), returning partial result`);
                }
                resolve({
                    report,
                    session_id: sessionId,
                    execution: {
                        exit_code: code,
                        duration_ms: Date.now() - startTime,
                        // NOTE(review): SIGTERM is used as a timeout proxy, but a
                        // manual abortActiveClaudeRun() also sends SIGTERM and
                        // would be reported as a timeout — confirm acceptable.
                        timed_out: signal === "SIGTERM",
                        stdout_tail: truncateTail(stdout),
                        stderr_tail: redactSensitive(truncateTail(stderr)),
                    },
                });
            }
            catch (err) {
                const diag = `exit=${code}, signal=${signal ?? "none"}, timeout_sec=${opts.timeoutSec}, stdoutLen=${stdout.length}, stderrLen=${stderr.length}, stderr=${redactSensitive(stderr.slice(0, 200))}`;
                reject(new Error(`Failed to parse Claude output. ${diag}\n${err.message}`));
            }
        });
    });
}
2031
// ---- Environment diagnostics ----
/**
 * Describe an env var's status without leaking its value: "set-redacted"
 * for TOKEN/API_KEY vars present in the sanitized env, "set" for other
 * present vars, "present-in-parent-stripped" when only the parent process
 * has it, and "unset" otherwise.
 */
function redactEnvStatus(key, safeEnv) {
    if (safeEnv[key]) {
        const isSecret = key.includes("TOKEN") || key.includes("API_KEY");
        return isSecret ? "set-redacted" : "set";
    }
    return process.env[key] ? "present-in-parent-stripped" : "unset";
}
2041
/**
 * Parse a proxy URL and return { host, port } only when it points at the
 * local loopback interface with an explicit port; otherwise (or on any
 * parse failure) return null.
 *
 * Fix: WHATWG `URL.hostname` reports IPv6 literals WITH brackets ("[::1]"),
 * so the previous direct comparison against "::1" could never match and
 * IPv6 loopback proxies were silently rejected. Brackets are now stripped
 * before the loopback check, and the bare host is returned (which is what
 * net.createConnection expects).
 *
 * @param raw - proxy URL string (e.g. from HTTPS_PROXY); may be undefined.
 * @returns {{ host: string, port: number } | null}
 */
function parseLocalProxy(raw) {
    if (!raw)
        return null;
    try {
        const url = new URL(raw);
        // Normalize IPv6 literals: "[::1]" -> "::1".
        const host = url.hostname.replace(/^\[|\]$/g, "");
        // url.port is "" for default ports -> NaN -> rejected below.
        const port = Number.parseInt(url.port, 10);
        if (!host || !Number.isFinite(port))
            return null;
        const loopbackHosts = ["127.0.0.1", "localhost", "::1"];
        if (!loopbackHosts.includes(host))
            return null;
        return { host, port };
    }
    catch {
        return null;
    }
}
2058
/**
 * TCP-probe a local host:port with a connect timeout (default 1s).
 *
 * @returns { reachable, error? } — never rejects; failures are reported via
 *          the error field ("timeout" or the socket error code/message).
 */
async function probeLocalPort(host, port, timeoutMs = 1000) {
    const net = await import("node:net");
    return await new Promise((resolve) => {
        const socket = net.createConnection({ host, port });
        const timer = setTimeout(() => {
            socket.destroy();
            resolve({ reachable: false, error: "timeout" });
        }, timeoutMs);
        socket.once("connect", () => {
            clearTimeout(timer);
            // Establishing the connection is all we need; close immediately.
            socket.destroy();
            resolve({ reachable: true });
        });
        socket.once("error", (err) => {
            clearTimeout(timer);
            resolve({ reachable: false, error: err.code ?? err.message });
        });
    });
}
2077
/**
 * Collect redacted proxy/auth environment diagnostics, probing a local
 * proxy (if configured) to guess whether a sandbox is blocking Claude's
 * network access.
 */
async function getEnvironmentDiagnostics(safeEnv = sanitizeEnv()) {
    // Prefer the sanitized env, but fall back to the parent env so a proxy
    // stripped by sanitizeEnv is still diagnosed.
    const proxyRaw = safeEnv.HTTPS_PROXY ?? safeEnv.HTTP_PROXY ?? process.env.HTTPS_PROXY ?? process.env.HTTP_PROXY;
    const localProxy = parseLocalProxy(proxyRaw);
    let reachable;
    let proxyError;
    if (localProxy) {
        const probe = await probeLocalPort(localProxy.host, localProxy.port);
        reachable = probe.reachable;
        proxyError = probe.error;
    }
    // EPERM/EACCES/timeout on a loopback connect is the signature of a
    // sandbox that forbids outbound sockets.
    const likelySandboxBlocked = !!localProxy &&
        reachable === false &&
        (proxyError === "EPERM" || proxyError === "EACCES" || proxyError === "timeout");
    return {
        proxy_env_present: !!(safeEnv.HTTP_PROXY || safeEnv.HTTPS_PROXY || process.env.HTTP_PROXY || process.env.HTTPS_PROXY),
        http_proxy: redactEnvStatus("HTTP_PROXY", safeEnv),
        https_proxy: redactEnvStatus("HTTPS_PROXY", safeEnv),
        no_proxy: redactEnvStatus("NO_PROXY", safeEnv),
        anthropic_base_url: redactEnvStatus("ANTHROPIC_BASE_URL", safeEnv),
        anthropic_auth_token: redactEnvStatus("ANTHROPIC_AUTH_TOKEN", safeEnv),
        anthropic_api_key: redactEnvStatus("ANTHROPIC_API_KEY", safeEnv),
        local_proxy_host: localProxy?.host,
        local_proxy_port: localProxy?.port,
        local_proxy_reachable: reachable,
        local_proxy_error: proxyError,
        likely_sandbox_blocked: likelySandboxBlocked,
        recommendation: likelySandboxBlocked
            ? "Claude CLI likely cannot reach its local proxy/API from this sandbox. Run the MCP server outside the restricted sandbox or approve the outer command execution."
            : undefined,
    };
}
2108
// ---- Server-side observation ----
/**
 * Independently observe what a delegated run changed, straight from git:
 * committed diffs since baseCommit, uncommitted tracked changes, and
 * untracked files — then check the union against the requested scope.
 *
 * Never throws: git failures degrade to "(unable to observe)" output with
 * warnings attached to the scope block.
 */
async function observeResult(cwd, worktree, baseCommit, requestedFiles) {
    // Observe inside the delegated worktree when one was used.
    const obsCwd = worktree ? path.join(cwd, ".claude", "worktrees", worktree) : cwd;
    const warnings = [];
    const gitStatusShort = await execCapture("git", ["status", "--short"], { cwd: obsCwd }).catch((err) => {
        warnings.push(`Unable to read git status: ${err instanceof Error ? err.message : String(err)}`);
        return "";
    });
    const headCommit = await execCapture("git", ["rev-parse", "HEAD"], { cwd: obsCwd }).catch((err) => {
        warnings.push(`Unable to read HEAD commit: ${err instanceof Error ? err.message : String(err)}`);
        return "";
    });
    try {
        // Committed changes since the recorded base (if any).
        const trackedCommittedNameOnly = baseCommit
            ? await execCapture("git", ["diff", "--name-only", baseCommit, "HEAD"], { cwd: obsCwd }).catch(() => "")
            : "";
        const trackedCommittedStat = baseCommit
            ? await execCapture("git", ["diff", "--stat", baseCommit, "HEAD"], { cwd: obsCwd }).catch(() => "")
            : "";
        // Uncommitted tracked changes + full porcelain status (covers untracked).
        const [trackedUncommittedNameOnly, untrackedStatusPorcelainZ] = await Promise.all([
            execCapture("git", ["diff", "--name-only"], { cwd: obsCwd }).catch(() => ""),
            execCapture("git", ["status", "--porcelain=v1", "-z"], { cwd: obsCwd }).catch(() => ""),
        ]);
        // Union of all three sources, deduplicated.
        const fileSet = new Set();
        for (const source of [trackedCommittedNameOnly, trackedUncommittedNameOnly]) {
            for (const line of source.split("\n")) {
                const file = line.trim();
                if (file)
                    fileSet.add(file);
            }
        }
        for (const entry of parseStatusPorcelainZ(untrackedStatusPorcelainZ)) {
            if (entry.file)
                fileSet.add(entry.file);
        }
        const changedFiles = [...fileSet].sort();
        // Scope check: anything changed outside the requested files?
        const normalizedRequestedFiles = normalizeRequestedFiles(cwd, requestedFiles);
        const outOfScopeFiles = normalizedRequestedFiles.length === 0
            ? []
            : changedFiles.filter((file) => !normalizedRequestedFiles.some((requested) => isUnderRequestedFile(file, requested)));
        const scopeWarnings = outOfScopeFiles.map((file) => `Changed ${file} outside requested files: ${normalizedRequestedFiles.join(", ")}`);
        // Human-readable summaries, segmented by change source.
        const diffNameOnlySegments = [];
        if (trackedCommittedNameOnly.trim()) {
            diffNameOnlySegments.push(`[tracked_since_base ${baseCommit ?? "unknown"}..HEAD]\n${trackedCommittedNameOnly.trimEnd()}`);
        }
        if (trackedUncommittedNameOnly.trim()) {
            diffNameOnlySegments.push(`[uncommitted_tracked]\n${trackedUncommittedNameOnly.trimEnd()}`);
        }
        if (untrackedStatusPorcelainZ.trim()) {
            const untrackedLines = parseStatusPorcelainZ(untrackedStatusPorcelainZ)
                .map((entry) => `${entry.status}\t${entry.file}`)
                .join("\n");
            if (untrackedLines) {
                diffNameOnlySegments.push(`[status_porcelain_z]\n${untrackedLines}`);
            }
        }
        const diffStatSegments = [];
        if (trackedCommittedStat.trim()) {
            diffStatSegments.push(`[tracked_since_base ${baseCommit ?? "unknown"}..HEAD]\n${trackedCommittedStat.trimEnd()}`);
        }
        const fallbackStat = changedFiles.length > 0
            ? changedFiles.map((file) => `*\t${file}`).join("\n")
            : "(no changes)";
        const diffStat = diffStatSegments.join("\n\n") || fallbackStat;
        const diffNameOnly = diffNameOnlySegments.join("\n\n") || "(no changes)";
        return {
            repo_root: cwd,
            worktree_name: worktree,
            changed_files: changedFiles,
            diff_stat: diffStat,
            diff_name_only: diffNameOnly,
            base_commit: baseCommit,
            head_commit: headCommit.trim() || undefined,
            git_status_short: gitStatusShort,
            worktree_path: worktree ? `.claude/worktrees/${worktree}` : undefined,
            scope: {
                requested_files: normalizedRequestedFiles.length > 0 ? normalizedRequestedFiles : undefined,
                out_of_scope_files: outOfScopeFiles,
                scope_exceeded: outOfScopeFiles.length > 0,
                warnings: [...warnings, ...scopeWarnings],
            },
        };
    }
    catch {
        // Degraded result: same shape, explicit "(unable to observe)" markers.
        const normalizedRequestedFiles = normalizeRequestedFiles(cwd, requestedFiles);
        return {
            repo_root: cwd,
            worktree_name: worktree,
            changed_files: [],
            diff_stat: "(unable to observe)",
            diff_name_only: "(unable to observe)",
            base_commit: baseCommit,
            head_commit: headCommit.trim() || undefined,
            git_status_short: gitStatusShort,
            worktree_path: worktree ? `.claude/worktrees/${worktree}` : undefined,
            scope: {
                requested_files: normalizedRequestedFiles.length > 0 ? normalizedRequestedFiles : undefined,
                out_of_scope_files: [],
                scope_exceeded: false,
                warnings,
            },
        };
    }
}
2212
// ---- Check git status in worktree ----
/**
 * Best-effort `git status --short` inside a delegated worktree; returns a
 * placeholder string when the worktree is missing or git fails.
 */
async function getWorktreeStatus(cwd, worktree) {
    const worktreePath = path.join(cwd, ".claude", "worktrees", worktree);
    return execCapture("git", ["status", "--short"], { cwd: worktreePath })
        .catch(() => "(unable to get worktree status)");
}
2222
// ---- Public API ----
/**
 * Disallowed-tool list for read-only (query/review) runs.
 *
 * Fix: return a fresh copy rather than the shared DANGEROUS_DISALLOWED_TOOLS
 * array itself, so a caller that appends to its disallowed list cannot
 * mutate the module-level constant for every subsequent run.
 */
function readOnlyDisallowedTools() {
    return [...DANGEROUS_DISALLOWED_TOOLS];
}
2226
/**
 * Spawn options for a read-only query run: read/search tools plus a
 * whitelist of harmless git/inspection shell commands. "fast" mode trims
 * turns to 2 and the timeout to 45s; otherwise turns are unlimited and the
 * timeout defaults to 120s. Explicit input values always win.
 */
function createQueryOptions(input) {
    const effectiveMaxTurns = input.max_turns ?? (input.fast ? 2 : undefined);
    const effectiveTimeoutSec = input.timeout_sec ?? (input.fast ? 45 : 120);
    return {
        prompt: buildQueryPrompt(input),
        cwd: input.cwd,
        tools: "Read,Glob,Grep,Bash",
        // Bash is restricted to read-only inspection commands.
        allowedTools: [
            "Read",
            "Glob",
            "Grep",
            "Bash(git diff *)",
            "Bash(git log *)",
            "Bash(git status)",
            "Bash(git show *)",
            "Bash(find *)",
            "Bash(rg *)",
            "Bash(wc *)",
            "Bash(ls *)",
            "Bash(head *)",
            "Bash(tail *)",
            "Bash(cat *)",
        ],
        disallowedTools: readOnlyDisallowedTools(),
        maxTurns: effectiveMaxTurns,
        timeoutSec: effectiveTimeoutSec,
        jsonSchema: QUERY_SCHEMA,
    };
}
2255
/**
 * Spawn options for a read-only review run: read/search tools plus
 * git-history inspection commands (including blame). Sessions are not
 * persisted; the timeout defaults to 180s.
 */
function createReviewOptions(input) {
    return {
        prompt: buildReviewPrompt(input),
        cwd: input.cwd,
        tools: "Read,Glob,Grep,Bash",
        allowedTools: [
            "Read",
            "Glob",
            "Grep",
            "Bash(git diff *)",
            "Bash(git log *)",
            "Bash(git status)",
            "Bash(git show *)",
            "Bash(git blame *)",
        ],
        disallowedTools: readOnlyDisallowedTools(),
        maxTurns: input.max_turns,
        timeoutSec: input.timeout_sec ?? 180,
        jsonSchema: REVIEW_SCHEMA,
        // Reviews are one-shot; no session to resume later.
        noSessionPersistence: true,
    };
}
2277
/**
 * Spawn options for a write-capable implement run: editing tools plus a
 * broad-but-bounded Bash whitelist (builds, tests, file utilities). Package
 * installs, container/cluster tools, and destructive git operations are
 * explicitly disallowed on top of DANGEROUS_DISALLOWED_TOOLS.
 *
 * @param resumeSessionId - optional Claude session to resume.
 * @param forked - when resuming, fork the session instead of continuing it.
 */
function createImplementOptions(input, resumeSessionId, forked) {
    return {
        prompt: buildImplementPrompt(input),
        cwd: input.cwd,
        tools: "Read,Glob,Grep,Edit,Write,Bash",
        allowedTools: [
            "Read",
            "Glob",
            "Grep",
            "Edit",
            "Write",
            "Bash(git status)",
            "Bash(git diff *)",
            "Bash(git add *)",
            "Bash(git log *)",
            "Bash(git show *)",
            "Bash(npm test *)",
            "Bash(npm run test *)",
            "Bash(npm run lint *)",
            "Bash(npx *)",
            "Bash(pytest *)",
            "Bash(go test *)",
            "Bash(cargo test *)",
            "Bash(yarn test *)",
            "Bash(pnpm test *)",
            "Bash(pnpm run lint *)",
            "Bash(ls *)",
            "Bash(cat *)",
            "Bash(wc *)",
            "Bash(find *)",
            "Bash(head *)",
            "Bash(tail *)",
            "Bash(sort *)",
            "Bash(uniq *)",
            "Bash(grep *)",
            "Bash(rg *)",
            "Bash(which *)",
            "Bash(echo *)",
            "Bash(date *)",
            "Bash(mkdir *)",
            "Bash(mkdir -p *)",
            "Bash(cp *)",
            "Bash(mv *)",
            "Bash(node *)",
            "Bash(python *)",
            "Bash(python3 *)",
            "Bash(tsc *)",
            "Bash(eslint *)",
        ],
        disallowedTools: [
            ...DANGEROUS_DISALLOWED_TOOLS,
            "Bash(git push --force *)",
            "Bash(git branch -D *)",
            "Bash(git reset --hard *)",
            "Bash(git clean *)",
            "Bash(shutdown *)",
            "Bash(reboot *)",
            "Bash(docker *)",
            "Bash(kubectl *)",
            "Bash(brew *)",
            "Bash(npm install *)",
            "Bash(npm uninstall *)",
            "Bash(npm publish *)",
            "Bash(pip install *)",
            "Bash(pip uninstall *)",
            "Bash(yarn add *)",
            "Bash(yarn remove *)",
            "Bash(pnpm add *)",
            "Bash(pnpm remove *)",
        ],
        maxTurns: input.max_turns,
        // Implement runs get a generous default: 10 minutes.
        timeoutSec: input.timeout_sec ?? 600,
        jsonSchema: IMPLEMENT_SCHEMA,
        resumeSessionId,
        forkSession: forked,
        maxBudgetUsd: input.max_cost_usd,
    };
}
2355
/**
 * Probe the local environment for everything the delegate server needs and
 * return an aggregated status record.
 *
 * Checks, best-effort (probe failures are appended to `result.errors`
 * rather than thrown, so callers always get a complete object):
 *  - claude CLI: presence on PATH, version, auth status
 *  - git: presence, worktree support, whether `cwd` is inside a repo
 *  - delegated worktrees under `.claude/worktrees` (count + stale count)
 *  - environment diagnostics via getEnvironmentDiagnostics()
 *
 * @param {string} cwd - Directory the probe commands are executed from.
 * @returns {Promise<object>} Status record (see `result` initializer for shape).
 */
export async function checkClaudeStatus(cwd) {
    const result = {
        claude_available: false,
        claude_version: null,
        auth_status: null,
        git_available: false,
        worktree_capable: false,
        cwd_valid: false,
        cwd_is_git_repo: false,
        delegated_worktrees_count: 0,
        delegated_worktrees: [],
        stale_worktrees_count: 0,
        errors: [],
    };
    // First wave of independent probes, run in parallel. Each promise maps
    // success/failure to an `{ ok }` record so Promise.all never rejects.
    const [claudeVersionResult, gitVersionResult, gitRepoResult] = await Promise.all([
        execCapture(CLAUDE_BIN, ["--version"], { cwd }).then((version) => ({ ok: true, version })).catch(() => ({ ok: false })),
        execCapture("git", ["--version"], { cwd }).then(() => ({ ok: true })).catch(() => ({ ok: false })),
        execCapture("git", ["rev-parse", "--git-dir"], { cwd }).then(() => ({ ok: true })).catch(() => ({ ok: false })),
    ]);
    if (claudeVersionResult.ok) {
        result.claude_available = true;
        result.claude_version = claudeVersionResult.version;
    }
    else {
        result.errors.push("claude CLI not found in PATH");
    }
    if (gitVersionResult.ok) {
        result.git_available = true;
    }
    else {
        result.errors.push("git not found in PATH");
    }
    result.cwd_is_git_repo = gitRepoResult.ok;
    // Second wave: auth and worktree checks, gated on the first-wave results.
    const [authResult, worktreeResult] = await Promise.all([
        result.claude_available
            ? execCapture(CLAUDE_BIN, ["auth", "status"], { cwd })
                .then((authOutput) => ({ ok: true, authOutput }))
                .catch(() => ({ ok: false }))
            : Promise.resolve({ ok: false }),
        result.git_available
            ? execCapture("git", ["worktree", "list"], { cwd })
                .then((wl) => ({ ok: true, wl }))
                .catch(() => ({ ok: false }))
            : Promise.resolve({ ok: false }),
    ]);
    if (result.claude_available) {
        if (authResult.ok) {
            try {
                // Prefer the structured JSON form of `claude auth status`.
                const authJson = JSON.parse(authResult.authOutput);
                result.auth_status = authJson.loggedIn === true ? "authenticated" : "not authenticated";
            }
            catch {
                // Fall back to a substring heuristic when output is not JSON.
                result.auth_status = authResult.authOutput.includes("Logged in") || authResult.authOutput.includes("loggedIn") ? "authenticated" : "unknown";
            }
        }
        else {
            result.auth_status = "unauthenticated or unknown";
            result.errors.push("claude auth status could not be verified");
        }
    }
    if (result.git_available) {
        if (worktreeResult.ok) {
            // `git worktree list` ran successfully, so worktrees are usable.
            // (Was `wl.length >= 0`, a tautology: string length is never negative.)
            result.worktree_capable = true;
        }
        else {
            result.errors.push("git worktree not supported in this repo");
        }
    }
    result.cwd_valid = result.git_available && result.cwd_is_git_repo;
    // Scan for delegated worktrees
    const worktreeDir = path.join(cwd, ".claude", "worktrees");
    try {
        const { readdirSync, statSync } = await import("node:fs");
        if (existsSync(worktreeDir)) {
            const entries = readdirSync(worktreeDir, { withFileTypes: true })
                .filter((d) => d.isDirectory())
                .map((d) => d.name);
            result.delegated_worktrees = entries.filter((n) => n.startsWith("codex-delegated-")).sort();
            result.delegated_worktrees_count = result.delegated_worktrees.length;
            // Count worktrees older than 24h as stale
            const cutoff = Date.now() - 24 * 60 * 60 * 1000;
            result.stale_worktrees_count = result.delegated_worktrees.filter((n) => {
                try {
                    return statSync(path.join(worktreeDir, n)).mtimeMs < cutoff;
                }
                catch {
                    // Unstat-able entries are treated as not stale.
                    return false;
                }
            }).length;
        }
    }
    catch {
        // best-effort worktree scan
    }
    // Environment diagnostics (best-effort)
    try {
        result.environment_diagnostics = await getEnvironmentDiagnostics();
    }
    catch {
        // best-effort only
    }
    return result;
}
/**
 * Run a Claude "query" (read-only Q&A) task, with best-effort session
 * auto-resume for the same repository.
 *
 * Flow: look up a recent query session (unless disabled via `input.resume`
 * / `input.fast`), spawn Claude with that session id, persist the returned
 * session, log the run, and wrap the report in a result envelope with
 * per-phase timings. If resuming fails because the session no longer
 * exists, the session is marked expired and the query is retried once
 * without resume.
 *
 * @param {object} input - Query input (cwd, prompt, resume/fast flags, ...).
 * @param {string} runId - Unique id used for run logging.
 * @returns {Promise<object>} Success envelope containing the Claude report.
 * @throws Rethrows spawn errors (after logging) when not recoverable.
 */
export async function runClaudeQuery(input, runId) {
    const queryStart = Date.now();
    const store = await getStore(input.cwd);
    const repoKey = await computeRepoKey(input.cwd);
    const sessionLookupStart = Date.now();
    // Auto-resume: find recent query session for the same repo.
    // Explicit `input.resume` wins; otherwise resume unless `fast` is set.
    const shouldResume = input.resume ?? !input.fast;
    const recent = shouldResume ? store.getRecent(repoKey, "query", RECENT_WINDOW_MINUTES) : null;
    const requestedSessionId = shouldResume ? (recent?.session_id ?? null) : null;
    const sessionLookupMs = Date.now() - sessionLookupStart;
    let resumed = false;
    // Queries never fork sessions; recorded in the session log for audit.
    const forked = false;
    const opts = {
        ...createQueryOptions(input),
        resumeSessionId: requestedSessionId ?? undefined,
    };
    try {
        const claudeRunStart = Date.now();
        const { report, session_id, execution } = await spawnClaude(opts);
        const claudeRunMs = Date.now() - claudeRunStart;
        resumed = !!requestedSessionId;
        // Persist session so the next query in this repo can resume it.
        if (session_id) {
            store.upsert(session_id, "query", repoKey, input.cwd, String(report.answer ?? "").slice(0, 200));
        }
        const sessionLog = { requested_session_id: requestedSessionId, resumed, forked, returned_session_id: session_id };
        const logWriteStart = Date.now();
        await logRun(runId, { type: "query", input, report, session: sessionLog }, input.cwd);
        const logWriteMs = Date.now() - logWriteStart;
        const pruneStart = Date.now();
        store.prune();
        const pruneMs = Date.now() - pruneStart;
        return makeEnvelope("success", report, {
            ...execution,
            timings: {
                session_lookup_ms: sessionLookupMs,
                claude_run_ms: claudeRunMs,
                log_write_ms: logWriteMs,
                store_prune_ms: pruneMs,
                total_ms: Date.now() - queryStart,
            },
        }, [], { claude_report: report });
    }
    catch (err) {
        const errorMsg = err.message;
        // If resume failed (session not found / expired), mark expired and retry without resume
        if (requestedSessionId && isSessionNotFoundError(errorMsg)) {
            store.markExpired(requestedSessionId);
            log(`Session ${requestedSessionId} not found, falling back to new session`);
            // Retry without resume
            const retryOpts = { ...opts, resumeSessionId: undefined };
            try {
                const claudeRunStart = Date.now();
                const { report, session_id, execution } = await spawnClaude(retryOpts);
                const claudeRunMs = Date.now() - claudeRunStart;
                if (session_id) {
                    store.upsert(session_id, "query", repoKey, input.cwd, String(report.answer ?? "").slice(0, 200));
                }
                const sessionLog = { requested_session_id: requestedSessionId, resumed: false, forked: false, returned_session_id: session_id };
                const logWriteStart = Date.now();
                await logRun(runId, { type: "query", input, report, session: sessionLog, retried_after_session_expired: true }, input.cwd);
                const logWriteMs = Date.now() - logWriteStart;
                return makeEnvelope("success", report, {
                    ...execution,
                    timings: {
                        session_lookup_ms: sessionLookupMs,
                        claude_run_ms: claudeRunMs,
                        log_write_ms: logWriteMs,
                        retried_after_session_expired: 1,
                        total_ms: Date.now() - queryStart,
                    },
                }, [], { claude_report: report });
            }
            catch (retryErr) {
                // Retry also failed: log and surface the retry error.
                await logRun(runId, { type: "query", input, error: retryErr.message, retried_after_session_expired: true }, input.cwd);
                throw retryErr;
            }
        }
        // Non-recoverable spawn failure: log and rethrow the original error.
        await logRun(runId, { type: "query", input, error: errorMsg }, input.cwd);
        throw err;
    }
}
// ---- Session failure detection ----
/**
 * Heuristically decide whether a spawn error message means the requested
 * Claude session no longer exists (deleted / expired / invalid), in which
 * case callers mark it expired and retry without resume.
 *
 * NOTE(review): the bare `not found` pattern is very broad and will also
 * match unrelated errors such as "config file not found" — confirm this
 * breadth is intended before tightening.
 *
 * @param {string} msg - Error message text to classify.
 * @returns {boolean} True when the message matches a session-gone pattern.
 */
function isSessionNotFoundError(msg) {
    const sessionGonePatterns = [
        /session not found/i,
        /no conversation found/i,
        /not found/i,
        /session.*expired/i,
        /invalid session/i,
    ];
    return sessionGonePatterns.some((pattern) => pattern.test(msg));
}
/**
 * Execute a queued background job end-to-end.
 *
 * Marks the job as running (with a heartbeat), dispatches to the runner
 * that matches the job type (query / review / implement / apply, anything
 * else goes to cleanup), then records the outcome back into the job store.
 * A job that is cancelled before or during execution is left untouched.
 *
 * @param {string} jobId - Id of the job to execute.
 * @throws When the job id is unknown, or when the runner fails (after the
 *         failure has been recorded on the job).
 */
export async function executeBackgroundJob(jobId) {
    const jobStore = await getJobStore();
    const job = await jobStore.get(jobId);
    if (!job) {
        throw new Error(`Background job not found: ${jobId}`);
    }
    if (job.status === "cancelled") {
        return;
    }
    // Transition to "running" and stamp the first heartbeat.
    const runningAt = new Date().toISOString();
    const running = await jobStore.update(jobId, {
        status: "running",
        updated_at: runningAt,
        heartbeat_at: runningAt,
    });
    // The update may race with a cancellation; honor whichever won.
    if (!running || running.status === "cancelled") {
        return;
    }
    const runId = randomUUID();
    const stopHeartbeat = startJobHeartbeat(jobStore, jobId);
    try {
        let result;
        switch (running.type) {
            case "query":
                result = await runClaudeQuery(running.payload, runId);
                break;
            case "review":
                result = await runClaudeReview(running.payload, runId);
                break;
            case "implement":
                result = await runClaudeImplement(running.payload, runId);
                break;
            case "apply":
                result = await runClaudeApply(running.payload, runId);
                break;
            default:
                // Remaining job type is cleanup.
                result = await runClaudeCleanup(running.payload, runId);
                break;
        }
        await jobStore.update(jobId, {
            status: "succeeded",
            result_status: extractBackgroundResultStatus(result),
            updated_at: new Date().toISOString(),
            result,
            summary: summarizeBackgroundResult(running.type, result),
            run_id: runId,
            worktree_name: getBackgroundWorktreeName(running.type, running.payload, result),
            error: undefined,
        });
    }
    catch (err) {
        // A cancellation that raced with execution wins: don't mark failed.
        const latest = await jobStore.get(jobId);
        if (latest?.status === "cancelled") {
            return;
        }
        await jobStore.update(jobId, {
            status: "failed",
            updated_at: new Date().toISOString(),
            error: err instanceof Error ? err.message : String(err),
            summary: `Background ${running.type} job failed`,
        });
        throw err;
    }
    finally {
        stopHeartbeat();
    }
}
/**
 * Run a Claude review task: spawn Claude with review options, log the run,
 * clear the pending review gate, and return a success envelope.
 *
 * @param {object} input - Review input (must include `cwd`).
 * @param {string} runId - Unique id used for run logging.
 * @returns {Promise<object>} Success envelope containing the Claude report.
 * @throws Rethrows spawn/log failures after writing an error log entry.
 */
export async function runClaudeReview(input, runId) {
    const reviewOptions = createReviewOptions(input);
    try {
        const spawned = await spawnClaude(reviewOptions);
        await logRun(runId, { type: "review", input, report: spawned.report }, input.cwd);
        // Review completed: clear the pending review gate (best-effort).
        await markReviewGatePending(input.cwd, false, "review").catch(() => { });
        return makeEnvelope("success", spawned.report, spawned.execution, [], { claude_report: spawned.report });
    }
    catch (err) {
        await logRun(runId, { type: "review", input, error: err.message }, input.cwd);
        throw err;
    }
}
/**
 * Run a Claude "implement" task inside an isolated git worktree.
 *
 * Flow:
 *  1. Optionally resolve `resume_latest` to a concrete session key.
 *  2. Prepare (or reuse) a detached worktree under `.claude/worktrees/`
 *     and record its base commit; refuse early under the "ask" dirty
 *     policy when requested files have uncommitted changes.
 *  3. Spawn Claude in the worktree; on a session-not-found error, mark
 *     the session expired and return a synthetic "failed" envelope.
 *  4. Independently observe the worktree's actual changes, enforce
 *     resource/scope limits, log the run, and return an envelope whose
 *     status is derived from report + execution + observations.
 *
 * The worktree is intentionally retained so results can be inspected and
 * applied later via claude_apply / cleaned via claude_cleanup.
 *
 * @param {object} input - Implement input (cwd, files, session/dirty/limit options).
 * @param {string} runId - Unique id; also seeds the default worktree name.
 * @returns {Promise<object>} Result envelope with claude_report + server_observed.
 * @throws On worktree preparation failure or non-recoverable spawn errors.
 */
export async function runClaudeImplement(input, runId) {
    const store = await getStore(input.cwd);
    const repoKey = await computeRepoKey(input.cwd);
    let implementInput = input;
    // resume_latest: translate "latest session" into a concrete session key.
    if (implementInput.resume_latest) {
        const latest = await resolveLatestImplementSession({ cwd: implementInput.cwd });
        if (!latest) {
            throw new Error("No resumable implement session found for this repository.");
        }
        implementInput = { ...implementInput, session_key: latest.session_id };
    }
    // Worktree location: .claude/worktrees/codex-delegated-<runId prefix>.
    const worktreeName = implementInput.worktreeName ?? `codex-delegated-${runId.slice(0, 8)}`;
    const worktreeRelPath = path.join(".claude", "worktrees", worktreeName);
    const worktreePath = path.join(implementInput.cwd, worktreeRelPath);
    const requestedFiles = normalizeRequestedFiles(implementInput.cwd, implementInput.files);
    const dirtyPolicy = implementInput.dirty_policy ?? "ask";
    let baseCommit;
    // Under the default "ask" policy, dirty requested files block the run
    // and a needs-user result is returned instead of proceeding.
    const dirtyFiles = await findDirtyImplementFiles(implementInput.cwd, requestedFiles);
    if (dirtyPolicy === "ask" && dirtyFiles.length > 0) {
        const message = formatDirtyImplementMessage(dirtyFiles, requestedFiles);
        const result = dirtyNeedsUserResult(implementInput, dirtyFiles, requestedFiles);
        await logRun(runId, {
            type: "implement",
            input: implementInput,
            report: result.claude_report,
            observed: result.server_observed,
            execution: result.execution,
            requested_files: requestedFiles,
            dirty_requested_files: dirtyFiles,
            error: message,
            duration_ms: 0,
        }, implementInput.cwd);
        return result;
    }
    try {
        // Create the detached worktree at HEAD if it does not already exist.
        if (!existsSync(worktreePath)) {
            await mkdir(path.dirname(worktreePath), { recursive: true });
            await execCapture("git", ["worktree", "add", "--detach", worktreeRelPath, "HEAD"], {
                cwd: implementInput.cwd,
                timeoutMs: 30000,
            });
        }
        // Base commit is resolved inside the worktree; apply diffs against it later.
        const resolvedBase = await execCapture("git", ["rev-parse", "HEAD"], { cwd: worktreePath });
        baseCommit = resolvedBase.trim() || undefined;
        // "snapshot" policy: copy the main workspace's dirty state into the worktree.
        if (dirtyPolicy === "snapshot") {
            await applyDirtySnapshotToWorktree(implementInput.cwd, worktreePath);
        }
        await ensureImplementWorkspaceScaffold(worktreePath);
    }
    catch (err) {
        await logRun(runId, {
            type: "implement",
            input: implementInput,
            error: `Failed to prepare worktree/base commit: ${err instanceof Error ? err.message : String(err)}`,
            duration_ms: 0,
        }, implementInput.cwd);
        throw err;
    }
    const resumeSessionId = implementInput.session_key ?? undefined;
    const forked = implementInput.fork_session ?? false;
    // Claude runs inside the worktree, scoped to the requested files (if any).
    const claudeInput = {
        ...implementInput,
        cwd: worktreePath,
        files: requestedFiles.length > 0 ? requestedFiles : undefined,
    };
    const opts = createImplementOptions(claudeInput, resumeSessionId, forked);
    let report;
    let returnedSessionId = null;
    let execution;
    const startTime = Date.now();
    try {
        const result = await spawnClaude(opts);
        report = result.report;
        returnedSessionId = result.session_id;
        execution = result.execution;
    }
    catch (err) {
        const errorMsg = err.message;
        // Resumed session vanished: mark it expired and return a synthetic
        // "failed" envelope (no retry here, unlike runClaudeQuery).
        if (resumeSessionId && isSessionNotFoundError(errorMsg)) {
            store.markExpired(resumeSessionId);
            log(`Session ${resumeSessionId} not found, marked expired`);
            const durationMs = Date.now() - startTime;
            const failedExecution = {
                exit_code: 1,
                duration_ms: durationMs,
                timed_out: false,
                stdout_tail: "",
                stderr_tail: errorMsg.slice(-4000),
            };
            const warnings = [
                `Claude session ${resumeSessionId} is unavailable and was marked expired. Start a fresh claude_implement run instead of resume_latest.`,
            ];
            const failedReport = {
                status: "failed",
                summary: `Claude session ${resumeSessionId} is unavailable.`,
                changed_files: [],
                commands_run: [],
                tests: { ran: false },
                risks: ["The delegated worktree may still exist and should be inspected or cleaned up."],
                next_steps: [
                    "Inspect the failed run with claude_run_inspect.",
                    "Start a fresh claude_implement run if the task still needs to continue.",
                    "Clean up the delegated worktree if it is not useful.",
                ],
            };
            // Still observe the worktree (best-effort) so the log reflects reality.
            const observed = await observeResult(implementInput.cwd, worktreeName, baseCommit, requestedFiles).catch(() => undefined);
            const sessionLog = {
                requested_session_id: resumeSessionId,
                resumed: true,
                forked,
                returned_session_id: null,
            };
            await logRun(runId, {
                type: "implement",
                input: implementInput,
                report: failedReport,
                observed,
                execution: failedExecution,
                session: sessionLog,
                error: errorMsg,
                duration_ms: durationMs,
            }, implementInput.cwd);
            return makeEnvelope("failed", undefined, failedExecution, warnings, {
                claude_report: failedReport,
                server_observed: observed,
            });
        }
        // Any other spawn failure: log and rethrow.
        await logRun(runId, { type: "implement", input: implementInput, error: errorMsg, duration_ms: Date.now() - startTime }, implementInput.cwd);
        throw err;
    }
    // Persist session (record only, never auto-resume implement)
    if (returnedSessionId) {
        store.upsert(returnedSessionId, "implement", repoKey, implementInput.cwd, report.summary ?? "");
    }
    // Observe actual changes (don't trust Claude's self-report alone)
    const observed = await observeResult(implementInput.cwd, worktreeName, baseCommit, requestedFiles);
    // Check resource limits
    if (implementInput.max_changed_files !== undefined || implementInput.max_cost_usd !== undefined) {
        const warnings = [];
        const exceeded = implementInput.max_changed_files !== undefined &&
            observed.changed_files.length > implementInput.max_changed_files;
        if (exceeded) {
            const msg = `Changed ${observed.changed_files.length} files, exceeds limit of ${implementInput.max_changed_files}`;
            warnings.push(msg);
            log(`Resource warning: ${msg}`);
        }
        // Recorded on `observed` so claude_apply can refuse over-limit worktrees.
        observed.resource_limits = {
            max_cost_usd: implementInput.max_cost_usd,
            max_changed_files: implementInput.max_changed_files,
            actual_changed_files: observed.changed_files.length,
            changed_files_exceeded: exceeded,
            warnings,
        };
    }
    if (observed.scope?.scope_exceeded) {
        for (const warning of observed.scope.warnings) {
            log(`Scope warning: ${warning}`);
        }
    }
    const sessionLog = {
        requested_session_id: resumeSessionId ?? null,
        resumed: !!resumeSessionId,
        forked,
        returned_session_id: returnedSessionId,
    };
    await logRun(runId, {
        type: "implement",
        input: implementInput,
        report,
        observed,
        execution,
        session: sessionLog,
        duration_ms: Date.now() - startTime,
    }, implementInput.cwd);
    store.prune();
    // Envelope status blends Claude's report, the execution record, and
    // server-side observations; "partial" means unclean exit but changes exist.
    const status = implementEnvelopeStatus(report, execution, observed);
    const recoveryWarnings = status === "partial"
        ? [
            "Claude ended before a clean completion, but changed files were observed. Inspect the worktree with claude_result or claude_run_inspect before preview/apply, and consider resuming with claude_implement if needed.",
        ]
        : status === "failed"
            ? [
                "Claude ended before a clean completion and no changed files were observed. Inspect diagnostics, then retry or resume instead of applying this worktree.",
            ]
            : [];
    const warnings = [
        ...(observed.resource_limits?.warnings ?? []),
        ...(observed.scope?.warnings ?? []),
        ...recoveryWarnings,
        "Worktree is retained for inspection. After applying results, call claude_cleanup to remove old delegated worktrees.",
    ];
    // Implement produced writes: raise the review gate (best-effort).
    await markReviewGatePending(implementInput.cwd, true, "write").catch(() => { });
    return makeEnvelope(status, undefined, execution, warnings, {
        claude_report: report,
        server_observed: observed,
    });
}
// ---- Apply worktree diff to main workspace ----
/**
 * Apply (or preview) the changes a delegated worktree made, back into the
 * main workspace.
 *
 * Fail-closed pipeline:
 *  1. Validate that `worktree_path` is a `codex-delegated-*` worktree under
 *     `.claude/worktrees` and that non-preview runs carry explicit user
 *     confirmation.
 *  2. Resolve the implement run's metadata (base commit + observed changed
 *     files) via job lookup, then log scan; refuse when missing.
 *  3. Enumerate committed, uncommitted, and untracked changes in the
 *     worktree, restricted to the observed scope.
 *  4. Refuse if resource/scope limits were exceeded, if the main workspace
 *     has uncommitted changes to affected files, or if preview is requested.
 *  5. Copy/delete files into the main workspace; optionally remove the
 *     worktree afterwards.
 *
 * All exits funnel through `finish`, which logs the run, updates the
 * worktree lifecycle record, and raises the review gate on real applies.
 *
 * @param {object} input - Apply input: cwd, worktree_path, preview,
 *        confirmed_by_user, cleanup.
 * @param {string} runId - Unique id used for run logging.
 * @returns {Promise<object>} Result record: applied_files, diff_stat,
 *          cleanup_performed, conflicts, planned_changes, optional error.
 */
export async function runClaudeApply(input, runId) {
    const startTime = Date.now();
    // Shared exit path: log the run, update lifecycle state, and (for real
    // applies that changed files) raise the review gate — all best-effort.
    const finish = async (result) => {
        await logRun(runId, {
            type: "apply",
            input,
            applied_files: result.applied_files,
            cleanup_performed: result.cleanup_performed,
            preview: input.preview === true,
            planned_changes: result.planned_changes,
            conflicts: result.conflicts,
            error: result.error,
            duration_ms: Date.now() - startTime,
        }, input.cwd);
        const wtRelPath = path.join(".claude", "worktrees", path.basename(path.resolve(input.cwd, input.worktree_path)));
        if (input.preview === true) {
            await updateImplementLifecycleForWorktree(wtRelPath, {
                current_lifecycle: result.error ? "apply_blocked" : "success",
                previewed_at: new Date().toISOString(),
                last_apply_run_id: runId,
            }, input.cwd).catch(() => { });
        }
        else {
            await updateImplementLifecycleForWorktree(wtRelPath, {
                current_lifecycle: result.error ? "apply_blocked" : (result.applied_files.length > 0 ? "applied" : "unknown"),
                applied_at: result.applied_files.length > 0 ? new Date().toISOString() : undefined,
                last_apply_run_id: runId,
            }, input.cwd).catch(() => { });
        }
        if (!result.error && input.preview !== true && result.applied_files.length > 0) {
            await markReviewGatePending(input.cwd, true, "write").catch(() => { });
        }
        return result;
    };
    // Validate worktree path
    const wtReal = path.resolve(input.cwd, input.worktree_path);
    const wtDir = path.join(input.cwd, ".claude", "worktrees");
    if (!wtReal.startsWith(wtDir + path.sep)) {
        return finish({ applied_files: [], diff_stat: "", cleanup_performed: false, conflicts: [], error: `worktree_path must be under ${wtDir}` });
    }
    if (!wtReal.startsWith(wtDir + path.sep + "codex-delegated-")) {
        return finish({ applied_files: [], diff_stat: "", cleanup_performed: false, conflicts: [], error: "worktree_path must be a delegated worktree (codex-delegated-*)" });
    }
    if (!existsSync(wtReal)) {
        return finish({ applied_files: [], diff_stat: "", cleanup_performed: false, conflicts: [], error: `worktree directory not found: ${wtReal}` });
    }
    // Non-preview apply requires explicit user approval
    if (input.preview !== true && input.confirmed_by_user !== true) {
        return finish({
            applied_files: [],
            diff_stat: "",
            cleanup_performed: false,
            conflicts: [],
            error: "Non-preview claude_apply requires confirmed_by_user=true after the user explicitly approves applying the previewed diff.",
            preview: false,
            planned_changes: [],
        });
    }
    const wtRelPath = path.join(".claude", "worktrees", path.basename(wtReal));
    // Try deterministic job-based lookup first, then scan-based fallback
    const jobMatch = await findImplementJobForWorktree(wtRelPath, input.cwd);
    let implementLog = null;
    if (jobMatch?.run_id) {
        const raw = await readRunLogFile(jobMatch.run_id, input.cwd);
        if (raw) {
            implementLog = raw;
        }
    }
    if (!implementLog) {
        implementLog = await findImplementLogForWorktree(wtRelPath, input.cwd);
    }
    // Extract the implement run's recorded base commit and changed files;
    // these define the diff base and the allowed pathspec scope.
    const observedBaseCommit = typeof implementLog?.observed?.base_commit === "string" ? implementLog.observed.base_commit.trim() : "";
    const baseCommit = observedBaseCommit || undefined;
    const observedChangedFiles = Array.isArray(implementLog?.observed?.changed_files)
        ? implementLog.observed.changed_files.filter((item) => typeof item === "string" && item.length > 0)
        : [];
    const hasObservedScope = baseCommit !== undefined && observedChangedFiles.length > 0;
    const pathspecs = hasObservedScope ? observedChangedFiles : [];
    // Fail closed when implement metadata is missing — no legacy fallback
    if (!baseCommit) {
        const wtName = path.basename(wtReal);
        return finish({
            applied_files: [],
            diff_stat: "",
            cleanup_performed: false,
            conflicts: [],
            error: `No implement metadata found for worktree "${wtName}". The implement run's base commit and changed files could not be resolved. Use claude_result or claude_run_inspect to find the implement session, then retry apply with the correct worktree_path.`,
            preview: input.preview === true,
            planned_changes: [],
        });
    }
    let diffStat = "";
    diffStat = await execCapture("git", ["diff", "--stat", baseCommit, "HEAD", "--", ...pathspecs], { cwd: wtReal, timeoutMs: 10000 }).catch(() => "");
    // Gather three views of the worktree's state in parallel:
    // committed-vs-base, uncommitted tracked changes, and untracked files.
    const [trackedStatus, uncommittedStatus, untrackedStatus] = await Promise.all([
        execCapture("git", ["diff", "--name-status", "-z", baseCommit, "HEAD", "--", ...pathspecs], { cwd: wtReal, timeoutMs: 10000 }).catch(() => ""),
        execCapture("git", ["diff", "--name-status", "-z", "--", ...pathspecs], { cwd: wtReal, timeoutMs: 10000 }).catch(() => ""),
        execCapture("git", ["status", "--porcelain=v1", "-z", "--", ...pathspecs], { cwd: wtReal, timeoutMs: 10000 }).catch(() => ""),
    ]);
    // Deduplicate by file path; later sources overwrite earlier ones, and
    // files outside the observed scope are dropped.
    const changesByFile = new Map();
    function addChange(change) {
        if (hasObservedScope && !observedChangedFiles.some((observed) => isUnderRequestedFile(change.file, observed))) {
            return;
        }
        changesByFile.set(change.file, change);
    }
    for (const change of parseNameStatusPorcelainZ(trackedStatus))
        addChange(change);
    for (const change of parseNameStatusPorcelainZ(uncommittedStatus))
        addChange(change);
    for (const change of parseStatusPorcelainZ(untrackedStatus))
        addChange(change);
    // Expand directory entries into individual files, then sort for stable output.
    const changes = (await Promise.all([...changesByFile.values()].map((change) => expandDirectoryChange(change, wtReal))))
        .flat()
        .sort((a, b) => a.file.localeCompare(b.file));
    // Synthesize a stat listing when `git diff --stat` produced nothing.
    if (!diffStat.trim() && changes.length > 0) {
        diffStat = changes.map((c) => `${c.status}\t${c.file}`).join("\n");
    }
    const plannedChanges = changes.map((c) => ({ status: c.status, file: c.file }));
    if (changes.length === 0) {
        return finish({ applied_files: [], diff_stat: diffStat, cleanup_performed: false, conflicts: [], error: "No changed files found in worktree", preview: input.preview === true, planned_changes: plannedChanges });
    }
    // Refuse worktrees the implement run flagged as over resource limits.
    const resourceLimits = implementLog?.observed?.resource_limits;
    if (resourceLimits?.changed_files_exceeded === true) {
        const warnings = Array.isArray(resourceLimits.warnings)
            ? resourceLimits.warnings.filter((item) => typeof item === "string")
            : [];
        return finish({
            applied_files: [],
            diff_stat: diffStat,
            cleanup_performed: false,
            conflicts: warnings,
            error: "Worktree exceeded implement resource limits; apply refused",
            preview: input.preview === true,
            planned_changes: plannedChanges,
        });
    }
    // Refuse worktrees flagged for out-of-scope changes.
    const observedScope = implementLog?.observed?.scope;
    if (observedScope?.scope_exceeded === true) {
        const warnings = Array.isArray(observedScope.warnings)
            ? observedScope.warnings.filter((item) => typeof item === "string")
            : [];
        return finish({
            applied_files: [],
            diff_stat: diffStat,
            cleanup_performed: false,
            conflicts: warnings,
            error: "Worktree contains changes outside requested files; apply refused",
            preview: input.preview === true,
            planned_changes: plannedChanges,
        });
    }
    // Preflight: check for uncommitted changes in main workspace and
    // unsupported status codes. If any issues found, refuse the entire apply.
    const conflicts = [];
    const validStatuses = new Set(["A", "M", "D"]);
    for (const c of changes) {
        if (!validStatuses.has(c.status)) {
            conflicts.push(`${c.file}: unsupported status "${c.status}" (only A/M/D supported)`);
            continue;
        }
        try {
            const shortStat = await execCapture("git", ["status", "--short", "--", c.file], { cwd: input.cwd, timeoutMs: 10000 });
            if (shortStat.trim()) {
                conflicts.push(`${c.file}: main workspace has uncommitted changes (${shortStat.trim().slice(0, 80)})`);
            }
        }
        catch { }
        // NOTE(review): a failed `git status` probe is silently ignored here,
        // which lets the apply proceed for that file — confirm this is intended.
    }
    if (conflicts.length > 0) {
        return finish({ applied_files: [], diff_stat: diffStat, cleanup_performed: false, conflicts, error: "Main workspace has uncommitted or unsupported changes; apply refused", preview: input.preview === true, planned_changes: plannedChanges });
    }
    if (input.preview) {
        return finish({
            applied_files: [],
            diff_stat: diffStat,
            cleanup_performed: false,
            conflicts: [],
            preview: true,
            planned_changes: plannedChanges,
        });
    }
    // Apply changes
    const copied = [];
    for (const c of changes) {
        const dest = path.join(input.cwd, c.file);
        const src = path.join(wtReal, c.file);
        try {
            if (c.status === "D") {
                // Deletion
                if (existsSync(dest)) {
                    await import("node:fs/promises").then((m) => m.rm(dest, { recursive: true, force: true }).catch(() => { }));
                }
                copied.push(c.file);
            }
            else {
                // Add/modify: byte-for-byte copy from the worktree.
                const content = await import("node:fs/promises").then((m) => m.readFile(src));
                await mkdir(path.dirname(dest), { recursive: true });
                await writeFile(dest, content);
                copied.push(c.file);
            }
        }
        catch (err) {
            // Per-file failures are collected; the rest of the apply continues.
            conflicts.push(`${c.file} (${c.status}): ${err instanceof Error ? err.message : String(err)}`);
        }
    }
    if (copied.length === 0) {
        return finish({ applied_files: [], diff_stat: diffStat, cleanup_performed: false, conflicts, error: "No changes could be applied", planned_changes: plannedChanges });
    }
    // Optional: cleanup worktree
    let cleanupPerformed = false;
    if (input.cleanup) {
        try {
            await execCapture("git", ["worktree", "remove", "--force", wtRelPath], { cwd: input.cwd, timeoutMs: 30000 });
            await execCapture("git", ["worktree", "prune"], { cwd: input.cwd, timeoutMs: 10000 });
            cleanupPerformed = true;
        }
        catch (err) {
            // Cleanup failure is non-fatal; the apply itself already succeeded.
            log(`worktree remove failed for ${wtReal}: ${err}`);
        }
    }
    return finish({ applied_files: copied, diff_stat: diffStat, cleanup_performed: cleanupPerformed, conflicts, planned_changes: plannedChanges });
}
// ---- Cleanup delegated worktrees ----
/**
 * Remove (or, by default, just list) delegated worktrees under
 * `.claude/worktrees`.
 *
 * Only `codex-delegated-*` directories are considered. An optional age
 * filter (`older_than_hours`) skips worktrees modified within the window.
 * `dry_run` defaults to true, in which case candidates are reported but
 * nothing is removed. Real removals also update the worktree's lifecycle
 * record and finish with `git worktree prune`.
 *
 * @param {object} input - Cleanup input: cwd, dry_run, older_than_hours.
 * @param {string} runId - Unique id used for run logging.
 * @returns {Promise<object>} `{ dry_run, removed_count, failed_count, entries }`.
 */
export async function runClaudeCleanup(input, runId) {
    const startTime = Date.now();
    const dryRun = input.dry_run !== false; // default true
    const olderThanHours = input.older_than_hours ?? 0;
    const worktreeDir = path.join(input.cwd, ".claude", "worktrees");
    const results = [];
    let removedCount = 0;
    let failedCount = 0;
    try {
        const { readdirSync, statSync } = await import("node:fs");
        if (!existsSync(worktreeDir)) {
            // Nothing to clean up.
            return { dry_run: dryRun, removed_count: 0, failed_count: 0, entries: [] };
        }
        const candidateNames = readdirSync(worktreeDir, { withFileTypes: true })
            .filter((entry) => entry.isDirectory() && entry.name.startsWith("codex-delegated-"))
            .map((entry) => entry.name);
        const cutoff = olderThanHours > 0 ? Date.now() - olderThanHours * 60 * 60 * 1000 : 0;
        for (const wtName of candidateNames) {
            const absPath = path.join(worktreeDir, wtName);
            const relPath = path.join(".claude", "worktrees", wtName);
            // Age filter: skip worktrees modified inside the window.
            if (olderThanHours > 0) {
                try {
                    if (statSync(absPath).mtimeMs > cutoff) {
                        results.push({ worktree_name: wtName, removed: false, error: "skipped (within time window)" });
                        continue;
                    }
                }
                catch {
                    results.push({ worktree_name: wtName, removed: false, error: "unable to stat" });
                    continue;
                }
            }
            if (dryRun) {
                // Report the candidate without touching it.
                results.push({ worktree_name: wtName, removed: false });
                continue;
            }
            // Actual remove — use relative path from repo root, not just basename
            try {
                await execCapture("git", ["worktree", "remove", "--force", relPath], { cwd: input.cwd, timeoutMs: 30000 });
                removedCount++;
                results.push({ worktree_name: wtName, removed: true });
                await updateImplementLifecycleForWorktree(relPath, {
                    current_lifecycle: "cleaned",
                    cleaned_at: new Date().toISOString(),
                    last_cleanup_run_id: runId,
                }, input.cwd).catch(() => { });
            }
            catch (err) {
                failedCount++;
                results.push({ worktree_name: wtName, removed: false, error: err instanceof Error ? err.message : String(err) });
            }
        }
        if (!dryRun) {
            // Best-effort prune of git's stale worktree bookkeeping.
            await execCapture("git", ["worktree", "prune"], { cwd: input.cwd, timeoutMs: 10000 }).catch(() => { });
        }
    }
    catch (err) {
        // Scan-level failure: report a single synthetic failure entry.
        log(`cleanup scan failed: ${err}`);
        return {
            dry_run: dryRun,
            removed_count: 0,
            failed_count: 1,
            entries: [{ worktree_name: "", removed: false, error: err instanceof Error ? err.message : String(err) }],
        };
    }
    await logRun(runId, {
        type: "cleanup",
        input,
        removed_count: removedCount,
        failed_count: failedCount,
        duration_ms: Date.now() - startTime,
    }, input.cwd);
    return { dry_run: dryRun, removed_count: removedCount, failed_count: failedCount, entries: results };
}
+ //# sourceMappingURL=claude-cli.js.map