bosun 0.33.3 → 0.33.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/agent-pool.mjs CHANGED
@@ -239,8 +239,8 @@ async function withSanitizedOpenAiEnv(fn) {
239
239
  * provider settings via `config` and maps the API key via `env`.
240
240
  * Otherwise strips OPENAI_BASE_URL so the SDK uses its default auth.
241
241
  */
242
- function buildCodexSdkOptions() {
243
- const { env: resolvedEnv } = resolveCodexProfileRuntime(process.env);
242
+ function buildCodexSdkOptions(envInput = process.env) {
243
+ const { env: resolvedEnv } = resolveCodexProfileRuntime(envInput);
244
244
  const baseUrl = resolvedEnv.OPENAI_BASE_URL || "";
245
245
  const isAzure = baseUrl.includes(".openai.azure.com");
246
246
  const env = { ...resolvedEnv };
@@ -544,7 +544,13 @@ export function getAvailableSdks() {
544
544
  * @returns {Promise<{ success: boolean, output: string, items: Array, error: string|null, sdk: string }>}
545
545
  */
546
546
  async function launchCodexThread(prompt, cwd, timeoutMs, extra = {}) {
547
- const { onEvent, abortController: externalAC, onThreadReady = null, taskKey: steerKey = null } = extra;
547
+ const {
548
+ onEvent,
549
+ abortController: externalAC,
550
+ onThreadReady = null,
551
+ taskKey: steerKey = null,
552
+ envOverrides = null,
553
+ } = extra;
548
554
 
549
555
  let reportedThreadId = null;
550
556
  const emitThreadReady = (threadId) => {
@@ -583,7 +589,16 @@ async function launchCodexThread(prompt, cwd, timeoutMs, extra = {}) {
583
589
 
584
590
  // Pass feature overrides via --config so sub-agent and memory features are
585
591
  // available even if ~/.codex/config.toml hasn't been patched yet.
586
- const codexOpts = buildCodexSdkOptions();
592
+ const codexRuntimeEnv =
593
+ envOverrides && typeof envOverrides === "object"
594
+ ? { ...process.env, ...envOverrides }
595
+ : process.env;
596
+ const codexOpts = buildCodexSdkOptions(codexRuntimeEnv);
597
+ const modelOverride = String(extra?.model || "").trim();
598
+ if (modelOverride) {
599
+ codexOpts.env = { ...(codexOpts.env || {}), CODEX_MODEL: modelOverride };
600
+ codexOpts.config = { ...(codexOpts.config || {}), model: modelOverride };
601
+ }
587
602
  codexOpts.config = {
588
603
  ...(codexOpts.config || {}),
589
604
  features: {
@@ -1947,7 +1962,7 @@ function isPoisonedCodexResumeError(errorValue) {
1947
1962
  * @returns {Promise<{ success: boolean, output: string, items: Array, error: string|null, sdk: string, threadId: string|null }>}
1948
1963
  */
1949
1964
  async function resumeCodexThread(threadId, prompt, cwd, timeoutMs, extra = {}) {
1950
- const { onEvent, abortController: externalAC } = extra;
1965
+ const { onEvent, abortController: externalAC, envOverrides = null } = extra;
1951
1966
 
1952
1967
  let CodexClass;
1953
1968
  try {
@@ -1965,7 +1980,17 @@ async function resumeCodexThread(threadId, prompt, cwd, timeoutMs, extra = {}) {
1965
1980
  };
1966
1981
  }
1967
1982
 
1968
- const codex = new CodexClass(buildCodexSdkOptions());
1983
+ const codexRuntimeEnv =
1984
+ envOverrides && typeof envOverrides === "object"
1985
+ ? { ...process.env, ...envOverrides }
1986
+ : process.env;
1987
+ const codexOpts = buildCodexSdkOptions(codexRuntimeEnv);
1988
+ const modelOverride = String(extra?.model || "").trim();
1989
+ if (modelOverride) {
1990
+ codexOpts.env = { ...(codexOpts.env || {}), CODEX_MODEL: modelOverride };
1991
+ codexOpts.config = { ...(codexOpts.config || {}), model: modelOverride };
1992
+ }
1993
+ const codex = new CodexClass(codexOpts);
1969
1994
 
1970
1995
  let thread;
1971
1996
  try {
package/agent-work-analyzer.mjs CHANGED (file header missing in original diff; filename inferred from the "[agent-work-analyzer]" log prefixes below — confirm against the published package)
@@ -13,14 +13,12 @@
13
13
  */
14
14
 
15
15
  import { readFile, writeFile, appendFile, stat, watch, mkdir } from "fs/promises";
16
- import { createReadStream, existsSync, mkdirSync } from "fs";
16
+ import { createReadStream, existsSync } from "fs";
17
17
  import { createInterface } from "readline";
18
18
  import { resolve, dirname } from "path";
19
- import { fileURLToPath } from "url";
19
+ import { resolveRepoRoot } from "./repo-root.mjs";
20
20
 
21
- const __filename = fileURLToPath(import.meta.url);
22
- const __dirname = dirname(__filename);
23
- const repoRoot = resolve(__dirname, "../..");
21
+ const repoRoot = resolveRepoRoot({ cwd: process.cwd() });
24
22
 
25
23
  // ── Configuration ───────────────────────────────────────────────────────────
26
24
  const AGENT_WORK_STREAM = resolve(
@@ -45,6 +43,13 @@ const TOOL_LOOP_WINDOW_MS = 60 * 1000; // 1 minute
45
43
  const STUCK_DETECTION_THRESHOLD_MS = Number(
46
44
  process.env.AGENT_STUCK_THRESHOLD_MS || String(5 * 60 * 1000),
47
45
  ); // 5 minutes
46
+ const STUCK_SWEEP_INTERVAL_MS = Number(
47
+ process.env.AGENT_STUCK_SWEEP_INTERVAL_MS || "30000",
48
+ ); // 30 seconds
49
+ const INITIAL_REPLAY_MAX_SESSION_AGE_MS = Number(
50
+ process.env.AGENT_INITIAL_REPLAY_MAX_SESSION_AGE_MS ||
51
+ String(Math.max(STUCK_DETECTION_THRESHOLD_MS * 3, 15 * 60 * 1000)),
52
+ ); // Trim stale sessions after startup replay
48
53
 
49
54
  const COST_ANOMALY_THRESHOLD_USD = Number(
50
55
  process.env.AGENT_COST_ANOMALY_THRESHOLD || "1.0",
@@ -63,6 +68,7 @@ const ALERT_COOLDOWN_MS = 5 * 60 * 1000; // 5 minutes between same alert
63
68
 
64
69
  let filePosition = 0;
65
70
  let isRunning = false;
71
+ let stuckSweepTimer = null;
66
72
 
67
73
  /**
68
74
  * Start the analyzer loop
@@ -89,6 +95,7 @@ export async function startAnalyzer() {
89
95
  // Initial read of existing log
90
96
  if (existsSync(AGENT_WORK_STREAM)) {
91
97
  filePosition = await processLogFile(filePosition);
98
+ pruneStaleSessionsAfterReplay();
92
99
  } else {
93
100
  // Ensure the stream file exists so the watcher doesn't throw
94
101
  try {
@@ -98,6 +105,8 @@ export async function startAnalyzer() {
98
105
  }
99
106
  }
100
107
 
108
+ startStuckSweep();
109
+
101
110
  // Watch for changes — retry loop handles the case where the file
102
111
  // is deleted and recreated (e.g. log rotation).
103
112
  console.log(`[agent-work-analyzer] Watching: ${AGENT_WORK_STREAM}`);
@@ -130,6 +139,10 @@ export async function startAnalyzer() {
130
139
  */
131
140
  export function stopAnalyzer() {
132
141
  isRunning = false;
142
+ if (stuckSweepTimer) {
143
+ clearInterval(stuckSweepTimer);
144
+ stuckSweepTimer = null;
145
+ }
133
146
  console.log("[agent-work-analyzer] Stopped");
134
147
  }
135
148
 
@@ -182,7 +195,10 @@ async function processLogFile(startPosition) {
182
195
  * @param {Object} event - Parsed JSONL event
183
196
  */
184
197
  async function analyzeEvent(event) {
185
- const { attempt_id, event_type, timestamp, data } = event;
198
+ const { attempt_id, event_type, timestamp } = event;
199
+ const parsedTs = Date.parse(timestamp);
200
+ const eventTime = Number.isFinite(parsedTs) ? parsedTs : Date.now();
201
+ const eventIso = new Date(eventTime).toISOString();
186
202
 
187
203
  // Initialize session state if needed
188
204
  if (!activeSessions.has(attempt_id)) {
@@ -190,15 +206,15 @@ async function analyzeEvent(event) {
190
206
  attempt_id,
191
207
  errors: [],
192
208
  toolCalls: [],
193
- lastActivity: timestamp,
194
- startedAt: timestamp,
209
+ lastActivity: eventIso,
210
+ startedAt: eventIso,
195
211
  taskId: event.task_id,
196
212
  executor: event.executor,
197
213
  });
198
214
  }
199
215
 
200
216
  const session = activeSessions.get(attempt_id);
201
- session.lastActivity = timestamp;
217
+ session.lastActivity = eventIso;
202
218
 
203
219
  // Route to specific analyzers
204
220
  switch (event_type) {
@@ -217,8 +233,7 @@ async function analyzeEvent(event) {
217
233
  break;
218
234
  }
219
235
 
220
- // Continuous checks
221
- await checkStuckAgent(session, event);
236
+ // Stuck checks are timer-driven to avoid replay-triggered false positives.
222
237
  }
223
238
 
224
239
  // ── Pattern Analyzers ───────────────────────────────────────────────────────
@@ -376,9 +391,10 @@ async function analyzeSessionEnd(session, event) {
376
391
  /**
377
392
  * Check if agent appears stuck (no activity for X minutes)
378
393
  */
379
- async function checkStuckAgent(session, event) {
394
+ async function checkStuckAgent(session, nowMs = Date.now()) {
380
395
  const lastActivityTime = new Date(session.lastActivity).getTime();
381
- const timeSinceActivity = Date.now() - lastActivityTime;
396
+ if (!Number.isFinite(lastActivityTime)) return;
397
+ const timeSinceActivity = nowMs - lastActivityTime;
382
398
 
383
399
  if (timeSinceActivity > STUCK_DETECTION_THRESHOLD_MS) {
384
400
  await emitAlert({
@@ -394,6 +410,35 @@ async function checkStuckAgent(session, event) {
394
410
  }
395
411
  }
396
412
 
413
+ function pruneStaleSessionsAfterReplay() {
414
+ const now = Date.now();
415
+ for (const [attemptId, session] of activeSessions.entries()) {
416
+ const lastActivityTime = new Date(session.lastActivity).getTime();
417
+ if (
418
+ !Number.isFinite(lastActivityTime) ||
419
+ now - lastActivityTime > INITIAL_REPLAY_MAX_SESSION_AGE_MS
420
+ ) {
421
+ activeSessions.delete(attemptId);
422
+ }
423
+ }
424
+ }
425
+
426
+ async function runStuckSweep() {
427
+ if (!isRunning) return;
428
+ const now = Date.now();
429
+ for (const session of activeSessions.values()) {
430
+ await checkStuckAgent(session, now);
431
+ }
432
+ }
433
+
434
+ function startStuckSweep() {
435
+ if (stuckSweepTimer) return;
436
+ stuckSweepTimer = setInterval(() => {
437
+ void runStuckSweep();
438
+ }, STUCK_SWEEP_INTERVAL_MS);
439
+ stuckSweepTimer.unref?.();
440
+ }
441
+
397
442
  // ── Alert System ────────────────────────────────────────────────────────────
398
443
 
399
444
  /**
package/codex-config.mjs CHANGED
@@ -24,7 +24,7 @@
24
24
  */
25
25
 
26
26
  import { existsSync, readFileSync, writeFileSync, mkdirSync, statSync } from "node:fs";
27
- import { resolve, dirname, parse } from "node:path";
27
+ import { resolve, dirname, parse, isAbsolute } from "node:path";
28
28
  import { homedir } from "node:os";
29
29
  import { fileURLToPath } from "node:url";
30
30
  import { resolveCodexProfileRuntime } from "./codex-model-profiles.mjs";
@@ -145,7 +145,6 @@ const RECOMMENDED_FEATURES = {
145
145
  skill_mcp_dependency_install: { default: true, envVar: null, comment: "Auto-install MCP skill deps" },
146
146
 
147
147
  // Experimental (disabled by default unless explicitly enabled)
148
- apps: { default: true, envVar: "CODEX_FEATURES_APPS", comment: "ChatGPT Apps integration" },
149
148
  };
150
149
 
151
150
  const CRITICAL_ALWAYS_ON_FEATURES = new Set([
@@ -474,11 +473,12 @@ function parseTomlArrayLiteral(raw) {
474
473
  .split(",")
475
474
  .map((item) => item.trim())
476
475
  .filter(Boolean)
477
- .map((item) => item.replace(/^"(.*)"$/, "$1"));
476
+ .map((item) => item.replace(/^"(.*)"$/, "$1"))
477
+ .map((item) => item.replace(/\\(["\\])/g, "$1"));
478
478
  }
479
479
 
480
480
  function formatTomlArray(values) {
481
- return `[${values.map((value) => `"${String(value).replace(/"/g, '\\"')}"`).join(", ")}]`;
481
+ return `[${values.map((value) => `"${String(value).replace(/\\/g, "\\\\").replace(/"/g, '\\"')}"`).join(", ")}]`;
482
482
  }
483
483
 
484
484
  function normalizeWritableRoots(input, { repoRoot, additionalRoots, validateExistence = false } = {}) {
@@ -489,7 +489,7 @@ function normalizeWritableRoots(input, { repoRoot, additionalRoots, validateExis
489
489
  // Reject bare relative paths like ".git" — they resolve relative to CWD
490
490
  // at Codex launch time and cause "writable root does not exist" errors
491
491
  // (e.g. /home/user/.codex/.git). Only accept absolute paths.
492
- if (!trimmed.startsWith("/")) return;
492
+ if (!isAbsolute(trimmed)) return;
493
493
  // When validateExistence is true, skip paths that don't exist on disk.
494
494
  // This prevents the sandbox from failing to start with phantom roots.
495
495
  if (validateExistence && !existsSync(trimmed)) return;
@@ -499,7 +499,7 @@ function normalizeWritableRoots(input, { repoRoot, additionalRoots, validateExis
499
499
  // present even if validateExistence is true — they're the intended CWD.
500
500
  const addPrimaryRoot = (value) => {
501
501
  const trimmed = String(value || "").trim();
502
- if (!trimmed || !trimmed.startsWith("/")) return;
502
+ if (!trimmed || !isAbsolute(trimmed)) return;
503
503
  roots.add(trimmed);
504
504
  };
505
505
  if (Array.isArray(input)) {
@@ -516,7 +516,7 @@ function normalizeWritableRoots(input, { repoRoot, additionalRoots, validateExis
516
516
  const addRepoRootPaths = (repo) => {
517
517
  if (!repo) return;
518
518
  const r = String(repo).trim();
519
- if (!r || !r.startsWith("/")) return;
519
+ if (!r || !isAbsolute(r)) return;
520
520
  // Repo root and parent are always added (they're primary working dirs)
521
521
  addPrimaryRoot(r);
522
522
  const gitDir = resolve(r, ".git");
@@ -598,13 +598,13 @@ export function ensureGitAncestor(dir) {
598
598
  * @returns {string[]} Merged writable roots
599
599
  */
600
600
  export function buildTaskWritableRoots({ worktreePath, repoRoot, existingRoots = [] } = {}) {
601
- const roots = new Set(existingRoots.filter(r => r && r.startsWith("/") && existsSync(r)));
601
+ const roots = new Set(existingRoots.filter(r => r && isAbsolute(r) && existsSync(r)));
602
602
  const addIfExists = (p) => {
603
- if (p && p.startsWith("/") && existsSync(p)) roots.add(p);
603
+ if (p && isAbsolute(p) && existsSync(p)) roots.add(p);
604
604
  };
605
605
  // Add path even if it doesn't exist yet (will be created by the task)
606
606
  const addRoot = (p) => {
607
- if (p && p.startsWith("/")) roots.add(p);
607
+ if (p && isAbsolute(p)) roots.add(p);
608
608
  };
609
609
 
610
610
  if (worktreePath) {
@@ -634,6 +634,32 @@ export function hasSandboxWorkspaceWrite(toml) {
634
634
  return /^\[sandbox_workspace_write\]/m.test(toml);
635
635
  }
636
636
 
637
+ export function buildSandboxWorkspaceWrite(options = {}) {
638
+ const {
639
+ writableRoots = [],
640
+ repoRoot,
641
+ additionalRoots,
642
+ networkAccess = true,
643
+ excludeTmpdirEnvVar = false,
644
+ excludeSlashTmp = false,
645
+ } = options;
646
+
647
+ const desiredRoots = normalizeWritableRoots(writableRoots, { repoRoot, additionalRoots, validateExistence: true });
648
+ if (desiredRoots.length === 0) {
649
+ return "";
650
+ }
651
+ return [
652
+ "",
653
+ "# ── Workspace-write sandbox defaults (added by bosun) ──",
654
+ "[sandbox_workspace_write]",
655
+ `network_access = ${networkAccess}`,
656
+ `exclude_tmpdir_env_var = ${excludeTmpdirEnvVar}`,
657
+ `exclude_slash_tmp = ${excludeSlashTmp}`,
658
+ `writable_roots = ${formatTomlArray(desiredRoots)}`,
659
+ "",
660
+ ].join("\n");
661
+ }
662
+
637
663
  export function ensureSandboxWorkspaceWrite(toml, options = {}) {
638
664
  const {
639
665
  writableRoots = [],
@@ -1211,300 +1237,9 @@ export function ensureCodexConfig({
1211
1237
  profileProvidersAdded: [],
1212
1238
  timeoutsFixed: [],
1213
1239
  retriesAdded: [],
1214
- noChanges: false,
1240
+ noChanges: true,
1215
1241
  };
1216
1242
 
1217
- let toml = readCodexConfig();
1218
-
1219
- // If config.toml doesn't exist at all, create a minimal one
1220
- if (!toml) {
1221
- result.created = true;
1222
- toml = [
1223
- "# Codex CLI configuration",
1224
- "# Generated by bosun setup wizard",
1225
- "#",
1226
- "# See: codex --help or https://github.com/openai/codex for details.",
1227
- "",
1228
- "",
1229
- ].join("\n");
1230
- }
1231
-
1232
- // ── 1. Vibe-Kanban MCP server ────────────────────────────
1233
- // When VK is not the active kanban backend, remove the MCP section
1234
- // so the Codex CLI doesn't try to spawn it.
1235
-
1236
- if (skipVk) {
1237
- if (hasVibeKanbanMcp(toml)) {
1238
- toml = removeVibeKanbanMcp(toml);
1239
- result.vkRemoved = true;
1240
- }
1241
- } else if (!hasVibeKanbanMcp(toml)) {
1242
- toml += buildVibeKanbanBlock({ vkBaseUrl });
1243
- result.vkAdded = true;
1244
- } else {
1245
- // MCP section exists — ensure env vars are up to date
1246
- if (!hasVibeKanbanEnv(toml)) {
1247
- // Has the server but no env section — append env block
1248
- const envBlock = [
1249
- "",
1250
- "[mcp_servers.vibe_kanban.env]",
1251
- "# Ensure MCP always targets the correct VK API endpoint.",
1252
- `VK_BASE_URL = "${vkBaseUrl}"`,
1253
- `VK_ENDPOINT_URL = "${vkBaseUrl}"`,
1254
- "",
1255
- ].join("\n");
1256
-
1257
- // Insert after [mcp_servers.vibe_kanban] section content, before next section
1258
- const vkHeader = "[mcp_servers.vibe_kanban]";
1259
- const vkIdx = toml.indexOf(vkHeader);
1260
- const afterVk = vkIdx + vkHeader.length;
1261
- const nextSectionAfterVk = toml.indexOf("\n[", afterVk);
1262
-
1263
- if (nextSectionAfterVk === -1) {
1264
- toml += envBlock;
1265
- } else {
1266
- toml =
1267
- toml.substring(0, nextSectionAfterVk) +
1268
- "\n" +
1269
- envBlock +
1270
- toml.substring(nextSectionAfterVk);
1271
- }
1272
- result.vkEnvUpdated = true;
1273
- } else {
1274
- // Both server and env exist — ensure values match
1275
- const envVars = {
1276
- VK_BASE_URL: vkBaseUrl,
1277
- VK_ENDPOINT_URL: vkBaseUrl,
1278
- };
1279
- const before = toml;
1280
- toml = updateVibeKanbanEnv(toml, envVars);
1281
- if (toml !== before) {
1282
- result.vkEnvUpdated = true;
1283
- }
1284
- }
1285
- }
1286
-
1287
- // ── 1b. Ensure agent SDK selection block ──────────────────
1288
-
1289
- // Resolve which SDK should be primary:
1290
- // 1. Explicit primarySdk parameter
1291
- // 2. PRIMARY_AGENT env var (e.g. "copilot-sdk" → "copilot")
1292
- // 3. Default: "codex"
1293
- const resolvedPrimary = (() => {
1294
- if (primarySdk && ["codex", "copilot", "claude"].includes(primarySdk)) {
1295
- return primarySdk;
1296
- }
1297
- const envPrimary = (env.PRIMARY_AGENT || "").trim().toLowerCase().replace(/-sdk$/, "");
1298
- if (["codex", "copilot", "claude"].includes(envPrimary)) return envPrimary;
1299
- return "codex";
1300
- })();
1301
-
1302
- if (!hasAgentSdkConfig(toml)) {
1303
- toml += buildAgentSdkBlock({ primary: resolvedPrimary });
1304
- result.agentSdkAdded = true;
1305
- }
1306
-
1307
- // ── 1c. Ensure feature flags (sub-agents, memory, etc.) ──
1308
-
1309
- {
1310
- const { toml: updated, added } = ensureFeatureFlags(toml, env);
1311
- if (added.length > 0) {
1312
- toml = updated;
1313
- result.featuresAdded = added;
1314
- }
1315
- }
1316
-
1317
- // ── 1d. Ensure agent thread limits ──────────────────────
1318
-
1319
- {
1320
- const desired = resolveAgentMaxThreads(env);
1321
- const ensured = ensureAgentMaxThreads(toml, {
1322
- maxThreads: desired.value,
1323
- overwrite: desired.explicit,
1324
- });
1325
- if (ensured.changed) {
1326
- toml = ensured.toml;
1327
- result.agentMaxThreads = {
1328
- from: ensured.existing,
1329
- to: ensured.applied,
1330
- explicit: desired.explicit,
1331
- };
1332
- } else if (ensured.skipped && desired.explicit) {
1333
- result.agentMaxThreadsSkipped = desired.raw;
1334
- }
1335
- }
1336
-
1337
- // ── 1e. Ensure sandbox permissions ────────────────────────
1338
-
1339
- {
1340
- const envPerms = env.CODEX_SANDBOX_PERMISSIONS || "";
1341
- const ensured = ensureTopLevelSandboxPermissions(toml, envPerms);
1342
- if (ensured.changed) {
1343
- toml = ensured.toml;
1344
- result.sandboxAdded = true;
1345
- }
1346
- }
1347
-
1348
- // ── 1f. Ensure sandbox workspace-write defaults ───────────
1349
-
1350
- {
1351
- // Determine primary repo root — prefer workspace agent root
1352
- const primaryRepoRoot = env.BOSUN_AGENT_REPO_ROOT || env.REPO_ROOT || "";
1353
- const additionalRoots = [];
1354
- // If agent repo root differs from REPO_ROOT, include both
1355
- if (env.BOSUN_AGENT_REPO_ROOT && env.REPO_ROOT &&
1356
- env.BOSUN_AGENT_REPO_ROOT !== env.REPO_ROOT) {
1357
- additionalRoots.push(env.REPO_ROOT);
1358
- }
1359
- // Enumerate ALL workspace repo paths so every repo's .git/.cache is writable
1360
- try {
1361
- // Inline workspace config read (sync) — avoids async import in sync function
1362
- const configDirGuess = env.BOSUN_DIR || resolve(homedir(), "bosun");
1363
- const bosunConfigPath = resolve(configDirGuess, "bosun.config.json");
1364
- if (existsSync(bosunConfigPath)) {
1365
- const bosunCfg = JSON.parse(readFileSync(bosunConfigPath, "utf8"));
1366
- const wsDir = resolve(configDirGuess, "workspaces");
1367
- const allWs = Array.isArray(bosunCfg.workspaces) ? bosunCfg.workspaces : [];
1368
- for (const ws of allWs) {
1369
- const wsPath = resolve(wsDir, ws.id || ws.name || "");
1370
- for (const repo of ws.repos || []) {
1371
- const repoPath = resolve(wsPath, repo.name);
1372
- if (repoPath && !additionalRoots.includes(repoPath) &&
1373
- repoPath !== primaryRepoRoot) {
1374
- additionalRoots.push(repoPath);
1375
- }
1376
- }
1377
- }
1378
- }
1379
- } catch { /* workspace config read failed — skip */ }
1380
- const ensured = ensureSandboxWorkspaceWrite(toml, {
1381
- writableRoots: env.CODEX_SANDBOX_WRITABLE_ROOTS || "",
1382
- repoRoot: primaryRepoRoot,
1383
- additionalRoots,
1384
- });
1385
- if (ensured.changed) {
1386
- toml = ensured.toml;
1387
- result.sandboxWorkspaceAdded = ensured.added;
1388
- result.sandboxWorkspaceUpdated = !ensured.added;
1389
- result.sandboxWorkspaceRootsAdded = ensured.rootsAdded || [];
1390
- }
1391
-
1392
- // Prune any writable_roots that no longer exist on disk
1393
- const pruned = pruneStaleSandboxRoots(toml);
1394
- if (pruned.changed) {
1395
- toml = pruned.toml;
1396
- result.sandboxWorkspaceUpdated = true;
1397
- result.sandboxStaleRootsRemoved = pruned.removed;
1398
- }
1399
- }
1400
-
1401
- // ── 1g. Ensure shell environment policy ───────────────────
1402
-
1403
- if (!hasShellEnvPolicy(toml)) {
1404
- const policy = env.CODEX_SHELL_ENV_POLICY || "all";
1405
- toml += buildShellEnvPolicy(policy);
1406
- result.shellEnvAdded = true;
1407
- }
1408
-
1409
- // ── 1f. Ensure common MCP servers ───────────────────────────
1410
-
1411
- {
1412
- const missing = [];
1413
- if (!hasContext7Mcp(toml)) missing.push("context7");
1414
- if (!hasNamedMcpServer(toml, "sequential-thinking")) {
1415
- missing.push("sequential-thinking");
1416
- }
1417
- if (!hasNamedMcpServer(toml, "playwright")) missing.push("playwright");
1418
- if (!hasMicrosoftDocsMcp(toml)) missing.push("microsoft-docs");
1419
-
1420
- if (missing.length > 0) {
1421
- if (missing.length >= 4) {
1422
- toml += buildCommonMcpBlocks();
1423
- } else {
1424
- if (missing.includes("context7")) {
1425
- toml += [
1426
- "",
1427
- "[mcp_servers.context7]",
1428
- 'command = "npx"',
1429
- 'args = ["-y", "@upstash/context7-mcp"]',
1430
- "",
1431
- ].join("\n");
1432
- }
1433
- if (missing.includes("sequential-thinking")) {
1434
- toml += [
1435
- "",
1436
- "[mcp_servers.sequential-thinking]",
1437
- 'command = "npx"',
1438
- 'args = ["-y", "@modelcontextprotocol/server-sequential-thinking"]',
1439
- "",
1440
- ].join("\n");
1441
- }
1442
- if (missing.includes("playwright")) {
1443
- toml += [
1444
- "",
1445
- "[mcp_servers.playwright]",
1446
- 'command = "npx"',
1447
- 'args = ["-y", "@playwright/mcp@latest"]',
1448
- "",
1449
- ].join("\n");
1450
- }
1451
- if (missing.includes("microsoft-docs")) {
1452
- toml += [
1453
- "",
1454
- "[mcp_servers.microsoft-docs]",
1455
- 'url = "https://learn.microsoft.com/api/mcp"',
1456
- "",
1457
- ].join("\n");
1458
- }
1459
- }
1460
- result.commonMcpAdded = true;
1461
- }
1462
- }
1463
-
1464
- // ── 2. Audit and fix stream timeouts ──────────────────────
1465
-
1466
- {
1467
- const ensured = ensureModelProviderSectionsFromEnv(toml, env);
1468
- toml = ensured.toml;
1469
- result.profileProvidersAdded = ensured.added;
1470
- }
1471
-
1472
- const timeouts = auditStreamTimeouts(toml);
1473
- for (const t of timeouts) {
1474
- if (t.needsUpdate) {
1475
- toml = setStreamTimeout(toml, t.provider, t.recommended);
1476
- result.timeoutsFixed.push({
1477
- provider: t.provider,
1478
- from: t.currentValue,
1479
- to: t.recommended,
1480
- });
1481
- }
1482
- }
1483
-
1484
- // ── 3. Ensure retry settings ──────────────────────────────
1485
-
1486
- for (const t of timeouts) {
1487
- const before = toml;
1488
- toml = ensureRetrySettings(toml, t.provider);
1489
- if (toml !== before) {
1490
- result.retriesAdded.push(t.provider);
1491
- }
1492
- }
1493
-
1494
- // ── Check if anything changed ─────────────────────────────
1495
-
1496
- const original = readCodexConfig();
1497
- if (toml === original && !result.created) {
1498
- result.noChanges = true;
1499
- return result;
1500
- }
1501
-
1502
- // ── Write ─────────────────────────────────────────────────
1503
-
1504
- if (!dryRun) {
1505
- writeCodexConfig(toml);
1506
- }
1507
-
1508
1243
  return result;
1509
1244
  }
1510
1245
 
package/config-doctor.mjs CHANGED
@@ -578,7 +578,7 @@ export function runConfigDoctor(options = {}) {
578
578
  }
579
579
 
580
580
  // ── Codex config.toml feature flag / sub-agent checks ──────────────────────
581
- const codexConfigToml = join(homedir(), ".codex", "config.toml");
581
+ const codexConfigToml = join(repoRoot, ".codex", "config.toml");
582
582
  if (existsSync(codexConfigToml)) {
583
583
  const toml = readFileSync(codexConfigToml, "utf-8");
584
584
  if (!/^\[features\]/m.test(toml)) {
@@ -592,14 +592,14 @@ export function runConfigDoctor(options = {}) {
592
592
  issues.warnings.push({
593
593
  code: "CODEX_NO_CHILD_AGENTS",
594
594
  message: "child_agents_md not enabled — Codex cannot spawn sub-agents or discover CODEX.md.",
595
- fix: 'Add child_agents_md = true under [features] in ~/.codex/config.toml',
595
+ fix: 'Add child_agents_md = true under [features] in .codex/config.toml',
596
596
  });
597
597
  }
598
598
  if (!/memory_tool\s*=\s*true/i.test(toml)) {
599
599
  issues.warnings.push({
600
600
  code: "CODEX_NO_MEMORY",
601
601
  message: "memory_tool not enabled — Codex has no persistent memory across sessions.",
602
- fix: 'Add memory_tool = true under [features] in ~/.codex/config.toml',
602
+ fix: 'Add memory_tool = true under [features] in .codex/config.toml',
603
603
  });
604
604
  }
605
605
  }
@@ -613,13 +613,6 @@ export function runConfigDoctor(options = {}) {
613
613
  fix: "Run bosun --setup to auto-configure sandbox permissions",
614
614
  });
615
615
  }
616
- if (!/^\[sandbox_workspace_write\]/m.test(toml)) {
617
- issues.warnings.push({
618
- code: "CODEX_NO_SANDBOX_WORKSPACE",
619
- message: "No [sandbox_workspace_write] section in Codex config — workspace-write roots may be missing.",
620
- fix: "Run bosun --setup to add workspace-write defaults (writable_roots, network_access).",
621
- });
622
- }
623
616
  if (
624
617
  isUserNamespaceDisabled() &&
625
618
  /use_linux_sandbox_bwrap\s*=\s*true/i.test(toml)
@@ -627,14 +620,14 @@ export function runConfigDoctor(options = {}) {
627
620
  issues.warnings.push({
628
621
  code: "CODEX_BWRAP_DISABLED",
629
622
  message: "Bubblewrap sandbox is enabled but unprivileged user namespaces appear disabled.",
630
- fix: "Set CODEX_FEATURES_BWRAP=false and re-run bosun --setup (or edit ~/.codex/config.toml [features]).",
623
+ fix: "Set CODEX_FEATURES_BWRAP=false and re-run bosun --setup (or edit .codex/config.toml [features]).",
631
624
  });
632
625
  }
633
626
  } else {
634
627
  issues.warnings.push({
635
628
  code: "CODEX_CONFIG_MISSING",
636
- message: "~/.codex/config.toml not found — Codex CLI may not be configured.",
637
- fix: "Run bosun --setup or 'codex --setup' to create initial config",
629
+ message: "repo-level .codex/config.toml not found — Codex CLI may not be configured for this workspace.",
630
+ fix: "Run bosun --setup to create initial config",
638
631
  });
639
632
  }
640
633