@fitlab-ai/agent-infra 0.4.4 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/README.md +16 -2
  2. package/README.zh-CN.md +16 -2
  3. package/bin/cli.js +19 -0
  4. package/lib/defaults.json +17 -0
  5. package/lib/init.js +1 -0
  6. package/lib/log.js +5 -10
  7. package/lib/merge.js +465 -0
  8. package/lib/sandbox/commands/create.js +1047 -0
  9. package/lib/sandbox/commands/enter.js +31 -0
  10. package/lib/sandbox/commands/ls.js +70 -0
  11. package/lib/sandbox/commands/rebuild.js +102 -0
  12. package/lib/sandbox/commands/rm.js +211 -0
  13. package/lib/sandbox/commands/vm.js +101 -0
  14. package/lib/sandbox/config.js +79 -0
  15. package/lib/sandbox/constants.js +113 -0
  16. package/lib/sandbox/dockerfile.js +95 -0
  17. package/lib/sandbox/engine.js +93 -0
  18. package/lib/sandbox/index.js +64 -0
  19. package/lib/sandbox/runtimes/ai-tools.dockerfile +26 -0
  20. package/lib/sandbox/runtimes/base.dockerfile +30 -0
  21. package/lib/sandbox/runtimes/java17.dockerfile +3 -0
  22. package/lib/sandbox/runtimes/java21.dockerfile +3 -0
  23. package/lib/sandbox/runtimes/node20.dockerfile +3 -0
  24. package/lib/sandbox/runtimes/node22.dockerfile +3 -0
  25. package/lib/sandbox/runtimes/python3.dockerfile +3 -0
  26. package/lib/sandbox/shell.js +48 -0
  27. package/lib/sandbox/task-resolver.js +35 -0
  28. package/lib/sandbox/tools.js +131 -0
  29. package/lib/update.js +16 -2
  30. package/package.json +5 -1
  31. package/templates/.agents/rules/commit-and-pr.md +30 -0
  32. package/templates/.agents/rules/commit-and-pr.zh-CN.md +30 -0
  33. package/templates/.agents/rules/issue-sync.md +12 -2
  34. package/templates/.agents/rules/issue-sync.zh-CN.md +12 -2
  35. package/templates/.agents/rules/task-management.md +28 -0
  36. package/templates/.agents/rules/task-management.zh-CN.md +28 -0
  37. package/templates/.agents/scripts/validate-artifact.js +40 -0
  38. package/templates/.agents/skills/archive-tasks/SKILL.md +6 -3
  39. package/templates/.agents/skills/archive-tasks/SKILL.zh-CN.md +6 -3
  40. package/templates/.agents/skills/archive-tasks/scripts/archive-tasks.sh +91 -8
  41. package/templates/.agents/skills/create-task/SKILL.md +6 -0
  42. package/templates/.agents/skills/create-task/SKILL.zh-CN.md +6 -0
  43. package/templates/.agents/skills/create-task/config/verify.json +1 -0
  44. package/templates/.agents/skills/import-issue/SKILL.md +2 -0
  45. package/templates/.agents/skills/import-issue/SKILL.zh-CN.md +2 -0
  46. package/templates/.agents/skills/import-issue/config/verify.json +1 -0
  47. package/templates/.agents/skills/update-agent-infra/scripts/sync-templates.js +18 -1
  48. package/templates/.agents/templates/task.md +5 -4
  49. package/templates/.agents/templates/task.zh-CN.md +5 -4
@@ -0,0 +1,1047 @@
1
+ import fs from 'node:fs';
2
+ import path from 'node:path';
3
+ import { createHash } from 'node:crypto';
4
+ import { execFileSync } from 'node:child_process';
5
+ import { parseArgs } from 'node:util';
6
+ import * as p from '@clack/prompts';
7
+ import pc from 'picocolors';
8
+ import { loadConfig } from '../config.js';
9
+ import {
10
+ assertValidBranchName,
11
+ containerName,
12
+ containerNameCandidates,
13
+ parsePositiveIntegerOption,
14
+ sandboxBranchLabel,
15
+ sandboxImageConfigLabel,
16
+ sandboxLabel,
17
+ worktreeDirCandidates
18
+ } from '../constants.js';
19
+ import { prepareDockerfile } from '../dockerfile.js';
20
+ import { ensureDocker } from '../engine.js';
21
+ import { run, runOk, runSafe, runVerbose } from '../shell.js';
22
+ import { resolveTaskBranch } from '../task-resolver.js';
23
+ import { resolveTools, toolConfigDirCandidates, toolNpmPackagesArg } from '../tools.js';
24
+
25
+ const OPENCODE_YOLO_PERMISSION = '{"*":"allow","read":"allow","bash":"allow","edit":"allow","webfetch":"allow","external_directory":"allow","doom_loop":"allow"}';
26
+ const SANDBOX_ALIAS_BLOCK_BEGIN = '# >>> agent-infra managed aliases >>>';
27
+ const SANDBOX_ALIAS_BLOCK_END = '# <<< agent-infra managed aliases <<<';
28
+ const SANDBOX_ALIAS_NAMES = [
29
+ 'claude-yolo',
30
+ 'opencode-yolo',
31
+ 'codex-yolo',
32
+ 'gemini-yolo',
33
+ 'cy',
34
+ 'oy',
35
+ 'xy',
36
+ 'gy'
37
+ ];
38
+ const DEFAULT_SANDBOX_ALIASES = `alias claude-yolo='claude --dangerously-skip-permissions; tput ed'
39
+ alias opencode-yolo='OPENCODE_PERMISSION='\\''${OPENCODE_YOLO_PERMISSION}'\\'' opencode; tput ed'
40
+ alias codex-yolo='codex --yolo; tput ed'
41
+ alias gemini-yolo='gemini --yolo; tput ed'
42
+
43
+ alias cy='claude --dangerously-skip-permissions; tput ed'
44
+ alias oy='OPENCODE_PERMISSION='\\''${OPENCODE_YOLO_PERMISSION}'\\'' opencode; tput ed'
45
+ alias xy='codex --yolo; tput ed'
46
+ alias gy='gemini --yolo; tput ed'
47
+ `;
48
+ const CONTAINER_HOME = '/home/devuser';
49
+ const USAGE = `Usage: ai sandbox create <branch> [base] [--cpu <n>] [--memory <n>]
50
+
51
+ Host aliases:
52
+ ${'~'}/.ai-sandbox-aliases is auto-created on first run and synced to
53
+ ${CONTAINER_HOME}/.bash_aliases inside the sandbox container.`;
54
+
55
// Derive a short, stable identifier for the sandbox image from everything
// that affects its contents: the prepared Dockerfile's signature plus the
// ordered list of npm packages installed for the configured AI tools.
function buildSignature(preparedDockerfile, tools) {
  const fingerprintInput = JSON.stringify({
    dockerfile: preparedDockerfile.signature,
    tools: tools.map(({ npmPackage }) => npmPackage)
  });
  const digest = createHash('sha256').update(fingerprintInput).digest('hex');
  return digest.slice(0, 12);
}
64
+
65
// For each tool, pick the host config directory to mount into the sandbox:
// the first candidate path that already exists on disk, falling back to the
// primary candidate when none of them exist yet.
function resolveToolDirs(config, tools, branch) {
  const resolved = [];
  for (const tool of tools) {
    const candidates = toolConfigDirCandidates(tool, config.project, branch);
    const existing = candidates.find((dir) => fs.existsSync(dir));
    resolved.push({ tool, dir: existing ?? candidates[0] });
  }
  return resolved;
}
74
+
75
// Map configured runtime identifiers to the smoke-test commands used to
// verify that the built image actually provides each runtime.
function runtimeChecks(runtimes) {
  const hasPrefix = (prefix) => runtimes.some((runtime) => runtime.startsWith(prefix));
  const checks = [];
  if (hasPrefix('node')) {
    checks.push({ name: 'Node.js', cmd: ['node', '--version'] });
  }
  if (hasPrefix('java')) {
    checks.push(
      { name: 'Java', cmd: ['java', '-version'] },
      { name: 'Maven', cmd: ['mvn', '--version'] }
    );
  }
  if (runtimes.includes('python3')) {
    checks.push({ name: 'Python', cmd: ['python3', '--version'] });
  }
  return checks;
}
89
+
90
// Report whether the host .gitconfig enables commit/tag signing or declares
// any [gpg] / [gpg "..."] section — meaning GPG material must be provisioned
// inside the sandbox container.
export function detectGpgConfig(gitconfig) {
  if (/\bgpgsign\s*=\s*true\b/i.test(gitconfig)) {
    return true;
  }
  return /^\s*\[gpg(?:\s|"|\])/im.test(gitconfig);
}
93
+
94
/**
 * Rewrite the host's ~/.gitconfig for use inside the sandbox container.
 *
 * Transformations:
 *  - host home-directory paths are rewritten to the container user's home;
 *  - Sourcetree difftool/mergetool sections are removed (they reference
 *    host-only binaries);
 *  - inside any [gpg] section, `program = ...` lines are dropped (the host's
 *    gpg binary path is invalid in the container);
 *  - with `stripGpg: true`, [gpg] sections are removed entirely (used when
 *    no GPG keys are mounted into the container).
 *
 * @param {string} gitconfig - raw host .gitconfig contents
 * @param {string} home - host home directory to rewrite
 * @param {{stripGpg?: boolean}} [options]
 * @returns {string} sanitized config joined with '\n'
 */
export function sanitizeGitConfig(gitconfig, home, { stripGpg = false } = {}) {
  const lines = gitconfig
    .replaceAll(home, CONTAINER_HOME)
    // `[^\[]*` consumes the section body up to the next section header.
    // NOTE(review): a literal '[' inside a value would end the match early —
    // presumably rare in practice; verify if Sourcetree configs contain one.
    .replace(/\[difftool "sourcetree"\][^\[]*/gs, '')
    .replace(/\[mergetool "sourcetree"\][^\[]*/gs, '')
    .split(/\r?\n/);

  const sanitized = [];
  // Tracks whether the current line belongs to a [gpg] or [gpg "..."] section.
  let inGpgSection = false;

  for (const line of lines) {
    const sectionMatch = line.match(/^\s*\[([^\]]+)\]\s*$/);
    if (sectionMatch) {
      // Every section header resets the flag; only gpg sections set it.
      inGpgSection = /^gpg(?:\s+"[^"]+")?$/i.test(sectionMatch[1].trim());
      if (stripGpg && inGpgSection) {
        // Drop the [gpg] header itself when stripping.
        continue;
      }
      sanitized.push(line);
      continue;
    }

    if (inGpgSection) {
      if (stripGpg) {
        // Drop the whole section body when stripping.
        continue;
      }
      // Keep the section but drop host-specific gpg program paths.
      if (/^\s*program\s*=.*$/i.test(line)) {
        continue;
      }
    }

    sanitized.push(line);
  }

  return sanitized.join('\n');
}
129
+
130
// Host location of the per-project cache for exported GPG key material.
function gpgCacheDir(home, project) {
  const dirName = `.${project}-gpg-cache`;
  return path.join(home, dirName);
}
133
+
134
// Normalize a git signing-key value: trim whitespace and collapse empty or
// non-string values to null so equality comparisons are uniform.
function normalizeSigningKey(signingKey) {
  if (typeof signingKey !== 'string') {
    return null;
  }
  const trimmed = signingKey.trim();
  if (trimmed === '') {
    return null;
  }
  return trimmed;
}

// Canonicalize a worktree path for set-membership comparison: resolve
// symlinks when the path exists, otherwise fall back to a plain absolute
// resolution. Empty/nullish input maps to ''.
function normalizeWorktreePath(worktreePath) {
  if (!worktreePath) {
    return '';
  }
  try {
    if (fs.existsSync(worktreePath)) {
      return fs.realpathSync(worktreePath);
    }
  } catch {
    // fall through to plain resolution below
  }
  return path.resolve(worktreePath);
}

// Read the configured git user.signingKey — repo-local when `repoPath` is
// given, otherwise from the user's global config. Returns the trimmed key,
// or null when unset, empty, or when git fails. `home` is required so git
// resolves the right global config; `execFn` is injectable for tests.
export function getGitSigningKey({ home, repoPath = null, execFn = execFileSync } = {}) {
  if (!home) {
    return null;
  }
  const gitArgs = repoPath
    ? ['-C', repoPath, 'config', 'user.signingKey']
    : ['config', '--global', 'user.signingKey'];
  try {
    const output = execFn('git', gitArgs, {
      encoding: 'utf8',
      env: { ...process.env, HOME: home },
      stdio: ['ignore', 'pipe', 'pipe']
    });
    return normalizeSigningKey(output);
  } catch {
    return null;
  }
}
175
+
176
// Hash the host's secret-keyring listing so we can detect when the keyring
// changed and a cached key export is stale. Returns null when gpg is
// unavailable, fails, or reports no secret keys.
export function currentKeyringFingerprint(home, execFn = execFileSync) {
  try {
    const keyring = execFn('gpg', ['--list-secret-keys', '--with-colons'], {
      encoding: 'utf8',
      env: { ...process.env, HOME: home },
      stdio: ['ignore', 'pipe', 'pipe']
    });
    if (!keyring || keyring.trim() === '') {
      return null;
    }
    return createHash('sha256').update(keyring).digest('hex');
  } catch {
    return null;
  }
}
192
+
193
// Load previously exported GPG keys from the per-project cache. The cache is
// only trusted when its recorded signing key matches the requested one AND
// its stored keyring fingerprint still matches the live keyring; any
// mismatch, empty file, or read/parse failure yields null so the caller
// re-exports from gpg.
export function readGpgCache(home, project, execFn = execFileSync, signingKey = null) {
  const cacheDir = gpgCacheDir(home, project);
  try {
    const state = JSON.parse(fs.readFileSync(path.join(cacheDir, 'state.json'), 'utf8'));
    const fingerprint = state?.fingerprint;
    if (typeof fingerprint !== 'string' || fingerprint === '') {
      return null;
    }
    // Cached for a different signing key — unusable.
    if (normalizeSigningKey(state?.signingKey) !== normalizeSigningKey(signingKey)) {
      return null;
    }
    // Keyring changed since the export — stale.
    if (currentKeyringFingerprint(home, execFn) !== fingerprint) {
      return null;
    }

    const pub = fs.readFileSync(path.join(cacheDir, 'public.asc'));
    const sec = fs.readFileSync(path.join(cacheDir, 'secret.asc'));
    if (pub.length === 0 || sec.length === 0) {
      return null;
    }
    return { pub, sec };
  } catch {
    return null;
  }
}
224
+
225
// Persist exported GPG keys plus the keyring fingerprint (and the signing
// key they were exported for, when any) so later sandbox creates can reuse
// them. Directory is 0700, every file 0600 (chmod repeated after write to
// override the process umask). Returns false when `fingerprint` is missing
// or any filesystem operation fails.
export function writeGpgCache(home, project, pub, sec, fingerprint, signingKey = null) {
  if (!fingerprint) {
    return false;
  }

  const cacheDir = gpgCacheDir(home, project);
  const state = { fingerprint };
  const normalizedSigningKey = normalizeSigningKey(signingKey);
  if (normalizedSigningKey) {
    state.signingKey = normalizedSigningKey;
  }

  // Write a file and force owner-only permissions regardless of umask.
  const writeOwnerOnly = (filePath, contents) => {
    fs.writeFileSync(filePath, contents, { mode: 0o600 });
    fs.chmodSync(filePath, 0o600);
  };

  try {
    fs.mkdirSync(cacheDir, { recursive: true, mode: 0o700 });
    fs.chmodSync(cacheDir, 0o700);
    writeOwnerOnly(path.join(cacheDir, 'public.asc'), pub);
    writeOwnerOnly(path.join(cacheDir, 'secret.asc'), sec);
    writeOwnerOnly(path.join(cacheDir, 'state.json'), `${JSON.stringify(state, null, 2)}\n`);
    return true;
  } catch {
    return false;
  }
}
259
+
260
/**
 * Provision the host's GPG keys inside a running sandbox container.
 *
 * Keys come from the per-project cache when it is still valid; otherwise
 * they are exported from the host keyring (scoped to the configured
 * signing key when one exists) and the cache is refreshed. The exported
 * public and secret keys are then piped into `gpg --import` inside the
 * container and gpg-agent is launched.
 *
 * @param {string} container - target container name
 * @param {string} home - host home directory (sets HOME for gpg/git)
 * @param {string} project - project name, keys the cache location
 * @param {Function} [execFn] - execFileSync-compatible runner (injectable)
 * @param {Function} [runSafeFn] - best-effort runner (injectable)
 * @param {object} [options] - { cachedOverride, repoPath, signingKey }
 * @returns {boolean} false when the host keyring yields no keys, else true
 */
export function syncGpgKeys(
  container,
  home,
  project,
  execFn = execFileSync,
  runSafeFn = runSafe,
  options = {}
) {
  const {
    cachedOverride = null,
    repoPath = null,
    signingKey: signingKeyOverride
  } = options;
  const hostEnv = { ...process.env, HOME: home };
  let signingKey = normalizeSigningKey(signingKeyOverride);
  // An explicitly passed `signingKey` option (even null) counts as resolved;
  // otherwise it is looked up lazily via `git config`.
  let resolvedSigningKey = Object.hasOwn(options, 'signingKey');
  // Allow callers to supply a pre-computed cache read so we don't re-invoke
  // `gpg --list-secret-keys` just to decide the progress message.
  if (cachedOverride === null && !resolvedSigningKey) {
    signingKey = getGitSigningKey({ repoPath, home, execFn });
    resolvedSigningKey = true;
  }
  const cached = cachedOverride ?? readGpgCache(home, project, execFn, signingKey);
  let pubKeys = cached?.pub ?? null;
  let secKeys = cached?.sec ?? null;

  // When a cachedOverride was supplied but turned out unusable, the signing
  // key may still be unresolved — resolve it before exporting.
  if (!cached && !resolvedSigningKey) {
    signingKey = getGitSigningKey({ repoPath, home, execFn });
    resolvedSigningKey = true;
  }

  if (!cached) {
    // Scope the export to the configured signing key when known; otherwise
    // export the whole keyring.
    const exportArgs = signingKey ? ['--export', signingKey] : ['--export'];
    const exportSecretArgs = signingKey
      ? ['--export-secret-keys', signingKey]
      : ['--export-secret-keys'];

    pubKeys = execFn('gpg', exportArgs, {
      env: hostEnv,
      stdio: ['ignore', 'pipe', 'pipe']
    });
    if (!pubKeys || pubKeys.length === 0) {
      return false;
    }

    secKeys = execFn('gpg', exportSecretArgs, {
      env: hostEnv,
      stdio: ['ignore', 'pipe', 'pipe']
    });
    if (!secKeys || secKeys.length === 0) {
      return false;
    }

    // Cache best-effort: a failed cache write only costs a re-export (and
    // possibly a passphrase prompt) on the next sandbox create.
    const fingerprint = currentKeyringFingerprint(home, execFn);
    if (fingerprint) {
      const written = writeGpgCache(home, project, pubKeys, secKeys, fingerprint, signingKey);
      if (!written) {
        process.stderr.write(
          'Warning: failed to cache GPG keys; next sandbox create may prompt again.\n'
        );
      }
    }
  }

  // Import public first, then secret (batch mode avoids pinentry prompts
  // inside the container).
  execFn('docker', ['exec', '-i', container, 'gpg', '--import'], {
    input: pubKeys,
    stdio: ['pipe', 'pipe', 'pipe']
  });
  execFn('docker', ['exec', '-i', container, 'gpg', '--batch', '--import'], {
    input: secKeys,
    stdio: ['pipe', 'pipe', 'pipe']
  });

  // Best-effort agent start; signing will lazily start it anyway.
  runSafeFn('docker', ['exec', container, 'gpgconf', '--launch', 'gpg-agent']);
  return true;
}
336
+
337
/**
 * Copy a sanitized version of the host's git configuration into the
 * container, mark the workspace directories as safe, and mirror auxiliary
 * git files (.gitignore_global, .stCommitMsg) when present on the host.
 * No-op when the host has no ~/.gitconfig. With `gpgMounted: false`, all
 * GPG-related settings are stripped/unset so git in the container never
 * tries to sign.
 */
function syncGitConfig(container, repoRoot, home, { gpgMounted = false } = {}) {
  const gitconfigPath = path.join(home, '.gitconfig');
  if (!fs.existsSync(gitconfigPath)) {
    return;
  }

  const gitconfig = sanitizeGitConfig(fs.readFileSync(gitconfigPath, 'utf8'), home, {
    stripGpg: !gpgMounted
  });

  // Stream the sanitized config over stdin rather than `docker cp` so the
  // file lands already transformed.
  execFileSync('docker', ['exec', '-i', container, 'sh', '-c', `cat > ${CONTAINER_HOME}/.gitconfig`], {
    input: gitconfig,
    stdio: ['pipe', 'pipe', 'pipe']
  });

  if (!gpgMounted) {
    // Belt-and-braces: also unset signing keys the sanitizer may have kept
    // (e.g. values merged from includes). Best-effort via runSafe.
    for (const key of ['commit.gpgsign', 'tag.gpgsign', 'user.signingKey']) {
      runSafe('docker', ['exec', container, 'git', 'config', '--global', '--unset-all', key]);
    }
  }

  // The repo is bind-mounted with host ownership; whitelist both the mount
  // point and the original host path for git's dubious-ownership check.
  runSafe('docker', ['exec', container, 'git', 'config', '--global', '--add', 'safe.directory', '/workspace']);
  runSafe('docker', ['exec', container, 'git', 'config', '--global', '--add', 'safe.directory', repoRoot]);

  for (const file of ['.gitignore_global', '.stCommitMsg']) {
    const hostFile = path.join(home, file);
    if (fs.existsSync(hostFile)) {
      runSafe('docker', ['cp', hostFile, `${container}:${CONTAINER_HOME}/${file}`]);
    }
  }
}
368
+
369
// Copy the host's ~/.ai-sandbox-aliases into the container as
// ~/.bash_aliases. Returns false (doing nothing) when the host file does
// not exist; `execDocker` is injectable for tests.
export function syncShellAliases(container, home, execDocker = execFileSync) {
  const hostAliases = sandboxAliasesPath(home);
  if (!fs.existsSync(hostAliases)) {
    return false;
  }

  const contents = fs.readFileSync(hostAliases, 'utf8');
  execDocker('docker', ['exec', '-i', container, 'sh', '-c', `cat > ${CONTAINER_HOME}/.bash_aliases`], {
    input: contents,
    stdio: ['pipe', 'pipe', 'pipe']
  });
  return true;
}
382
+
383
// Build the `docker`-style `-e KEY=VALUE` argument list for the container:
// every tool-declared environment variable, plus GH_TOKEN when the host's
// `gh` CLI has an auth token available (runSafeCommand returns falsy
// otherwise).
export function buildContainerEnvArgs(resolvedTools, runSafeCommand = runSafe) {
  const envArgs = [];
  for (const { tool } of resolvedTools) {
    for (const [key, value] of Object.entries(tool.envVars ?? {})) {
      envArgs.push('-e', `${key}=${value}`);
    }
  }

  const ghToken = runSafeCommand('gh', ['auth', 'token']);
  if (ghToken) {
    envArgs.push('-e', `GH_TOKEN=${ghToken}`);
  }
  return envArgs;
}
393
+
394
// Ensure `branch` is not already checked out in some other worktree of the
// repository. Worktrees listed in `allowedWorktrees` (e.g. the sandbox's
// own worktree path) are exempt. Throws a descriptive error on conflict;
// returns silently when `git worktree list` produces no output.
export function assertBranchAvailable(
  repoRoot,
  branch,
  { allowedWorktrees = [], runFn = runSafe } = {}
) {
  const allowed = new Set(allowedWorktrees.map((worktree) => normalizeWorktreePath(worktree)));
  const porcelain = runFn('git', ['-C', repoRoot, 'worktree', 'list', '--porcelain']);
  if (!porcelain) {
    return;
  }

  // Porcelain format: a `worktree <path>` line precedes that worktree's
  // `branch refs/heads/<name>` line, so track the most recent path.
  let worktreePath = '';
  for (const line of porcelain.split('\n')) {
    if (line.startsWith('worktree ')) {
      worktreePath = line.slice('worktree '.length).trim();
    } else if (line.startsWith('branch refs/heads/')) {
      const checkedOutBranch = line.slice('branch refs/heads/'.length).trim();
      if (checkedOutBranch !== branch) {
        continue;
      }
      if (allowed.has(normalizeWorktreePath(worktreePath))) {
        continue;
      }
      throw new Error(
        `Branch '${branch}' is already checked out at '${worktreePath}'.\n`
        + `Use a different branch name, or run 'git switch <other>' in that worktree first.`
      );
    }
  }
}
427
+
428
// Pre-seed Claude Code's .claude.json so the container session skips the
// first-run onboarding flow and the /workspace trust dialog. Malformed
// existing JSON is discarded; the file is only rewritten when something
// actually changed.
export function ensureClaudeOnboarding(toolDir) {
  const claudeJsonPath = path.join(toolDir, '.claude.json');
  let data = {};
  if (fs.existsSync(claudeJsonPath)) {
    try {
      data = JSON.parse(fs.readFileSync(claudeJsonPath, 'utf8'));
    } catch {
      // malformed JSON, start fresh
    }
  }

  let dirty = false;
  // Set obj[key] = value when the current value is falsy, tracking changes.
  const setIfMissing = (obj, key, value) => {
    if (!obj[key]) {
      obj[key] = value;
      dirty = true;
    }
  };

  setIfMissing(data, 'hasCompletedOnboarding', true);
  setIfMissing(data, 'projects', {});
  setIfMissing(data.projects, '/workspace', {});
  setIfMissing(data.projects['/workspace'], 'hasTrustDialogAccepted', true);

  if (dirty) {
    fs.writeFileSync(claudeJsonPath, JSON.stringify(data, null, 4), 'utf8');
  }
}
459
+
460
// Pre-seed Claude Code's settings.json so --dangerously-skip-permissions
// does not prompt inside the sandbox. Malformed existing JSON is discarded;
// the file is only written when the flag is not already set.
export function ensureClaudeSettings(toolDir) {
  const settingsPath = path.join(toolDir, 'settings.json');
  let settings = {};
  if (fs.existsSync(settingsPath)) {
    try {
      settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
    } catch {
      // malformed JSON, start fresh
    }
  }

  if (settings.skipDangerousModePermissionPrompt === true) {
    return;
  }
  settings.skipDangerousModePermissionPrompt = true;
  fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 4), 'utf8');
}
475
+
476
// Mark /workspace as trusted in Codex's config.toml so the sandbox session
// skips the workspace-approval prompt. Appends the projects entry only when
// it is not already present (idempotent).
export function ensureCodexWorkspaceTrust(toolDir) {
  const configPath = path.join(toolDir, 'config.toml');
  const existing = fs.existsSync(configPath) ? fs.readFileSync(configPath, 'utf8') : '';
  if (existing.includes('[projects."/workspace"]')) {
    return;
  }
  const trustEntry = '\n[projects."/workspace"]\ntrust_level = "trusted"\n';
  fs.writeFileSync(configPath, `${existing}${trustEntry}`, 'utf8');
}
487
+
488
// Mark /workspace as trusted in Gemini CLI's trustedFolders.json so the
// sandbox session skips the folder-trust prompt. Malformed existing JSON is
// discarded; the file is only rewritten when the entry is missing or wrong.
export function ensureGeminiWorkspaceTrust(toolDir) {
  const trustPath = path.join(toolDir, 'trustedFolders.json');
  let trusted = {};
  if (fs.existsSync(trustPath)) {
    try {
      trusted = JSON.parse(fs.readFileSync(trustPath, 'utf8'));
    } catch {
      // malformed JSON, start fresh
    }
  }

  if (trusted['/workspace'] === 'TRUST_FOLDER') {
    return;
  }
  trusted['/workspace'] = 'TRUST_FOLDER';
  fs.writeFileSync(trustPath, JSON.stringify(trusted, null, 2), 'utf8');
}
503
+
504
// Scopes that must be present for the OAuth blob to be usable by Claude
// Code inside the sandbox.
const CLAUDE_REQUIRED_SCOPES = ['user:profile', 'user:sessions:claude_code'];

// True when the parsed credentials JSON (either the raw payload or its
// `claudeAiOauth` wrapper) carries both tokens and every required scope.
// Shared by the keychain and credentials-file branches below, which
// previously duplicated this validation verbatim.
function isUsableClaudeCredentials(parsed) {
  const payload = parsed?.claudeAiOauth ?? parsed;
  const scopes = Array.isArray(payload?.scopes) ? payload.scopes : [];
  return Boolean(payload?.accessToken)
    && Boolean(payload?.refreshToken)
    && CLAUDE_REQUIRED_SCOPES.every((scope) => scopes.includes(scope));
}

/**
 * Extract the Claude Code OAuth credentials blob from the host.
 *
 * On macOS the blob lives in the login keychain (the darwin branch never
 * falls through to the file path); elsewhere it is read from
 * ~/.claude/.credentials.json. Returns the raw JSON string (trimmed on
 * darwin, as stored on disk otherwise), or null when credentials are
 * missing, unreadable, unparsable, or lack the required tokens/scopes.
 *
 * @param {string} home - host home directory
 * @param {Function} [execFn] - execFileSync-compatible runner (injectable)
 * @returns {string|null}
 */
export function extractClaudeCredentialsBlob(home, execFn = execFileSync) {
  if (process.platform === 'darwin') {
    try {
      // The keychain item is stored under the user's account name.
      const keychainAccount = path.basename(home);
      const credentials = execFn('security', [
        'find-generic-password',
        '-a',
        keychainAccount,
        '-s',
        'Claude Code-credentials',
        '-w'
      ], {
        encoding: 'utf8',
        stdio: ['ignore', 'pipe', 'pipe']
      });
      const trimmed = typeof credentials === 'string' ? credentials.trim() : '';
      if (!trimmed || !isUsableClaudeCredentials(JSON.parse(trimmed))) {
        return null;
      }
      return trimmed;
    } catch {
      return null;
    }
  }

  const credentialsPath = path.join(home, '.claude', '.credentials.json');
  if (!fs.existsSync(credentialsPath)) {
    return null;
  }

  try {
    const raw = fs.readFileSync(credentialsPath, 'utf8');
    return isUsableClaudeCredentials(JSON.parse(raw)) ? raw : null;
  } catch {
    return null;
  }
}
558
+
559
// Host directory holding the shared Claude Code credentials file for this
// project's sandboxes.
export function claudeCredentialsDir(home, project) {
  return path.join(home, `.${project}-claude-credentials`);
}

// Full path of the shared credentials file inside claudeCredentialsDir().
export function claudeCredentialsPath(home, project) {
  return path.join(claudeCredentialsDir(home, project), '.credentials.json');
}

// Write the OAuth credentials blob to the shared per-project location,
// forcing owner-only permissions (0700 dir / 0600 file) regardless of umask.
export function writeClaudeCredentialsFile(home, project, blob) {
  const dir = claudeCredentialsDir(home, project);
  fs.mkdirSync(dir, { recursive: true, mode: 0o700 });
  fs.chmodSync(dir, 0o700);

  const filePath = claudeCredentialsPath(home, project);
  fs.writeFileSync(filePath, blob, { mode: 0o600 });
  fs.chmodSync(filePath, 0o600);
}
576
+
577
// Fail fast when the sandbox includes claude-code but the host has no usable
// OAuth credentials: extract the blob (via extractFn) and persist it to the
// shared per-project file (via writeFn). No-op when claude-code is not among
// the resolved tools; throws with remediation steps when extraction fails.
export function assertClaudeCredentialsAvailable(
  home,
  project,
  resolvedTools,
  extractFn = extractClaudeCredentialsBlob,
  writeFn = writeClaudeCredentialsFile
) {
  const usesClaudeCode = resolvedTools.some(({ tool }) => tool.id === 'claude-code');
  if (!usesClaudeCode) {
    return;
  }

  const blob = extractFn(home);
  if (blob) {
    writeFn(home, project, blob);
    return;
  }

  throw new Error([
    'Claude Code credentials not found on host.',
    '',
    'The sandbox needs your Claude Code OAuth credentials so the container can use Claude Code.',
    '',
    'To fix:',
    ' 1. On the host, run "claude" once and complete the OAuth login flow.',
    ' 2. Verify with "claude /status" that you see your subscription.',
    ' 3. Re-run "ai sandbox create".',
    '',
    'Alternatively, if you do not need Claude Code in this sandbox,',
    'remove "claude-code" from the "sandbox.tools" array in .agents/.airc.json.'
  ].join('\n'));
}
608
+
609
// Host path of the user-editable alias file that gets synced into every
// sandbox container as ~/.bash_aliases.
export function sandboxAliasesPath(home) {
  const fileName = '.ai-sandbox-aliases';
  return path.join(home, fileName);
}
612
+
613
// Escape a literal string so it can be embedded safely in a RegExp pattern.
function escapeRegExp(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
616
+
617
// Remove every managed alias block (begin/end marker pair, inclusive, plus
// the trailing newline) from the file content, leaving only user-authored
// lines; trailing whitespace is trimmed so a fresh block appends cleanly.
function stripManagedSandboxAliasBlocks(content) {
  const begin = escapeRegExp(SANDBOX_ALIAS_BLOCK_BEGIN);
  const end = escapeRegExp(SANDBOX_ALIAS_BLOCK_END);
  const managedBlock = new RegExp(`${begin}[\\s\\S]*?${end}\\n?`, 'g');
  return content.replace(managedBlock, '').trimEnd();
}
624
+
625
// Detect the pre-managed-block file format: every non-blank line defines one
// of our known default aliases. Such files contain no user content and can
// be replaced wholesale by the managed block.
function isLegacyManagedSandboxAliasFile(content) {
  const meaningfulLines = content
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter((line) => line.length > 0);

  if (meaningfulLines.length === 0) {
    return false;
  }

  const knownAlias = new RegExp(`^alias (${SANDBOX_ALIAS_NAMES.map(escapeRegExp).join('|')})=`);
  return meaningfulLines.every((line) => knownAlias.test(line));
}
638
+
639
// Create or refresh ~/.ai-sandbox-aliases: user-authored content is kept and
// the managed alias block is (re)appended after it; legacy fully-managed
// files are replaced outright. Writes only when content changed. Returns
// { created, path } so callers can surface a first-run message.
export function ensureSandboxAliasesFile(home) {
  const aliasesPath = sandboxAliasesPath(home);
  const managedBlock = `${SANDBOX_ALIAS_BLOCK_BEGIN}\n${DEFAULT_SANDBOX_ALIASES}${SANDBOX_ALIAS_BLOCK_END}\n`;
  const created = !fs.existsSync(aliasesPath);
  const existing = created ? '' : fs.readFileSync(aliasesPath, 'utf8');

  let userContent = '';
  if (!isLegacyManagedSandboxAliasFile(existing)) {
    userContent = stripManagedSandboxAliasBlocks(existing);
  }

  const nextContent = userContent.length > 0
    ? `${userContent}\n\n${managedBlock}`
    : managedBlock;

  if (created || nextContent !== existing) {
    fs.writeFileSync(aliasesPath, nextContent, 'utf8');
  }

  return { created, path: aliasesPath };
}
662
+
663
// Produce a human-readable message for a failed child process: prefer the
// command's captured stderr, then the error's own message, then a generic
// fallback.
export function commandErrorMessage(error) {
  const stderr = error?.stderr?.toString().trim();
  if (stderr) {
    return stderr;
  }
  return error?.message || 'Command failed';
}
667
+
668
// Run a command via the shared runner, rewrapping any failure so the task UI
// shows the command's stderr (via commandErrorMessage) instead of the raw
// child_process error (which buries the useful text in a long message).
function runTaskCommand(cmd, args, opts = {}) {
  try {
    return run(cmd, args, opts);
  } catch (error) {
    throw new Error(commandErrorMessage(error));
  }
}
675
+
676
// Build the sandbox Docker image, stamping it with the project label plus
// the config-signature label later used to detect staleness. The host
// UID/GID are forwarded as build args so files created inside the container
// keep host ownership; `runFn`/`runVerboseFn` are injectable for tests.
export function buildImage(
  config,
  tools,
  dockerfilePath,
  imageSignature,
  { runFn = run, runVerboseFn = runVerbose } = {}
) {
  const hostUid = runFn('id', ['-u']);
  const hostGid = runFn('id', ['-g']);

  const buildArgs = [
    ['HOST_UID', hostUid],
    ['HOST_GID', hostGid],
    ['AI_TOOL_PACKAGES', toolNpmPackagesArg(tools)]
  ].flatMap(([key, value]) => ['--build-arg', `${key}=${value}`]);

  const labelArgs = [
    '--label',
    sandboxLabel(config),
    '--label',
    `${sandboxImageConfigLabel(config)}=${imageSignature}`
  ];

  runVerboseFn(
    'docker',
    [
      'build',
      '-t',
      config.imageName,
      ...buildArgs,
      ...labelArgs,
      '-f',
      dockerfilePath,
      config.repoRoot
    ],
    { cwd: config.repoRoot }
  );
}
705
+
706
+ export async function create(args) {
707
+ const { values, positionals } = parseArgs({
708
+ args,
709
+ allowPositionals: true,
710
+ strict: true,
711
+ options: {
712
+ cpu: { type: 'string' },
713
+ memory: { type: 'string' },
714
+ help: { type: 'boolean', short: 'h' }
715
+ }
716
+ });
717
+
718
+ if (values.help) {
719
+ process.stdout.write(`${USAGE}\n`);
720
+ return;
721
+ }
722
+
723
+ if (positionals.length < 1 || positionals.length > 2) {
724
+ throw new Error(USAGE);
725
+ }
726
+
727
+ const config = loadConfig();
728
+ const [branchOrTaskId, base] = positionals;
729
+ const branch = resolveTaskBranch(branchOrTaskId, config.repoRoot);
730
+ assertValidBranchName(branch);
731
+ const effectiveConfig = {
732
+ ...config,
733
+ vm: {
734
+ ...config.vm,
735
+ cpu: parsePositiveIntegerOption(values.cpu, '--cpu') ?? config.vm.cpu,
736
+ memory: parsePositiveIntegerOption(values.memory, '--memory') ?? config.vm.memory
737
+ }
738
+ };
739
+ const worktreeCandidates = worktreeDirCandidates(effectiveConfig, branch);
740
+ assertBranchAvailable(config.repoRoot, branch, { allowedWorktrees: worktreeCandidates });
741
+ const tools = resolveTools(effectiveConfig);
742
+ const resolvedTools = resolveToolDirs(effectiveConfig, tools, branch);
743
+ // Fail fast before any filesystem/docker side effects so a missing
744
+ // Claude Code credential blob doesn't leave the user with a stale
745
+ // worktree, docker image, or temporary Dockerfile they need to manually
746
+ // clean up.
747
+ assertClaudeCredentialsAvailable(
748
+ effectiveConfig.home,
749
+ effectiveConfig.project,
750
+ resolvedTools
751
+ );
752
+ const container = containerName(effectiveConfig, branch);
753
+ const worktree = worktreeCandidates.find((candidate) => fs.existsSync(candidate)) ?? worktreeCandidates[0];
754
+ const preparedDockerfile = prepareDockerfile(effectiveConfig);
755
+ const baseBranch = base ?? runSafe('git', ['-C', effectiveConfig.repoRoot, 'branch', '--show-current']);
756
+ const expectedImageSignature = buildSignature(preparedDockerfile, tools);
757
+
758
+ p.intro(pc.cyan('AI Sandbox'));
759
+ p.log.info(
760
+ `Project: ${pc.bold(effectiveConfig.project)} | Branch: ${pc.bold(branch)} | Base: ${pc.bold(baseBranch || 'HEAD')}`
761
+ );
762
+
763
+ try {
764
+ p.log.step('Checking container engine...');
765
+ await ensureDocker(effectiveConfig, (detail) => {
766
+ p.log.info(` ${detail}`);
767
+ });
768
+ p.log.success('Docker is ready');
769
+
770
+ const imageExists = runOk('docker', ['image', 'inspect', effectiveConfig.imageName]);
771
+ const currentImageSignature = imageExists
772
+ ? runSafe('docker', [
773
+ 'image',
774
+ 'inspect',
775
+ '--format',
776
+ `{{ index .Config.Labels "${sandboxImageConfigLabel(effectiveConfig)}" }}`,
777
+ effectiveConfig.imageName
778
+ ])
779
+ : '';
780
+ const needsImageBuild = !imageExists || currentImageSignature !== expectedImageSignature;
781
+
782
+ if (needsImageBuild) {
783
+ p.log.step(imageExists ? 'Rebuilding stale image...' : 'Building image for first use...');
784
+ buildImage(
785
+ effectiveConfig,
786
+ tools,
787
+ preparedDockerfile.path,
788
+ expectedImageSignature
789
+ );
790
+ p.log.success(imageExists ? 'Image rebuilt' : 'Image built');
791
+ } else {
792
+ p.log.step(`Using existing image ${effectiveConfig.imageName}`);
793
+ }
794
+
795
+ await p.tasks([
796
+ {
797
+ title: 'Setting up git worktree',
798
+ task: async (message) => {
799
+ if (fs.existsSync(worktree)) {
800
+ if (fs.readdirSync(worktree).length > 0) {
801
+ return `Worktree exists at ${worktree}`;
802
+ }
803
+ fs.rmSync(worktree, { recursive: true, force: true });
804
+ }
805
+
806
+ const branchExists = runOk('git', [
807
+ '-C',
808
+ effectiveConfig.repoRoot,
809
+ 'show-ref',
810
+ '--verify',
811
+ '--quiet',
812
+ `refs/heads/${branch}`
813
+ ]);
814
+
815
+ if (branchExists) {
816
+ message(`Using existing branch '${branch}'...`);
817
+ runTaskCommand('git', ['-C', effectiveConfig.repoRoot, 'worktree', 'add', worktree, branch]);
818
+ } else {
819
+ message(`Creating branch '${branch}' from '${baseBranch}'...`);
820
+ runTaskCommand('git', ['-C', effectiveConfig.repoRoot, 'worktree', 'add', '-b', branch, worktree, baseBranch]);
821
+ }
822
+
823
+ return `Worktree ready at ${worktree}`;
824
+ }
825
+ },
826
+ {
827
+ title: 'Preparing tool state',
828
+ task: async () => {
829
+ for (const { tool, dir } of resolvedTools) {
830
+ fs.mkdirSync(dir, { recursive: true });
831
+
832
+ for (const { hostPath, sandboxName } of tool.hostPreSeedFiles ?? []) {
833
+ const destination = path.join(dir, sandboxName);
834
+ if (fs.existsSync(hostPath) && !fs.existsSync(destination)) {
835
+ fs.mkdirSync(path.dirname(destination), { recursive: true });
836
+ fs.copyFileSync(hostPath, destination);
837
+ }
838
+ }
839
+
840
+ for (const { hostDir, sandboxSubdir } of tool.hostPreSeedDirs ?? []) {
841
+ const destination = path.join(dir, sandboxSubdir);
842
+ if (fs.existsSync(hostDir) && !fs.existsSync(destination)) {
843
+ fs.cpSync(hostDir, destination, { recursive: true });
844
+ }
845
+ }
846
+
847
+ for (const relativePath of tool.pathRewriteFiles ?? []) {
848
+ const filePath = path.join(dir, relativePath);
849
+ if (!fs.existsSync(filePath)) {
850
+ continue;
851
+ }
852
+ let content = fs.readFileSync(filePath, 'utf8');
853
+ content = content.replaceAll(effectiveConfig.repoRoot, '/workspace');
854
+ content = content.replaceAll(effectiveConfig.home, path.dirname(tool.containerMount));
855
+ fs.writeFileSync(filePath, content, 'utf8');
856
+ }
857
+ }
858
+
859
+ return `${resolvedTools.length} tool config directories ready`;
860
+ }
861
+ },
862
+ {
863
+ title: `Starting container '${container}'`,
864
+ task: async (message) => {
865
+ const existing = runSafe('docker', ['ps', '-a', '--format', '{{.Names}}']).split('\n').filter(Boolean);
866
+ const matchedContainers = containerNameCandidates(effectiveConfig, branch)
867
+ .filter((name) => existing.includes(name));
868
+
869
+ if (matchedContainers.length > 0) {
870
+ message('Removing old container instance...');
871
+ for (const name of matchedContainers) {
872
+ runSafe('docker', ['stop', name]);
873
+ runSafe('docker', ['rm', name]);
874
+ }
875
+ }
876
+
877
+ const aliasesFile = ensureSandboxAliasesFile(effectiveConfig.home);
878
+ if (aliasesFile.created) {
879
+ message(`Created default sandbox aliases at ${aliasesFile.path}`);
880
+ }
881
+
882
+ const gitconfigPath = path.join(effectiveConfig.home, '.gitconfig');
883
+ const gitconfigContent = fs.existsSync(gitconfigPath)
884
+ ? fs.readFileSync(gitconfigPath, 'utf8')
885
+ : '';
886
+ const needsGpg = detectGpgConfig(gitconfigContent);
887
+ const envArgs = buildContainerEnvArgs(resolvedTools);
888
+ const claudeCodeEntry = resolvedTools.find(({ tool }) => tool.id === 'claude-code');
889
+ if (claudeCodeEntry) {
890
+ ensureClaudeOnboarding(claudeCodeEntry.dir);
891
+ ensureClaudeSettings(claudeCodeEntry.dir);
892
+ // Credential availability is asserted up-front in create() so we
893
+ // know the shared credentials file already exists at this point.
894
+ }
895
+ const codexEntry = resolvedTools.find(({ tool }) => tool.id === 'codex');
896
+ if (codexEntry) {
897
+ ensureCodexWorkspaceTrust(codexEntry.dir);
898
+ }
899
+ const geminiEntry = resolvedTools.find(({ tool }) => tool.id === 'gemini-cli');
900
+ if (geminiEntry) {
901
+ ensureGeminiWorkspaceTrust(geminiEntry.dir);
902
+ }
903
+ // OpenCode has no workspace trust mechanism, so no preseed step is needed.
904
+ const toolVolumes = resolvedTools.flatMap(({ tool, dir }) => ['-v', `${dir}:${tool.containerMount}`]);
905
+ const workspaceDir = path.join(effectiveConfig.repoRoot, '.agents', 'workspace');
906
+ const liveMountVolumes = resolvedTools.flatMap(({ tool }) =>
907
+ (tool.hostLiveMounts ?? [])
908
+ .filter(({ hostPath }) => fs.existsSync(hostPath))
909
+ .flatMap(({ hostPath, containerSubpath }) => [
910
+ '-v',
911
+ `${hostPath}:${path.join(tool.containerMount, containerSubpath)}`
912
+ ])
913
+ );
914
+
915
+ fs.mkdirSync(workspaceDir, { recursive: true });
916
+
917
+ runTaskCommand('docker', [
918
+ 'run',
919
+ '-d',
920
+ '--name',
921
+ container,
922
+ '--hostname',
923
+ `${effectiveConfig.project}-sandbox`,
924
+ '--label',
925
+ sandboxLabel(effectiveConfig),
926
+ '--label',
927
+ `${sandboxBranchLabel(effectiveConfig)}=${branch}`,
928
+ '-v',
929
+ `${worktree}:/workspace`,
930
+ '-v',
931
+ `${workspaceDir}:/workspace/.agents/workspace`,
932
+ '-v',
933
+ `${effectiveConfig.repoRoot}/.git:${effectiveConfig.repoRoot}/.git`,
934
+ '-v',
935
+ `${path.join(effectiveConfig.home, '.ssh')}:/home/devuser/.ssh:ro`,
936
+ ...toolVolumes,
937
+ ...liveMountVolumes,
938
+ ...envArgs,
939
+ '-w',
940
+ '/workspace',
941
+ effectiveConfig.imageName
942
+ ]);
943
+
944
+ message('Syncing git config...');
945
+ syncGitConfig(container, effectiveConfig.repoRoot, effectiveConfig.home, { gpgMounted: false });
946
+ if (needsGpg) {
947
+ const signingKey = getGitSigningKey({ repoPath: worktree, home: effectiveConfig.home });
948
+ const cachedGpg = readGpgCache(
949
+ effectiveConfig.home,
950
+ effectiveConfig.project,
951
+ undefined,
952
+ signingKey
953
+ );
954
+ message(
955
+ cachedGpg
956
+ ? 'Syncing GPG keys from cache...'
957
+ : 'Syncing GPG keys (you may be prompted for your passphrase)...'
958
+ );
959
+ try {
960
+ if (syncGpgKeys(
961
+ container,
962
+ effectiveConfig.home,
963
+ effectiveConfig.project,
964
+ undefined,
965
+ undefined,
966
+ {
967
+ cachedOverride: cachedGpg,
968
+ repoPath: worktree,
969
+ signingKey
970
+ }
971
+ )) {
972
+ syncGitConfig(container, effectiveConfig.repoRoot, effectiveConfig.home, { gpgMounted: true });
973
+ }
974
+ } catch {
975
+ // Keep the stripped fallback gitconfig when host GPG export/import fails.
976
+ }
977
+ }
978
+ syncShellAliases(container, effectiveConfig.home);
979
+
980
+ for (const { tool } of resolvedTools) {
981
+ for (const command of tool.postSetupCmds ?? []) {
982
+ runSafe('docker', ['exec', container, 'bash', '-lc', command]);
983
+ }
984
+ }
985
+
986
+ return 'Container started';
987
+ }
988
+ }
989
+ ]);
990
+ } finally {
991
+ preparedDockerfile.cleanup();
992
+ }
993
+
994
+ p.log.step('Verifying setup...');
995
+ const runningContainers = runSafe('docker', ['ps', '--format', '{{.Names}}']).split('\n');
996
+ const checks = [
997
+ { name: 'Container running', ok: runningContainers.includes(container) },
998
+ ...runtimeChecks(effectiveConfig.runtimes).map((check) => ({
999
+ name: check.name,
1000
+ ok: runOk('docker', ['exec', container, ...check.cmd])
1001
+ })),
1002
+ { name: 'GitHub CLI', ok: runOk('docker', ['exec', container, 'gh', '--version']) }
1003
+ ];
1004
+ const toolChecks = tools.map((tool) => ({
1005
+ name: tool.name,
1006
+ ok: runOk('docker', ['exec', container, 'bash', '-lc', tool.versionCmd]),
1007
+ hint: tool.setupHint
1008
+ }));
1009
+
1010
+ for (const check of checks) {
1011
+ p.log.info(` ${check.ok ? pc.green('✓') : pc.yellow('?')} ${check.name}`);
1012
+ }
1013
+ for (const check of toolChecks) {
1014
+ p.log.info(` ${check.ok ? pc.green('✓') : pc.yellow('?')} ${check.name}`);
1015
+ if (!check.ok) {
1016
+ p.log.warn(` ${check.hint}`);
1017
+ }
1018
+ }
1019
+
1020
+ p.outro(pc.green('Sandbox ready'));
1021
+
1022
+ const toolHints = resolvedTools.map(({ tool, dir }) => {
1023
+ const hasLiveMount = (tool.hostLiveMounts ?? []).some(({ hostPath }) => fs.existsSync(hostPath));
1024
+ const hint = hasLiveMount
1025
+ ? 'Live-mounted auth/config files stay in sync with the host.'
1026
+ : tool.setupHint;
1027
+ return `${tool.name}: ${hint} Config dir: ${dir}`;
1028
+ }).join('\n');
1029
+
1030
+ process.stdout.write(`
1031
+ Container: ${container}
1032
+ Image: ${effectiveConfig.imageName}
1033
+ Worktree: ${worktree}
1034
+ Host aliases: ${sandboxAliasesPath(effectiveConfig.home)}
1035
+
1036
+ Management:
1037
+ ai sandbox ls
1038
+ ai sandbox exec ${branch}
1039
+ ai sandbox rm ${branch}
1040
+
1041
+ Sandbox aliases:
1042
+ Edit the host aliases file to customize shortcuts synced to ${CONTAINER_HOME}/.bash_aliases.
1043
+
1044
+ Tool notes:
1045
+ ${toolHints}
1046
+ `);
1047
+ }