@fitlab-ai/agent-infra 0.4.4 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. package/README.md +16 -2
  2. package/README.zh-CN.md +16 -2
  3. package/bin/cli.js +19 -0
  4. package/lib/defaults.json +17 -0
  5. package/lib/init.js +1 -0
  6. package/lib/log.js +5 -10
  7. package/lib/merge.js +465 -0
  8. package/lib/sandbox/commands/create.js +1047 -0
  9. package/lib/sandbox/commands/enter.js +31 -0
  10. package/lib/sandbox/commands/ls.js +70 -0
  11. package/lib/sandbox/commands/rebuild.js +102 -0
  12. package/lib/sandbox/commands/rm.js +211 -0
  13. package/lib/sandbox/commands/vm.js +101 -0
  14. package/lib/sandbox/config.js +79 -0
  15. package/lib/sandbox/constants.js +113 -0
  16. package/lib/sandbox/dockerfile.js +95 -0
  17. package/lib/sandbox/engine.js +93 -0
  18. package/lib/sandbox/index.js +64 -0
  19. package/lib/sandbox/runtimes/ai-tools.dockerfile +26 -0
  20. package/lib/sandbox/runtimes/base.dockerfile +30 -0
  21. package/lib/sandbox/runtimes/java17.dockerfile +3 -0
  22. package/lib/sandbox/runtimes/java21.dockerfile +3 -0
  23. package/lib/sandbox/runtimes/node20.dockerfile +3 -0
  24. package/lib/sandbox/runtimes/node22.dockerfile +3 -0
  25. package/lib/sandbox/runtimes/python3.dockerfile +3 -0
  26. package/lib/sandbox/shell.js +48 -0
  27. package/lib/sandbox/task-resolver.js +35 -0
  28. package/lib/sandbox/tools.js +131 -0
  29. package/lib/update.js +16 -2
  30. package/package.json +5 -1
  31. package/templates/.agents/rules/commit-and-pr.md +30 -0
  32. package/templates/.agents/rules/commit-and-pr.zh-CN.md +30 -0
  33. package/templates/.agents/rules/issue-sync.md +12 -2
  34. package/templates/.agents/rules/issue-sync.zh-CN.md +12 -2
  35. package/templates/.agents/rules/task-management.md +28 -0
  36. package/templates/.agents/rules/task-management.zh-CN.md +28 -0
  37. package/templates/.agents/scripts/validate-artifact.js +40 -0
  38. package/templates/.agents/skills/archive-tasks/SKILL.md +6 -3
  39. package/templates/.agents/skills/archive-tasks/SKILL.zh-CN.md +6 -3
  40. package/templates/.agents/skills/archive-tasks/scripts/archive-tasks.sh +91 -8
  41. package/templates/.agents/skills/create-task/SKILL.md +6 -0
  42. package/templates/.agents/skills/create-task/SKILL.zh-CN.md +6 -0
  43. package/templates/.agents/skills/create-task/config/verify.json +1 -0
  44. package/templates/.agents/skills/import-issue/SKILL.md +2 -0
  45. package/templates/.agents/skills/import-issue/SKILL.zh-CN.md +2 -0
  46. package/templates/.agents/skills/import-issue/config/verify.json +1 -0
  47. package/templates/.agents/skills/update-agent-infra/scripts/sync-templates.js +18 -1
  48. package/templates/.agents/templates/task.md +5 -4
  49. package/templates/.agents/templates/task.zh-CN.md +5 -4
@@ -0,0 +1,95 @@
1
+ import fs from 'node:fs';
2
+ import os from 'node:os';
3
+ import path from 'node:path';
4
+ import { createHash } from 'node:crypto';
5
+ import { fileURLToPath } from 'node:url';
6
+
7
// Directory holding the Dockerfile fragments bundled with this package.
const RUNTIMES_DIR = path.join(path.dirname(fileURLToPath(import.meta.url)), 'runtimes');

/**
 * List the names of all bundled Dockerfile fragments, extension stripped
 * (e.g. 'base', 'ai-tools', 'node20').
 * @returns {string[]} fragment names in directory order
 */
function listRuntimeFragments() {
  const names = [];
  for (const entry of fs.readdirSync(RUNTIMES_DIR)) {
    if (entry.endsWith('.dockerfile')) {
      names.push(entry.replace(/\.dockerfile$/, ''));
    }
  }
  return names;
}
17
+
18
/**
 * Names of the user-selectable runtime fragments, sorted alphabetically.
 * 'base' and 'ai-tools' are structural fragments, not selectable runtimes.
 * @returns {string[]}
 */
export function availableRuntimes() {
  const reserved = new Set(['base', 'ai-tools']);
  const runtimes = listRuntimeFragments().filter((name) => !reserved.has(name));
  runtimes.sort();
  return runtimes;
}
23
+
24
/**
 * Assemble the full Dockerfile text for a sandbox config.
 *
 * When `config.dockerfile` is set, the custom file (resolved against
 * `config.repoRoot`) is used verbatim. Otherwise the content is composed
 * from the bundled fragments: base, then each configured runtime, then
 * ai-tools, joined with blank lines and a trailing newline.
 *
 * @param {object} config sandbox config (repoRoot, dockerfile?, runtimes)
 * @returns {string} Dockerfile content
 * @throws {Error} if the custom Dockerfile is missing or a runtime is unknown
 */
function dockerfileContent(config) {
  if (config.dockerfile) {
    const customPath = path.resolve(config.repoRoot, config.dockerfile);
    if (!fs.existsSync(customPath)) {
      throw new Error(`Custom Dockerfile not found: ${customPath}`);
    }
    return fs.readFileSync(customPath, 'utf8');
  }

  const validRuntimes = new Set(availableRuntimes());
  const unknown = config.runtimes.find((runtime) => !validRuntimes.has(runtime));
  if (unknown !== undefined) {
    throw new Error(
      `Unknown runtime: ${unknown}. Available runtimes: ${[...validRuntimes].join(', ')}`
    );
  }

  // Fixed sandwich: base first, ai-tools last.
  const fragments = ['base.dockerfile'];
  for (const runtime of config.runtimes) {
    fragments.push(`${runtime}.dockerfile`);
  }
  fragments.push('ai-tools.dockerfile');

  const sections = fragments.map((fragment) =>
    fs.readFileSync(path.join(RUNTIMES_DIR, fragment), 'utf8').trimEnd()
  );
  return `${sections.join('\n\n')}\n`;
}
54
+
55
/**
 * Short content-addressed signature of the assembled Dockerfile:
 * the first 12 hex chars of its SHA-256 digest.
 * @param {object} config sandbox config
 * @returns {string} 12-character hex signature
 */
export function dockerfileSignature(config) {
  const hash = createHash('sha256');
  hash.update(dockerfileContent(config));
  return hash.digest('hex').slice(0, 12);
}
61
+
62
/**
 * Materialize the Dockerfile for a build and return its location.
 *
 * For a custom Dockerfile the existing file is used in place; otherwise the
 * composed fragment content is written to a fresh temp directory. The returned
 * `cleanup()` removes any temp files created by this call (a no-op for custom
 * Dockerfiles).
 *
 * @param {object} config sandbox config (repoRoot, project, dockerfile?, runtimes)
 * @returns {{ path: string, signature: string, cleanup: () => void }}
 * @throws {Error} if the custom Dockerfile is missing or a runtime is unknown
 */
export function prepareDockerfile(config) {
  // Read/compose the content exactly once and hash it directly, instead of
  // calling dockerfileSignature() which would invoke dockerfileContent() a
  // second time and re-read/re-validate every fragment. Same algorithm as
  // dockerfileSignature: first 12 hex chars of SHA-256.
  const content = dockerfileContent(config);
  const signature = createHash('sha256').update(content).digest('hex').slice(0, 12);

  if (config.dockerfile) {
    // dockerfileContent() already verified the custom file exists.
    return {
      path: path.resolve(config.repoRoot, config.dockerfile),
      signature,
      cleanup() {}
    };
  }

  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), `${config.project}-sandbox-`));
  const tempPath = path.join(tempDir, 'Dockerfile');
  fs.writeFileSync(tempPath, content, 'utf8');

  return {
    path: tempPath,
    signature,
    cleanup() {
      fs.rmSync(tempDir, { recursive: true, force: true });
    }
  };
}
88
+
89
/**
 * Write the composed Dockerfile to a fixed, project-named path under the OS
 * temp dir and return that path.
 *
 * NOTE(review): the path is predictable and shared across processes in the
 * same temp dir — presumably callers rely on the stable name; confirm before
 * hardening with mkdtemp.
 *
 * @param {object} config sandbox config
 * @returns {string} absolute path of the written Dockerfile
 */
export function composeDockerfile(config) {
  const tempPath = path.join(os.tmpdir(), `${config.project}-sandbox.Dockerfile`);
  fs.writeFileSync(tempPath, dockerfileContent(config), 'utf8');
  return tempPath;
}
@@ -0,0 +1,93 @@
1
+ import { platform } from 'node:os';
2
+ import { detectHostResources } from './constants.js';
3
+ import { run, runOk, runSafe, runVerbose } from './shell.js';
4
+
5
/**
 * Map the host OS to the container engine strategy used by the sandbox.
 * @returns {'colima'|'native'|'wsl2'|'unsupported'}
 */
export function detectEngine() {
  switch (platform()) {
    case 'darwin':
      return 'colima';
    case 'linux':
      return 'native';
    case 'win32':
      return 'wsl2';
    default:
      return 'unsupported';
  }
}
18
+
19
/**
 * Build the `colima start` argument list from the sandbox config, falling
 * back to detected host resources for CPU/memory and 60 for disk.
 * `runSafeFn` is injectable so tests can stub the `uname -m` probe.
 * @returns {string[]} args for `colima`
 */
function colimaArgs(config, runSafeFn = runSafe) {
  const hostDefaults = detectHostResources();
  const cpu = config.vm.cpu ?? hostDefaults.cpu;
  const memory = config.vm.memory ?? hostDefaults.memory;
  const disk = config.vm.disk ?? 60;

  const args = [
    'start',
    '--cpu', String(cpu),
    '--memory', String(memory),
    '--disk', String(disk)
  ];

  // Apple Silicon hosts get the vz hypervisor + virtiofs mounts.
  const arch = runSafeFn('uname', ['-m']);
  if (arch === 'arm64') {
    args.push('--arch', 'aarch64', '--vm-type=vz', '--mount-type=virtiofs');
  } else {
    args.push('--arch', 'x86_64');
  }

  return args;
}
35
+
36
/**
 * Make sure colima + docker are installed (via Homebrew) and the Colima VM
 * is running, then verify the docker daemon is reachable.
 * The runOkFn/runSafeFn/runVerboseFn hooks exist for testing.
 * @throws {Error} when the docker daemon is still unavailable afterwards
 */
export async function ensureColima(
  config,
  onMessage,
  { runOkFn = runOk, runSafeFn = runSafe, runVerboseFn = runVerbose } = {}
) {
  const colimaInstalled = runOkFn('which', ['colima']);
  if (!colimaInstalled) {
    onMessage?.('Installing colima + docker via Homebrew...');
    runVerboseFn('brew', ['install', 'colima', 'docker']);
  }

  const vmRunning = runOkFn('colima', ['status']);
  if (!vmRunning) {
    onMessage?.('Starting Colima VM...');
    runVerboseFn('colima', colimaArgs(config, runSafeFn));
  }

  const dockerReady = runOkFn('docker', ['info']);
  if (!dockerReady) {
    throw new Error('Docker daemon is not available after starting Colima');
  }
}
55
+
56
/**
 * Ensure a usable docker daemon exists for the detected engine strategy.
 * @param {object} config sandbox config
 * @param {(msg: string) => void} [onMessage] progress callback
 * @throws {Error} for a stopped daemon, WSL2 hosts, or unsupported platforms
 */
export async function ensureDocker(config, onMessage) {
  const engine = detectEngine();

  switch (engine) {
    case 'colima':
      await ensureColima(config, onMessage);
      return;
    case 'native':
      if (!runOk('docker', ['info'])) {
        throw new Error('Docker daemon is not running. Please start Docker first.');
      }
      return;
    case 'wsl2':
      throw new Error('Windows sandbox support is reserved for a future WSL2 implementation.');
    default:
      throw new Error(`Unsupported sandbox engine: ${engine}`);
  }
}
77
+
78
/**
 * Whether this tool manages the sandbox VM lifecycle (macOS/Colima only).
 * @returns {boolean}
 */
export function isVmManaged() {
  const engine = detectEngine();
  return engine === 'colima';
}
81
+
82
/**
 * Start the managed Colima VM if it is not already running.
 * @returns {'already-running'|'started'}
 * @throws {Error} on platforms without a managed VM
 */
export function startManagedVm(config) {
  if (!isVmManaged()) {
    throw new Error('VM management is only available on macOS with Colima.');
  }

  const alreadyUp = runOk('colima', ['status']);
  if (alreadyUp) {
    return 'already-running';
  }

  runVerbose('colima', colimaArgs(config));
  return 'started';
}
@@ -0,0 +1,64 @@
1
// Usage/help text for the `ai sandbox` CLI entry point, printed verbatim
// when no subcommand or a help flag is given.
const USAGE = `Usage: ai sandbox <command> [options]

Commands:
create <branch> [base] Create a sandbox (VM + image + worktree + container)
exec <branch> [cmd...] Enter sandbox or run a command
ls List sandboxes for the current project
rm <branch> [--all] Remove a sandbox or all sandboxes
vm status|start|stop Manage the sandbox VM (macOS only)
rebuild [--quiet] Rebuild the sandbox image

Run 'ai sandbox <command> --help' for details.`;
12
+
13
/**
 * Entry point for `ai sandbox ...`: dispatches to lazily-imported subcommand
 * modules so unrelated commands do not pay their load cost.
 * Missing subcommand prints usage and sets exit code 1; help flags print
 * usage with exit code 0.
 * @param {string[]} args argv slice after `sandbox`
 * @throws {Error} for unrecognized subcommands
 */
export async function runSandbox(args) {
  const [subcommand, ...rest] = args;

  if (!subcommand) {
    process.stdout.write(`${USAGE}\n`);
    process.exitCode = 1;
    return;
  }

  const wantsHelp = subcommand === '--help' || subcommand === '-h' || subcommand === 'help';
  if (wantsHelp) {
    process.stdout.write(`${USAGE}\n`);
    return;
  }

  switch (subcommand) {
    case 'create': {
      const mod = await import('./commands/create.js');
      await mod.create(rest);
      return;
    }
    case 'exec': {
      const mod = await import('./commands/enter.js');
      // enter() returns the container command's exit status; propagate
      // non-zero statuses to our own exit code.
      const exitCode = mod.enter(rest);
      if (typeof exitCode === 'number' && exitCode !== 0) {
        process.exitCode = exitCode;
      }
      return;
    }
    case 'ls': {
      const mod = await import('./commands/ls.js');
      mod.ls(rest);
      return;
    }
    case 'rm': {
      const mod = await import('./commands/rm.js');
      await mod.rm(rest);
      return;
    }
    case 'vm': {
      const mod = await import('./commands/vm.js');
      await mod.vm(rest);
      return;
    }
    case 'rebuild': {
      const mod = await import('./commands/rebuild.js');
      await mod.rebuild(rest);
      return;
    }
    default:
      throw new Error(`Unknown sandbox command: ${subcommand}`);
  }
}
@@ -0,0 +1,26 @@
1
# ai-tools fragment: appended after all runtime fragments. Switches to the
# unprivileged devuser, installs the selected AI CLI tools globally via npm,
# and prepares the interactive shell environment.
USER devuser
ENV NPM_CONFIG_PREFIX=/home/devuser/.npm-global
ENV PATH="/home/devuser/.npm-global/bin:${PATH}"

# AI_TOOL_PACKAGES is a space-separated npm package list (see
# toolNpmPackagesArg); fail the build fast when it is missing.
ARG AI_TOOL_PACKAGES
RUN if [ -z "${AI_TOOL_PACKAGES}" ]; then \
echo "AI_TOOL_PACKAGES build arg is required"; \
exit 1; \
fi && \
npm install -g ${AI_TOOL_PACKAGES}

RUN npm install -g pyright

# State directories some tools expect to exist (XDG data/state).
RUN mkdir -p /home/devuser/.local/share /home/devuser/.local/state

# Let git trust the bind-mounted worktree despite differing ownership.
RUN git config --global --add safe.directory /workspace

RUN echo 'export NPM_CONFIG_PREFIX=/home/devuser/.npm-global' >> /home/devuser/.bashrc && \
echo 'export PATH="/home/devuser/.npm-global/bin:${PATH}"' >> /home/devuser/.bashrc && \
echo 'export GIT_CONFIG_GLOBAL=/home/devuser/.gitconfig' >> /home/devuser/.bashrc && \
echo 'export GPG_TTY=$(tty)' >> /home/devuser/.bashrc && \
echo '[ -f ~/.bash_aliases ] && . ~/.bash_aliases' >> /home/devuser/.bashrc

WORKDIR /workspace

# Keep the container alive so it can be entered/execed into later.
CMD ["tail", "-f", "/dev/null"]
@@ -0,0 +1,30 @@
1
# base fragment: common first layer of every sandbox image. Creates a
# devuser matched to the host UID/GID, installs core dev tooling plus the
# GitHub CLI, and sets locale/terminal defaults.
FROM ubuntu:22.04

LABEL description="AI coding sandbox"

ENV DEBIAN_FRONTEND=noninteractive
ENV TZ=Asia/Shanghai

# Mirror the host UID/GID so files created in bind mounts keep host
# ownership; the groupadd may fail harmlessly if the GID already exists.
ARG HOST_UID=1000
ARG HOST_GID=1000
RUN (groupadd -g ${HOST_GID} devuser || true) && \
useradd -u ${HOST_UID} -g ${HOST_GID} -m -s /bin/bash devuser

# Core tooling + gh from GitHub's official apt repository; apt lists are
# removed afterwards to keep the layer small.
RUN apt-get update && apt-get install -y \
curl wget git vim file \
build-essential ca-certificates gnupg lsb-release \
locales \
&& locale-gen en_US.UTF-8 \
&& (curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \
| dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg) \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \
> /etc/apt/sources.list.d/github-cli.list \
&& apt-get update && apt-get install -y gh \
&& rm -rf /var/lib/apt/lists/*

ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV TERM=xterm-256color
ENV COLORTERM=truecolor

# Convenience link; /workspace does not exist at build time, so this dangles
# until the workspace is mounted into the running container.
RUN ln -s /workspace /home/devuser/workspace
@@ -0,0 +1,3 @@
1
# java17 runtime fragment: OpenJDK 17 + Maven via apt; lists removed to keep the layer small.
RUN apt-get update && apt-get install -y \
openjdk-17-jdk maven \
&& rm -rf /var/lib/apt/lists/*
@@ -0,0 +1,3 @@
1
# java21 runtime fragment: OpenJDK 21 + Maven via apt; lists removed to keep the layer small.
RUN apt-get update && apt-get install -y \
openjdk-21-jdk maven \
&& rm -rf /var/lib/apt/lists/*
@@ -0,0 +1,3 @@
1
# node20 runtime fragment: Node.js 20 from the NodeSource setup script.
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/*
@@ -0,0 +1,3 @@
1
# node22 runtime fragment: Node.js 22 from the NodeSource setup script.
RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/*
@@ -0,0 +1,3 @@
1
# python3 runtime fragment: system Python 3 with pip and venv support.
RUN apt-get update && apt-get install -y \
python3 python3-pip python3-venv \
&& rm -rf /var/lib/apt/lists/*
@@ -0,0 +1,48 @@
1
+ import { execFileSync, spawnSync } from 'node:child_process';
2
+
3
// Child processes get a generous one-hour ceiling unless the caller overrides it.
const DEFAULT_TIMEOUT_MS = 60 * 60 * 1000;

/**
 * Build the option object handed to child_process, forcing the given stdio
 * mode and applying the default timeout when none was provided.
 * @param {object} [opts] caller options (cwd, encoding, timeout)
 * @param {string|string[]} stdio stdio configuration to force
 */
function normalizeOptions(opts = {}, stdio) {
  const { cwd, encoding } = opts;
  return {
    cwd,
    encoding,
    stdio,
    timeout: opts.timeout ?? DEFAULT_TIMEOUT_MS
  };
}
13
+
14
/**
 * Run a command, capture its stdout as UTF-8, and return it trimmed.
 * Throws (via execFileSync) on non-zero exit.
 */
export function run(cmd, args, opts = {}) {
  const options = {
    ...normalizeOptions(opts, ['pipe', 'pipe', 'pipe']),
    encoding: 'utf8'
  };
  const stdout = execFileSync(cmd, args, options);
  return stdout.trim();
}
20
+
21
/**
 * Run a command and report whether it exited successfully (status 0).
 * @returns {boolean}
 */
export function runOk(cmd, args, opts = {}) {
  const { status } = spawnSync(cmd, args, normalizeOptions(opts, 'pipe'));
  return status === 0;
}
25
+
26
/**
 * Run a command with stdio attached to the current terminal.
 * @returns {number} the child's exit status, or 1 when it has none
 *                   (e.g. killed by a signal)
 */
export function runInteractive(cmd, args, opts = {}) {
  const { status } = spawnSync(cmd, args, normalizeOptions(opts, 'inherit'));
  return status == null ? 1 : status;
}
30
+
31
/**
 * Run a command with terminal-attached stdio and throw a descriptive error
 * on any failure: spawn errors (e.g. command not found), timeouts, signal
 * kills, and non-zero exits.
 * @throws {Error} with the failure reason; the spawn error is attached as `cause`
 */
export function runVerbose(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, normalizeOptions(opts, 'inherit'));

  // spawnSync reports timeouts as error.code === 'ETIMEDOUT' plus a SIGTERM
  // kill; let those fall through to the dedicated timeout message below, but
  // surface every other spawn failure (e.g. ENOENT) instead of the previous
  // misleading "exit code null".
  if (result.error && result.error.code !== 'ETIMEDOUT') {
    throw new Error(`Command failed to start: ${cmd} ${args.join(' ')}`, { cause: result.error });
  }

  if (result.status !== 0) {
    if (result.signal === 'SIGTERM') {
      throw new Error(`Command timed out after ${opts.timeout ?? DEFAULT_TIMEOUT_MS}ms: ${cmd} ${args.join(' ')}`);
    }
    if (result.signal) {
      throw new Error(`Command killed by signal ${result.signal}: ${cmd} ${args.join(' ')}`);
    }
    throw new Error(`Command failed with exit code ${result.status}: ${cmd} ${args.join(' ')}`);
  }
}
41
+
42
/**
 * Run a command and return trimmed stdout, or '' on any failure
 * (missing binary, non-zero exit, no output). Never throws.
 */
export function runSafe(cmd, args, opts = {}) {
  const options = {
    ...normalizeOptions(opts, ['pipe', 'pipe', 'pipe']),
    encoding: 'utf8',
  };
  const { stdout } = spawnSync(cmd, args, options);
  return stdout == null ? '' : stdout.trim();
}
@@ -0,0 +1,35 @@
1
+ import fs from 'node:fs';
2
+ import path from 'node:path';
3
+
4
// Task IDs follow the TASK-YYYYMMDD-HHMMSS shape.
const TASK_ID_RE = /^TASK-\d{8}-\d{6}$/;

/**
 * Read `.agents/workspace/active/<taskId>/task.md` from the repo.
 * @param {string} repoRoot repository root directory
 * @param {string} taskId task identifier
 * @returns {string} the task.md content
 * @throws {Error} when the task file does not exist
 */
function readTaskContent(repoRoot, taskId) {
  const taskPath = path.join(repoRoot, '.agents', 'workspace', 'active', taskId, 'task.md');
  if (fs.existsSync(taskPath)) {
    return fs.readFileSync(taskPath, 'utf8');
  }
  throw new Error(`Task not found: ${taskId}`);
}
13
+
14
/**
 * Extract the working branch from task.md content. A frontmatter-style
 * `branch:` line wins; otherwise a context bullet of the form
 * `- **Branch**: ...` (or its Chinese label) is accepted, with or without
 * backticks around the value.
 * @throws {Error} when neither form yields a non-empty branch
 */
function resolveBranchFromTaskContent(content, taskId) {
  const patterns = [
    /^branch:\s*(.+)$/m,
    /^- \*\*(?:分支|Branch)\*\*:[ \t]*`?([^`\n]+)`?$/m
  ];

  for (const pattern of patterns) {
    const match = content.match(pattern);
    const branch = match?.[1].trim();
    if (branch) {
      return branch;
    }
  }

  throw new Error(`Task ${taskId} has no branch field in task.md`);
}
27
+
28
/**
 * Resolve a CLI argument to a branch name: task IDs are looked up in their
 * task.md, anything else is returned unchanged as a literal branch name.
 * @param {string} arg branch name or TASK-YYYYMMDD-HHMMSS id
 * @param {string} repoRoot repository root directory
 * @returns {string} branch name
 */
export function resolveTaskBranch(arg, repoRoot) {
  if (TASK_ID_RE.test(arg)) {
    const content = readTaskContent(repoRoot, arg);
    return resolveBranchFromTaskContent(content, arg);
  }
  return arg;
}
@@ -0,0 +1,131 @@
1
+ import path from 'node:path';
2
+ import { safeNameCandidates, sanitizeBranchName } from './constants.js';
3
+
4
+ /**
5
+ * @typedef {Object} SandboxTool
6
+ * @property {string} id
7
+ * @property {string} name
8
+ * @property {string} npmPackage
9
+ * @property {string} sandboxBase
10
+ * @property {string} containerMount
11
+ * @property {string} versionCmd
12
+ * @property {string} setupHint
13
+ * @property {Record<string, string>=} envVars
14
+ * @property {Array<{ hostPath: string, sandboxName: string }>=} hostPreSeedFiles
15
+ * @property {Array<{ hostDir: string, sandboxSubdir: string }>=} hostPreSeedDirs
16
+ * @property {string[]=} pathRewriteFiles
17
+ * @property {Array<{ hostPath: string, containerSubpath: string }>=} hostLiveMounts
18
+ * @property {string[]=} postSetupCmds
19
+ */
20
+
21
/**
 * Build the registry of built-in AI CLI tool descriptors, keyed by tool id.
 * Host-side state paths are derived from `home` and namespaced per-project
 * (and, via toolConfigDir, per-branch) so sandboxes stay isolated.
 * @param {string} home host home directory
 * @param {string} project project name used to namespace sandbox state
 * @returns {Record<string, SandboxTool>}
 */
function createBuiltinTools(home, project) {
  /** @type {Record<string, SandboxTool>} */
  return {
    'claude-code': {
      id: 'claude-code',
      name: 'Claude Code',
      npmPackage: '@anthropic-ai/claude-code',
      sandboxBase: path.join(home, '.claude-sandboxes'),
      containerMount: '/home/devuser/.claude',
      versionCmd: 'claude --version',
      setupHint: 'Authenticates via host credentials live-mounted at ~/.claude/.credentials.json',
      // Claude Code stores user data (.claude.json — onboarding state, theme,
      // workspace trust) at $HOME/.claude.json by default, which sits OUTSIDE
      // the bind-mounted /home/devuser/.claude tree, so our preseeded
      // .claude.json never gets read and the theme picker re-runs on every
      // container start. Pinning CLAUDE_CONFIG_DIR to the tool mount relocates
      // .claude.json into the same directory as .credentials.json/settings.json,
      // letting ensureClaudeOnboarding actually take effect.
      envVars: { CLAUDE_CONFIG_DIR: '/home/devuser/.claude' },
      hostPreSeedDirs: [
        { hostDir: path.join(home, '.claude', 'plugins'), sandboxSubdir: 'plugins' }
      ],
      // NOTE(review): presumably these plugin manifests contain host paths
      // that get rewritten to container paths after seeding — confirm against
      // the create command's handling of pathRewriteFiles.
      pathRewriteFiles: [
        'plugins/installed_plugins.json',
        'plugins/known_marketplaces.json'
      ],
      hostLiveMounts: [
        {
          hostPath: path.join(home, `.${project}-claude-credentials`, '.credentials.json'),
          containerSubpath: '.credentials.json'
        }
      ]
    },
    codex: {
      id: 'codex',
      name: 'Codex',
      npmPackage: '@openai/codex',
      sandboxBase: path.join(home, '.codex-sandboxes'),
      containerMount: '/home/devuser/.codex',
      versionCmd: 'codex --version',
      setupHint: 'Run codex once inside the container and choose Device Code login if needed.',
      hostLiveMounts: [
        { hostPath: path.join(home, '.codex', 'auth.json'), containerSubpath: 'auth.json' }
      ],
      // Expose repo-local Codex commands as prompts inside the container;
      // best-effort (|| true) when the repo has no .codex/commands dir.
      postSetupCmds: [
        'test -d /workspace/.codex/commands && ln -sfn /workspace/.codex/commands /home/devuser/.codex/prompts || true'
      ]
    },
    opencode: {
      id: 'opencode',
      name: 'OpenCode',
      npmPackage: 'opencode-ai',
      sandboxBase: path.join(home, '.opencode-sandboxes'),
      containerMount: '/home/devuser/.local/share/opencode',
      versionCmd: 'opencode version',
      setupHint: 'Configure OpenCode credentials inside the container before first use.',
      hostLiveMounts: [
        {
          hostPath: path.join(home, '.local', 'share', 'opencode', 'auth.json'),
          containerSubpath: 'auth.json'
        }
      ]
    },
    'gemini-cli': {
      id: 'gemini-cli',
      name: 'Gemini CLI',
      npmPackage: '@google/gemini-cli',
      sandboxBase: path.join(home, '.gemini-sandboxes'),
      containerMount: '/home/devuser/.gemini',
      versionCmd: 'gemini --version',
      setupHint: 'Run gemini inside the container to finish authentication.',
      hostLiveMounts: [
        { hostPath: path.join(home, '.gemini', 'oauth_creds.json'), containerSubpath: 'oauth_creds.json' }
      ],
      // Settings/account files are copied once at creation (pre-seed), while
      // oauth_creds.json stays live-mounted from the host.
      hostPreSeedFiles: [
        { hostPath: path.join(home, '.gemini', 'settings.json'), sandboxName: 'settings.json' },
        { hostPath: path.join(home, '.gemini', 'google_accounts.json'), sandboxName: 'google_accounts.json' }
      ]
    }
  };
}
102
+
103
/**
 * Sanity-check a tool descriptor: it must name an npm package and mount at
 * an absolute container path.
 * @throws {Error} on an invalid descriptor
 */
function validateTool(tool) {
  const invalid = !tool.npmPackage || !tool.containerMount.startsWith('/');
  if (invalid) {
    throw new Error(`Invalid sandbox tool descriptor: ${tool.id}`);
  }
}
108
+
109
/**
 * Resolve the configured tool ids into validated built-in descriptors,
 * preserving the configured order.
 * @param {object} config sandbox config (home, project, tools)
 * @returns {SandboxTool[]}
 * @throws {Error} for ids with no built-in descriptor
 */
export function resolveTools(config) {
  const builtins = createBuiltinTools(config.home, config.project);
  const resolved = [];

  for (const id of config.tools) {
    const tool = builtins[id];
    if (!tool) {
      throw new Error(`Unknown sandbox tool: ${id}`);
    }
    validateTool(tool);
    resolved.push(tool);
  }

  return resolved;
}
120
+
121
/**
 * Host directory holding this tool's state for a given project + branch
 * (branch name sanitized for filesystem use).
 */
export function toolConfigDir(tool, project, branch) {
  const safeBranch = sanitizeBranchName(branch);
  return path.join(tool.sandboxBase, project, safeBranch);
}
124
+
125
/**
 * All candidate state directories for a branch — one per safe-name variant
 * produced by safeNameCandidates.
 */
export function toolConfigDirCandidates(tool, project, branch) {
  const base = path.join(tool.sandboxBase, project);
  return safeNameCandidates(branch).map((name) => path.join(base, name));
}
128
+
129
/**
 * Space-separated npm package list for the selected tools (e.g. the value
 * passed as the AI_TOOL_PACKAGES docker build arg).
 * @param {SandboxTool[]} tools
 * @returns {string}
 */
export function toolNpmPackagesArg(tools) {
  const packages = [];
  for (const tool of tools) {
    packages.push(tool.npmPackage);
  }
  return packages.join(' ');
}
package/lib/update.js CHANGED
@@ -139,9 +139,15 @@ async function cmdUpdate() {
139
139
  // sync file registry
140
140
  const { added, changed } = syncFileRegistry(config);
141
141
  const hasNewEntries = added.managed.length > 0 || added.merged.length > 0;
142
+ const sandboxAdded = !config.sandbox;
142
143
  const labelsAdded = !config.labels;
143
144
  let configChanged = changed;
144
145
 
146
+ if (sandboxAdded) {
147
+ config.sandbox = structuredClone(defaults.sandbox);
148
+ configChanged = true;
149
+ }
150
+
145
151
  if (labelsAdded) {
146
152
  config.labels = structuredClone(defaults.labels);
147
153
  configChanged = true;
@@ -157,11 +163,19 @@ async function cmdUpdate() {
157
163
  for (const entry of added.merged) {
158
164
  ok(` merged: ${entry}`);
159
165
  }
160
- } else if (labelsAdded) {
161
- info(`Default labels.in config added to ${CONFIG_PATH}.`);
166
+ } else if (sandboxAdded || labelsAdded) {
167
+ if (sandboxAdded) {
168
+ info(`Default sandbox config added to ${CONFIG_PATH}.`);
169
+ }
170
+ if (labelsAdded) {
171
+ info(`Default labels.in config added to ${CONFIG_PATH}.`);
172
+ }
162
173
  } else {
163
174
  info(`File registry changed in ${CONFIG_PATH}.`);
164
175
  }
176
+ if (hasNewEntries && sandboxAdded) {
177
+ info(`Default sandbox config added to ${CONFIG_PATH}.`);
178
+ }
165
179
  if (hasNewEntries && labelsAdded) {
166
180
  info(`Default labels.in config added to ${CONFIG_PATH}.`);
167
181
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fitlab-ai/agent-infra",
3
- "version": "0.4.4",
3
+ "version": "0.5.0",
4
4
  "description": "Bootstrap tool for AI multi-tool collaboration infrastructure — works with Claude Code, Codex, Gemini CLI, and OpenCode",
5
5
  "license": "MIT",
6
6
  "type": "module",
@@ -40,6 +40,10 @@
40
40
  "bootstrap",
41
41
  "installer"
42
42
  ],
43
+ "dependencies": {
44
+ "@clack/prompts": "1.2.0",
45
+ "picocolors": "1.1.1"
46
+ },
43
47
  "scripts": {
44
48
  "build": "node scripts/build-inline.js",
45
49
  "prepare": "git config core.hooksPath .github/hooks || true",
@@ -0,0 +1,30 @@
1
+ # General Rules - Commit and PR
2
+
3
+ ## Commit Message Format
4
+
5
+ - Use Conventional Commits: `<type>(<scope>): <subject>`
6
+ - Allowed `type` values: `feat`, `fix`, `docs`, `refactor`, `test`, `chore`
7
+ - `scope`: module name (optional)
8
+ - Write the `subject` in concise imperative English
9
+
10
+ ## No Automatic Commits
11
+
12
+ - Never run `git commit` or `git add` automatically
13
+ - Enter the commit workflow only when the user explicitly requests a commit
14
+ - After finishing code changes, remind the user to use the appropriate TUI commit command
15
+
16
+ ## PR Rules
17
+
18
+ Before creating a PR, make sure:
19
+ - all tests pass
20
+ - code checks pass
21
+ - the build succeeds
22
+ - public API documentation is updated when applicable
23
+ - copyright header years are updated when applicable
24
+
25
+ ## Copyright Year Updates
26
+
27
+ - Run `date +%Y` first and do not hardcode the year
28
+ - Update examples:
29
+ - `2024-2025` -> `2024-2026`
30
+ - `2024` -> `2024-2026`
@@ -0,0 +1,30 @@
1
+ # 通用规则 - 提交与 PR
2
+
3
+ ## 提交信息格式
4
+
5
+ - 使用 Conventional Commits:`<type>(<scope>): <subject>`
6
+ - `type` 仅限:`feat`、`fix`、`docs`、`refactor`、`test`、`chore`
7
+ - `scope`:模块名(可省略)
8
+ - `subject` 使用英文祈使语气,保持简洁
9
+
10
+ ## 禁止自动提交
11
+
12
+ - 绝对不要自动执行 `git commit` 或 `git add`
13
+ - 仅在用户明确发起提交命令时才进入提交流程
14
+ - 完成代码修改后,提醒用户使用对应 TUI 的提交命令
15
+
16
+ ## PR 提交规则
17
+
18
+ 创建 PR 前必须确保:
19
+ - 所有测试通过
20
+ - 代码检查通过
21
+ - 构建成功
22
+ - 公共 API 已补充文档(如适用)
23
+ - 版权头年份已更新(如适用)
24
+
25
+ ## 版权年份更新
26
+
27
+ - 先运行 `date +%Y` 获取当前年份,不要硬编码
28
+ - 更新格式示例:
29
+ - `2024-2025` -> `2024-2026`
30
+ - `2024` -> `2024-2026`