claude-code-runner 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +559 -0
- package/README.zh-Hans.md +559 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +377 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +4 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +50 -0
- package/dist/config.js.map +1 -0
- package/dist/container.d.ts +23 -0
- package/dist/container.d.ts.map +1 -0
- package/dist/container.js +971 -0
- package/dist/container.js.map +1 -0
- package/dist/credentials.d.ts +8 -0
- package/dist/credentials.d.ts.map +1 -0
- package/dist/credentials.js +145 -0
- package/dist/credentials.js.map +1 -0
- package/dist/docker-config.d.ts +19 -0
- package/dist/docker-config.d.ts.map +1 -0
- package/dist/docker-config.js +101 -0
- package/dist/docker-config.js.map +1 -0
- package/dist/git/shadow-repository.d.ts +30 -0
- package/dist/git/shadow-repository.d.ts.map +1 -0
- package/dist/git/shadow-repository.js +645 -0
- package/dist/git/shadow-repository.js.map +1 -0
- package/dist/git-monitor.d.ts +15 -0
- package/dist/git-monitor.d.ts.map +1 -0
- package/dist/git-monitor.js +94 -0
- package/dist/git-monitor.js.map +1 -0
- package/dist/index.d.ts +22 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +221 -0
- package/dist/index.js.map +1 -0
- package/dist/types.d.ts +49 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +3 -0
- package/dist/types.js.map +1 -0
- package/dist/ui.d.ts +12 -0
- package/dist/ui.d.ts.map +1 -0
- package/dist/ui.js +82 -0
- package/dist/ui.js.map +1 -0
- package/dist/web-server-attach.d.ts +16 -0
- package/dist/web-server-attach.d.ts.map +1 -0
- package/dist/web-server-attach.js +249 -0
- package/dist/web-server-attach.js.map +1 -0
- package/dist/web-server.d.ts +27 -0
- package/dist/web-server.d.ts.map +1 -0
- package/dist/web-server.js +812 -0
- package/dist/web-server.js.map +1 -0
- package/package.json +77 -0
package/dist/container.js
@@ -0,0 +1,971 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ContainerManager = void 0;
const node_buffer_1 = require("node:buffer");
const node_child_process_1 = require("node:child_process");
const fs = __importStar(require("node:fs"));
const os = __importStar(require("node:os"));
const node_path_1 = __importDefault(require("node:path"));
const node_process_1 = __importDefault(require("node:process"));
const chalk_1 = __importDefault(require("chalk"));
const tar_stream_1 = __importDefault(require("tar-stream"));
class ContainerManager {
    docker;
    config;
    containers = new Map();
    constructor(docker, config) {
        this.docker = docker;
        this.config = config;
    }
    async start(containerConfig) {
        // Build or pull image
        await this.ensureImage();
        // Create container
        const container = await this.createContainer(containerConfig);
        this.containers.set(container.id, container);
        // Start container
        await container.start();
        console.log(chalk_1.default.green('✓ Container started'));
        // Copy working directory into container
        console.log(chalk_1.default.blue('• Copying files into container...'));
        try {
            await this._copyWorkingDirectory(container, containerConfig.workDir);
            console.log(chalk_1.default.green('✓ Files copied'));
            // Copy Claude configuration if it exists
            await this._copyClaudeConfig(container);
            // Copy git configuration if it exists
            await this._copyGitConfig(container);
        }
        catch (error) {
            console.error(chalk_1.default.red('✗ File copy failed:'), error);
            // Clean up container on failure
            await container.stop().catch(() => { });
            await container.remove().catch(() => { });
            this.containers.delete(container.id);
            throw error;
        }
        // Give the container a moment to initialize
        await new Promise(resolve => setTimeout(resolve, 500));
        console.log(chalk_1.default.green('✓ Container ready'));
        // Set up git branch and startup script
        await this.setupGitAndStartupScript(container, containerConfig.branchName, containerConfig.prFetchRef, containerConfig.remoteFetchRef);
        // Run setup commands
        await this.runSetupCommands(container);
        return container.id;
    }
    async ensureImage() {
        const imageName = this.config.dockerImage || 'claude-code-runner:latest';
        // Check if image already exists
        try {
            await this.docker.getImage(imageName).inspect();
            console.log(chalk_1.default.green(`✓ Using existing image: ${imageName}`));
            return;
        }
        catch (error) {
            // Image doesn't exist, decide whether to build or pull
        }
        // Determine if we should build (default) or pull
        const shouldBuild = this.config.buildImage !== false; // default to true
        const hasLocalDockerfile = this.config.dockerfile || fs.existsSync(node_path_1.default.join(__dirname, '..', 'docker', 'Dockerfile'));
        if (shouldBuild && hasLocalDockerfile) {
            console.log(chalk_1.default.blue(`• Building image: ${imageName}...`));
            // Check if we need to build from custom Dockerfile
            if (this.config.dockerfile) {
                await this.buildImage(this.config.dockerfile, imageName);
            }
            else {
                // Use default Dockerfile from docker/ directory
                const defaultDockerfilePath = node_path_1.default.join(__dirname, '..', 'docker', 'Dockerfile');
                console.log(chalk_1.default.blue(`• Using default Dockerfile: ${defaultDockerfilePath}`));
                await this.buildImage(defaultDockerfilePath, imageName);
            }
        }
        else if (!shouldBuild) {
            // Pull image from registry
            console.log(chalk_1.default.blue(`• Pulling image from registry: ${imageName}...`));
            await this.pullImage(imageName);
        }
        else {
            // No Dockerfile and shouldBuild is false, try to pull
            console.log(chalk_1.default.blue(`• Pulling image from registry: ${imageName}...`));
            try {
                await this.pullImage(imageName);
            }
            catch (error) {
                console.log(chalk_1.default.yellow('⚠ Failed to pull image, using inline Dockerfile'));
                await this.buildDefaultImage(imageName);
            }
        }
    }
    async pullImage(imageName) {
        // Parse image name to get registry info
        const stream = await this.docker.pull(imageName);
        return new Promise((resolve, reject) => {
            this.docker.modem.followProgress(stream, (err, res) => {
                if (err)
                    reject(err);
                else
                    resolve();
            }, (event) => {
                if (event.status) {
                    node_process_1.default.stdout.write(`${event.status}`);
                    if (event.progress)
                        node_process_1.default.stdout.write(` ${event.progress}`);
                    node_process_1.default.stdout.write('\n');
                }
            });
        });
    }
    async buildDefaultImage(imageName) {
        const dockerfile = `
FROM docker.io/library/almalinux:10

# Install system dependencies
RUN dnf install -y epel-release && dnf install -y \
curl \\
git \\
openssh-clients \\
python3 \\
python3-pip \\
gcc \\
gcc-c++ \\
make \\
sudo \\
vim \\
jq \\
ca-certificates \\
gnupg2 \\
inotify-tools \\
rsync \\
&& dnf clean all

# Install Node.js 24.x
RUN curl -fsSL https://rpm.nodesource.com/setup_24.x | bash - \\
&& dnf install -y nodejs

# Install GitHub CLI
RUN dnf install -y 'dnf-command(config-manager)' \\
&& dnf config-manager --add-repo https://cli.github.com/packages/rpm/gh-cli.repo \\
&& dnf install -y gh

# Install Claude Code
RUN npm install -g @anthropic-ai/claude-code@latest

# Create a non-root user with sudo privileges
RUN useradd -m -s /bin/bash claude && \\
echo 'claude ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers && \\
usermod -aG wheel claude

# Create workspace directory and set ownership
RUN mkdir -p /workspace && \\
chown -R claude:claude /workspace

# Switch to non-root user
USER claude
WORKDIR /workspace

# Set up entrypoint
ENTRYPOINT ["/bin/bash", "-c"]
`;
        /*
RUN echo '#!/bin/bash\\n\\
# Allow the initial branch creation\\n\\
if [ ! -f /tmp/.branch-created ]; then\\n\\
/usr/bin/git "$@"\\n\\
if [[ "$1" == "checkout" ]] && [[ "$2" == "-b" ]]; then\\n\\
touch /tmp/.branch-created\\n\\
fi\\n\\
else\\n\\
# After initial branch creation, prevent switching\\n\\
if [[ "$1" == "checkout" ]] && [[ "$2" != "-b" ]]; then\\n\\
echo "Branch switching is disabled in claude-code-runner"\\n\\
exit 1\\n\\
fi\\n\\
if [[ "$1" == "switch" ]]; then\\n\\
echo "Branch switching is disabled in claude-code-runner"\\n\\
exit 1\\n\\
fi\\n\\
/usr/bin/git "$@"\\n\\
fi' > /usr/local/bin/git && \\
chmod +x /usr/local/bin/git
# Create startup script
RUN echo '#!/bin/bash\\n\\
echo "Waiting for attachment..."\\n\\
sleep 2\\n\\
cd /workspace\\n\\
git checkout -b "$1"\\n\\
echo "Starting Claude Code on branch $1..."\\n\\
exec claude --dangerously-skip-permissions' > /start-claude.sh && \\
chmod +x /start-claude.sh */
        // Build image from string
        const pack = tar_stream_1.default.pack();
        // Add Dockerfile to tar
        pack.entry({ name: 'Dockerfile' }, dockerfile, (err) => {
            if (err)
                throw err;
            pack.finalize();
        });
        // Convert to buffer for docker
        const chunks = [];
        pack.on('data', (chunk) => chunks.push(chunk));
        await new Promise((resolve) => {
            pack.on('end', resolve);
        });
        const tarBuffer = node_buffer_1.Buffer.concat(chunks);
        const buildStream = await this.docker.buildImage(tarBuffer, {
            t: imageName,
        });
        // Wait for build to complete
        await new Promise((resolve, reject) => {
            this.docker.modem.followProgress(buildStream, (err, res) => {
                if (err)
                    reject(err);
                else
                    resolve(res);
            }, (event) => {
                if (event.stream) {
                    node_process_1.default.stdout.write(event.stream);
                }
            });
        });
    }
    async buildImage(dockerfilePath, imageName) {
        const buildContext = node_path_1.default.dirname(dockerfilePath);
        const buildStream = await this.docker.buildImage({
            context: buildContext,
            src: [node_path_1.default.basename(dockerfilePath)],
        }, {
            dockerfile: node_path_1.default.basename(dockerfilePath),
            t: imageName,
        });
        await new Promise((resolve, reject) => {
            this.docker.modem.followProgress(buildStream, (err, res) => {
                if (err)
                    reject(err);
                else
                    resolve(res);
            }, (event) => {
                if (event.stream) {
                    node_process_1.default.stdout.write(event.stream);
                }
            });
        });
    }
    async createContainer(containerConfig) {
        const { credentials, workDir } = containerConfig;
        // Prepare environment variables
        const env = this.prepareEnvironment(credentials);
        // Prepare volumes
        const volumes = this.prepareVolumes(workDir, credentials);
        // Create container
        const container = await this.docker.createContainer({
            Image: this.config.dockerImage || 'claude-code-runner:latest',
            name: `${this.config.containerPrefix || 'claude-code-runner'}-${Date.now()}`,
            Env: env,
            HostConfig: {
                Binds: volumes,
                AutoRemove: false,
                NetworkMode: 'bridge',
            },
            WorkingDir: '/workspace',
            Cmd: ['/bin/bash', '-l'],
            AttachStdin: true,
            AttachStdout: true,
            AttachStderr: true,
            Tty: true,
            OpenStdin: true,
            StdinOnce: false,
        });
        return container;
    }
    prepareEnvironment(credentials) {
        const env = [];
        // Load environment variables from .env file if specified
        if (this.config.envFile) {
            try {
                const envFilePath = node_path_1.default.resolve(this.config.envFile);
                if (fs.existsSync(envFilePath)) {
                    console.log(chalk_1.default.blue(`• Loading environment from ${this.config.envFile}...`));
                    const envContent = fs.readFileSync(envFilePath, 'utf-8');
                    const lines = envContent.split('\n');
                    for (const line of lines) {
                        const trimmedLine = line.trim();
                        // Skip empty lines and comments
                        if (!trimmedLine || trimmedLine.startsWith('#')) {
                            continue;
                        }
                        // Skip lines without = sign
                        if (!trimmedLine.includes('=')) {
                            continue;
                        }
                        // Parse key=value, handling values with = signs
                        const firstEqualIndex = trimmedLine.indexOf('=');
                        const key = trimmedLine.substring(0, firstEqualIndex).trim();
                        let value = trimmedLine.substring(firstEqualIndex + 1).trim();
                        // Remove surrounding quotes if present
                        if ((value.startsWith('"') && value.endsWith('"'))
                            || (value.startsWith('\'') && value.endsWith('\''))) {
                            value = value.slice(1, -1);
                        }
                        if (key) {
                            env.push(`${key}=${value}`);
                        }
                    }
                    console.log(chalk_1.default.green(`✓ Loaded ${env.length} environment variables from ${this.config.envFile}`));
                }
                else {
                    console.log(chalk_1.default.yellow(`⚠ Environment file ${this.config.envFile} not found`));
                }
            }
            catch (error) {
                console.error(chalk_1.default.yellow(`⚠ Failed to load environment file ${this.config.envFile}:`), error);
            }
        }
        // Claude credentials from discovery
        if (credentials.claude) {
            switch (credentials.claude.type) {
                case 'api_key':
                    env.push(`ANTHROPIC_API_KEY=${credentials.claude.value}`);
                    break;
                case 'bedrock':
                    env.push('CLAUDE_CODE_USE_BEDROCK=1');
                    if (credentials.claude.region) {
                        env.push(`AWS_REGION=${credentials.claude.region}`);
                    }
                    break;
                case 'vertex':
                    env.push('CLAUDE_CODE_USE_VERTEX=1');
                    if (credentials.claude.project) {
                        env.push(`GOOGLE_CLOUD_PROJECT=${credentials.claude.project}`);
                    }
                    break;
            }
        }
        else if (node_process_1.default.env.ANTHROPIC_API_KEY) {
            // If no Claude credentials were discovered but ANTHROPIC_API_KEY is in environment, pass it through
            env.push(`ANTHROPIC_API_KEY=${node_process_1.default.env.ANTHROPIC_API_KEY}`);
        }
        // GitHub token - check multiple sources
        if (credentials.github?.token) {
            env.push(`GITHUB_TOKEN=${credentials.github.token}`);
        }
        else if (node_process_1.default.env.GITHUB_TOKEN) {
            // Pass through from environment
            env.push(`GITHUB_TOKEN=${node_process_1.default.env.GITHUB_TOKEN}`);
        }
        else if (node_process_1.default.env.GH_TOKEN) {
            // GitHub CLI uses GH_TOKEN
            env.push(`GITHUB_TOKEN=${node_process_1.default.env.GH_TOKEN}`);
            env.push(`GH_TOKEN=${node_process_1.default.env.GH_TOKEN}`);
        }
        // Pass through git author info if available
        if (node_process_1.default.env.GIT_AUTHOR_NAME) {
            env.push(`GIT_AUTHOR_NAME=${node_process_1.default.env.GIT_AUTHOR_NAME}`);
        }
        if (node_process_1.default.env.GIT_AUTHOR_EMAIL) {
            env.push(`GIT_AUTHOR_EMAIL=${node_process_1.default.env.GIT_AUTHOR_EMAIL}`);
        }
        if (node_process_1.default.env.GIT_COMMITTER_NAME) {
            env.push(`GIT_COMMITTER_NAME=${node_process_1.default.env.GIT_COMMITTER_NAME}`);
        }
        if (node_process_1.default.env.GIT_COMMITTER_EMAIL) {
            env.push(`GIT_COMMITTER_EMAIL=${node_process_1.default.env.GIT_COMMITTER_EMAIL}`);
        }
        // Additional config
        env.push('CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC=1');
        if (this.config.maxThinkingTokens) {
            env.push(`MAX_THINKING_TOKENS=${this.config.maxThinkingTokens}`);
        }
        if (this.config.bashTimeout) {
            env.push(`BASH_MAX_TIMEOUT_MS=${this.config.bashTimeout}`);
        }
        // Add custom environment variables
        if (this.config.environment) {
            Object.entries(this.config.environment).forEach(([key, value]) => {
                env.push(`${key}=${value}`);
            });
        }
        return env;
    }
    prepareVolumes(_workDir, _credentials) {
        // NO MOUNTING workspace - we'll copy files instead
        const volumes = [];
        // NO SSH mounting - we'll use GitHub tokens instead
        // Add custom volumes (legacy format)
        if (this.config.volumes) {
            volumes.push(...this.config.volumes);
        }
        // Add mount configurations (new format)
        if (this.config.mounts) {
            for (const mount of this.config.mounts) {
                try {
                    // Expand environment variables in source path
                    let expandedSource = mount.source.replace(/\$HOME/g, os.homedir());
                    expandedSource = expandedSource.replace(/\$(\w+)/g, (match, varName) => {
                        return node_process_1.default.env[varName] || match;
                    });
                    // Resolve the source path
                    const sourcePath = node_path_1.default.isAbsolute(expandedSource)
                        ? expandedSource
                        : node_path_1.default.resolve(node_process_1.default.cwd(), expandedSource);
                    // Check if source exists
                    if (!fs.existsSync(sourcePath)) {
                        console.log(chalk_1.default.yellow(`⚠ Mount source does not exist: ${mount.source} (resolved to ${sourcePath})`));
                        continue;
                    }
                    // Expand environment variables in target path
                    let expandedTarget = mount.target.replace(/\$HOME/g, '/home/claude');
                    expandedTarget = expandedTarget.replace(/\$(\w+)/g, (match, varName) => {
                        // For container paths, we need to use container's environment
                        if (varName === 'HOME')
                            return '/home/claude';
                        return match; // Keep other variables as-is
                    });
                    // Ensure target path is absolute
                    const targetPath = node_path_1.default.isAbsolute(expandedTarget)
                        ? expandedTarget
                        : node_path_1.default.join('/workspace', expandedTarget);
                    // Create mount string
                    const mountString = mount.readonly
                        ? `${sourcePath}:${targetPath}:ro`
                        : `${sourcePath}:${targetPath}`;
                    volumes.push(mountString);
                    console.log(chalk_1.default.blue(`✓ Mounting ${mount.source} → ${targetPath}${mount.readonly ? ' (read-only)' : ''}`));
                }
                catch (error) {
                    console.error(chalk_1.default.yellow(`⚠ Failed to process mount ${mount.source}:`), error);
                }
            }
        }
        return volumes;
    }
    async _copyWorkingDirectory(container, workDir) {
        // Helper function to get tar flags safely
        const getTarFlags = () => {
            try {
                // Test if --no-xattrs is supported by checking tar help
                (0, node_child_process_1.execSync)('tar --help 2>&1 | grep -q no-xattrs', { stdio: 'pipe' });
                return '--no-xattrs';
            }
            catch {
                // --no-xattrs not supported, use standard tar
                return '';
            }
        };
        try {
            // Get list of git-tracked files (including uncommitted changes)
            const trackedFiles = (0, node_child_process_1.execSync)('git ls-files', {
                cwd: workDir,
                encoding: 'utf-8',
            })
                .trim()
                .split('\n')
                .filter((f) => f);
            // Get list of untracked files that aren't ignored (only if includeUntracked is true)
            let untrackedFiles = [];
            if (this.config.includeUntracked) {
                untrackedFiles = (0, node_child_process_1.execSync)('git ls-files --others --exclude-standard', {
                    cwd: workDir,
                    encoding: 'utf-8',
                })
                    .trim()
                    .split('\n')
                    .filter((f) => f);
            }
            // Combine all files
            const allFiles = [...trackedFiles, ...untrackedFiles];
            console.log(chalk_1.default.blue(`• Copying ${allFiles.length} files...`));
            // Create tar archive using git archive for tracked files + untracked files
            const tarFile = `/tmp/claude-runner-${Date.now()}.tar`;
            // First create archive of tracked files using git archive
            (0, node_child_process_1.execSync)(`git archive --format=tar -o "${tarFile}" HEAD`, {
                cwd: workDir,
                stdio: 'pipe',
            });
            // Add untracked files if any
            if (untrackedFiles.length > 0) {
                // Create a file list for tar
                const fileListPath = `/tmp/claude-runner-files-${Date.now()}.txt`;
                fs.writeFileSync(fileListPath, untrackedFiles.join('\n'));
                // Append untracked files to the tar
                (0, node_child_process_1.execSync)(`tar -rf "${tarFile}" --files-from="${fileListPath}"`, {
                    cwd: workDir,
                    stdio: 'pipe',
                });
                fs.unlinkSync(fileListPath);
            }
            // Read and copy the tar file in chunks to avoid memory issues
            const stream = fs.createReadStream(tarFile);
            // Add timeout for putArchive
            const uploadPromise = container.putArchive(stream, {
                path: '/workspace',
            });
            // Wait for both upload and stream to complete
            await Promise.all([
                uploadPromise,
                new Promise((resolve, reject) => {
                    stream.on('end', () => {
                        resolve();
                    });
                    stream.on('error', reject);
                }),
            ]);
            // Clean up
            fs.unlinkSync(tarFile);
            // Also copy .git directory to preserve git history
            console.log(chalk_1.default.blue('• Copying git history...'));
            const gitTarFile = `/tmp/claude-runner-git-${Date.now()}.tar`;
            // Exclude macOS resource fork files and .DS_Store when creating git archive
            // Also strip extended attributes to prevent macOS xattr issues in Docker
            const tarFlags = getTarFlags();
            // On macOS, also exclude extended attributes that cause Docker issues
            const additionalFlags = node_process_1.default.platform === 'darwin' ? '--no-xattrs --no-fflags' : '';
            const combinedFlags = `${tarFlags} ${additionalFlags}`.trim();
            (0, node_child_process_1.execSync)(`tar -cf "${gitTarFile}" --exclude="._*" --exclude=".DS_Store" ${combinedFlags} .git`, {
                cwd: workDir,
                stdio: 'pipe',
            });
            try {
                const gitStream = fs.createReadStream(gitTarFile);
                // Upload git archive
                await container.putArchive(gitStream, {
                    path: '/workspace',
                });
                // Clean up
                fs.unlinkSync(gitTarFile);
            }
            catch (error) {
                console.error(chalk_1.default.red('✗ Git history copy failed:'), error);
                // Clean up the tar file even if upload failed
                try {
                    fs.unlinkSync(gitTarFile);
                }
                catch (e) {
                    // Ignore cleanup errors
                }
                throw error;
            }
        }
        catch (error) {
            console.error(chalk_1.default.red('✗ Failed to copy files:'), error);
            throw error;
        }
    }
    async _copyClaudeConfig(container) {
        // Helper function to get tar flags safely
        const getTarFlags = () => {
            try {
                // Test if --no-xattrs is supported by checking tar help
                (0, node_child_process_1.execSync)('tar --help 2>&1 | grep -q no-xattrs', { stdio: 'pipe' });
                return '--no-xattrs';
            }
            catch {
                // --no-xattrs not supported, use standard tar
                return '';
            }
        };
        try {
            // First, try to get credentials from macOS Keychain if on Mac
            if (node_process_1.default.platform === 'darwin') {
                try {
                    console.log(chalk_1.default.blue('• Checking macOS Keychain for Claude credentials...'));
                    const keychainCreds = (0, node_child_process_1.execSync)('security find-generic-password -s "Claude Code-credentials" -w', {
                        encoding: 'utf-8',
                        stdio: ['pipe', 'pipe', 'pipe'], // Suppress stderr
                    }).trim();
                    if (keychainCreds) {
                        console.log(chalk_1.default.green('✓ Found Claude credentials in macOS Keychain'));
                        // Create .claude directory structure
                        const claudeDirTar = `/tmp/claude-keychain-${Date.now()}.tar`;
                        const pack = tar_stream_1.default.pack();
                        // Add .credentials.json to the tar
                        pack.entry({ name: '.claude/.credentials.json', mode: 0o600 }, keychainCreds, (err) => {
                            if (err)
                                throw err;
                            pack.finalize();
                        });
                        const chunks = [];
                        pack.on('data', (chunk) => chunks.push(chunk));
                        await new Promise((resolve, reject) => {
                            pack.on('end', () => {
                                fs.writeFileSync(claudeDirTar, node_buffer_1.Buffer.concat(chunks));
                                resolve();
                            });
                            pack.on('error', reject);
                        });
                        const stream = fs.createReadStream(claudeDirTar);
                        await container.putArchive(stream, {
                            path: '/home/claude',
                        });
                        fs.unlinkSync(claudeDirTar);
                        // Fix permissions
                        await container
                            .exec({
                                Cmd: [
                                    '/bin/bash',
                                    '-c',
                                    'sudo mkdir -p /home/claude/.claude && sudo chown -R claude:claude /home/claude/.claude && sudo chmod 700 /home/claude/.claude && sudo chmod 600 /home/claude/.claude/.credentials.json',
                                ],
                                AttachStdout: false,
                                AttachStderr: false,
                            })
                            .then(exec => exec.start({}));
                        console.log(chalk_1.default.green('✓ Claude Keychain credentials copied to container'));
                    }
                }
                catch (error) {
                    // Keychain access failed or credentials not found - not critical
                    console.log(chalk_1.default.yellow('• No Claude credentials found in macOS Keychain'));
                }
            }
            // Copy .claude.json if it exists
            const claudeJsonPath = node_path_1.default.join(os.homedir(), '.claude.json');
            if (fs.existsSync(claudeJsonPath)) {
                console.log(chalk_1.default.blue('• Copying .claude.json...'));
                const configContent = fs.readFileSync(claudeJsonPath, 'utf-8');
                const tarFile = `/tmp/claude-json-${Date.now()}.tar`;
                const pack = tar_stream_1.default.pack();
                pack.entry({ name: '.claude.json', mode: 0o644 }, configContent, (err) => {
                    if (err)
                        throw err;
                    pack.finalize();
                });
                const chunks = [];
                pack.on('data', (chunk) => chunks.push(chunk));
                await new Promise((resolve, reject) => {
                    pack.on('end', () => {
                        fs.writeFileSync(tarFile, node_buffer_1.Buffer.concat(chunks));
                        resolve();
                    });
                    pack.on('error', reject);
                });
                const stream = fs.createReadStream(tarFile);
                await container.putArchive(stream, {
                    path: '/home/claude',
                });
                fs.unlinkSync(tarFile);
                // Fix permissions
                await container
                    .exec({
                        Cmd: [
                            '/bin/bash',
                            '-c',
                            'sudo chown claude:claude /home/claude/.claude.json && chmod 644 /home/claude/.claude.json',
                        ],
                        AttachStdout: false,
                        AttachStderr: false,
                    })
                    .then(exec => exec.start({}));
            }
            // Copy .claude directory if it exists (but skip if we already copied from Keychain)
            const claudeDir = node_path_1.default.join(os.homedir(), '.claude');
            if (fs.existsSync(claudeDir)
                && fs.statSync(claudeDir).isDirectory()
                && node_process_1.default.platform !== 'darwin') {
                console.log(chalk_1.default.blue('• Copying .claude directory...'));
                const tarFile = `/tmp/claude-dir-${Date.now()}.tar`;
                const tarFlags = getTarFlags();
                // On macOS, also exclude extended attributes that cause Docker issues
                const additionalFlags = node_process_1.default.platform === 'darwin' ? '--no-xattrs --no-fflags' : '';
                const combinedFlags = `${tarFlags} ${additionalFlags}`.trim();
                (0, node_child_process_1.execSync)(`tar -cf "${tarFile}" ${combinedFlags} -C "${os.homedir()}" .claude`, {
                    stdio: 'pipe',
                });
                const stream = fs.createReadStream(tarFile);
                await container.putArchive(stream, {
                    path: '/home/claude',
                });
                fs.unlinkSync(tarFile);
                // Fix permissions recursively
                await container
                    .exec({
                        Cmd: [
                            '/bin/bash',
                            '-c',
                            'sudo chown -R claude:claude /home/claude/.claude && chmod -R 755 /home/claude/.claude',
                        ],
                        AttachStdout: false,
                        AttachStderr: false,
                    })
                    .then(exec => exec.start({}));
            }
            console.log(chalk_1.default.green('✓ Claude configuration copied successfully'));
        }
        catch (error) {
            console.error(chalk_1.default.yellow('⚠ Failed to copy Claude configuration:'), error);
            // Don't throw - this is not critical for container operation
        }
    }
    async _copyGitConfig(container) {
        const gitConfigPath = node_path_1.default.join(os.homedir(), '.gitconfig');
        try {
            // Check if the git config file exists
            if (!fs.existsSync(gitConfigPath)) {
                return; // No git config to copy
            }
            console.log(chalk_1.default.blue('• Copying git configuration...'));
            // Read the git config file
            const configContent = fs.readFileSync(gitConfigPath, 'utf-8');
            // Create a temporary tar file with the git config
            const tarFile = `/tmp/git-config-${Date.now()}.tar`;
            const pack = tar_stream_1.default.pack();
            // Add the .gitconfig file to the tar
            pack.entry({ name: '.gitconfig', mode: 0o644 }, configContent, (err) => {
                if (err)
                    throw err;
                pack.finalize();
            });
            // Write the tar to a file
            const chunks = [];
            pack.on('data', (chunk) => chunks.push(chunk));
            await new Promise((resolve, reject) => {
                pack.on('end', () => {
                    fs.writeFileSync(tarFile, node_buffer_1.Buffer.concat(chunks));
                    resolve();
                });
                pack.on('error', reject);
            });
            // Copy the tar file to the container's claude user home directory
            const stream = fs.createReadStream(tarFile);
            await container.putArchive(stream, {
                path: '/home/claude', // Copy to claude user's home directory
            });
            // Clean up
            fs.unlinkSync(tarFile);
            // Fix permissions on the copied file
            const fixPermsExec = await container.exec({
                Cmd: [
                    '/bin/bash',
                    '-c',
                    'sudo chown claude:claude /home/claude/.gitconfig',
                ],
                AttachStdout: true,
                AttachStderr: true,
            });
            const permStream = await fixPermsExec.start({});
            // Consume the stream to allow it to complete
            await new Promise((resolve, reject) => {
                permStream.on('data', () => { }); // Consume data
                permStream.on('end', resolve);
                permStream.on('error', reject);
            });
            console.log(chalk_1.default.green('✓ Git configuration copied successfully'));
        }
        catch (error) {
            console.error(chalk_1.default.yellow('⚠ Failed to copy git configuration:'), error);
            // Don't throw - this is not critical for container operation
        }
    }
    async setupGitAndStartupScript(container, branchName, prFetchRef, remoteFetchRef) {
        console.log(chalk_1.default.blue('• Setting up git branch and startup script...'));
        // Determine what to show in the web UI
        const defaultShell = this.config.defaultShell || 'claude';
        // Startup script that keeps session alive
        const startupScript = defaultShell === 'claude'
            ? `#!/bin/bash
echo "🚀 Starting Claude Code..."
echo "Press Ctrl+C to drop to bash shell"
echo ""

# Run Claude but don't replace the shell process
claude --dangerously-skip-permissions

# After Claude exits, drop to bash
echo ""
echo "Claude exited. You're now in bash shell."
echo "Type 'claude --dangerously-skip-permissions' to restart Claude"
echo "Type 'exit' to end the session"
echo ""
exec /bin/bash`
            : `#!/bin/bash
echo "Welcome to Claude Code Sandbox!"
echo "Type 'claude --dangerously-skip-permissions' to start Claude Code"
echo "Type 'exit' to end the session"
echo ""
exec /bin/bash`;
        const setupExec = await container.exec({
            Cmd: [
                '/bin/bash',
                '-c',
                `
cd /workspace &&
sudo chown -R claude:claude /workspace &&
git config --global --add safe.directory /workspace &&
# Clean up macOS resource fork files in git pack directory
find .git/objects/pack -name "._pack-*.idx" -type f -delete 2>/dev/null || true &&
# Configure git to use GitHub token if available
if [ -n "$GITHUB_TOKEN" ]; then
git config --global url."https://\${GITHUB_TOKEN}@github.com/".insteadOf "https://github.com/"
git config --global url."https://\${GITHUB_TOKEN}@github.com/".insteadOf "git@github.com:"
echo "✓ Configured git to use GitHub token"
fi &&
# Handle different branch setup scenarios
if [ -n "${prFetchRef || ''}" ]; then
echo "• Fetching PR branch..." &&
git fetch origin ${prFetchRef} &&
if git show-ref --verify --quiet refs/heads/"${branchName}"; then
git checkout "${branchName}" &&
echo "✓ Switched to existing PR branch: ${branchName}"
else
git checkout "${branchName}" &&
echo "✓ Checked out PR branch: ${branchName}"
fi
elif [ -n "${remoteFetchRef || ''}" ]; then
echo "• Fetching remote branch..." &&
git fetch origin &&
if git show-ref --verify --quiet refs/heads/"${branchName}"; then
git checkout "${branchName}" &&
git pull origin "${branchName}" &&
echo "✓ Switched to existing remote branch: ${branchName}"
else
git checkout -b "${branchName}" "${remoteFetchRef}" &&
echo "✓ Created local branch from remote: ${branchName}"
fi
else
# Regular branch creation
if git show-ref --verify --quiet refs/heads/"${branchName}"; then
git checkout "${branchName}" &&
echo "✓ Switched to existing branch: ${branchName}"
else
git checkout -b "${branchName}" &&
echo "✓ Created new branch: ${branchName}"
fi
fi &&
cat > /home/claude/start-session.sh << 'EOF'
${startupScript}
EOF
chmod +x /home/claude/start-session.sh &&
echo "✓ Startup script created"
`,
            ],
            AttachStdout: true,
            AttachStderr: true,
        });
        const setupStream = await setupExec.start({});
        // Wait for setup to complete
        await new Promise((resolve, reject) => {
            let output = '';
            setupStream.on('data', (chunk) => {
                output += chunk.toString();
                node_process_1.default.stdout.write(chunk);
            });
            setupStream.on('end', () => {
                if ((output.includes('✓ Created new branch')
                    || output.includes('✓ Switched to existing branch')
                    || output.includes('✓ Switched to existing remote branch')
                    || output.includes('✓ Switched to existing PR branch')
                    || output.includes('✓ Checked out PR branch')
                    || output.includes('✓ Created local branch from remote'))
                    && output.includes('✓ Startup script created')) {
                    resolve();
                }
                else {
                    reject(new Error('Setup failed'));
                }
            });
            setupStream.on('error', reject);
        });
        console.log(chalk_1.default.green('✓ Git and startup script setup completed'));
    }
    async runSetupCommands(container) {
        // Execute custom setup commands if provided
        if (this.config.setupCommands && this.config.setupCommands.length > 0) {
            console.log(chalk_1.default.blue('• Running custom setup commands...'));
            console.log(chalk_1.default.blue(` Total commands to run: ${this.config.setupCommands.length}`));
            for (let i = 0; i < this.config.setupCommands.length; i++) {
                const command = this.config.setupCommands[i];
                console.log(chalk_1.default.yellow(`\n[${i + 1}/${this.config.setupCommands.length}] Running command:`));
                console.log(chalk_1.default.white(` ${command}`));
                const cmdExec = await container.exec({
                    Cmd: ['/bin/bash', '-c', command],
                    AttachStdout: true,
                    AttachStderr: true,
                    WorkingDir: '/workspace',
                    User: 'claude',
                });
                const cmdStream = await cmdExec.start({});
                // Wait for command to complete
                await new Promise((resolve, reject) => {
                    let hasError = false;
                    cmdStream.on('data', (chunk) => {
                        node_process_1.default.stdout.write(` > ${chunk.toString()}`);
                    });
                    cmdStream.on('end', async () => {
                        // Check exit code
                        try {
                            const info = await cmdExec.inspect();
                            if (info.ExitCode !== 0) {
                                console.error(chalk_1.default.red(`✗ Command failed with exit code ${info.ExitCode}`));
                                hasError = true;
                            }
                            else {
                                console.log(chalk_1.default.green(`✓ Command completed successfully`));
                            }
                        }
                        catch (e) {
                            // Ignore inspection errors
                        }
                        if (hasError && this.config.setupCommands?.includes('set -e')) {
                            reject(new Error(`Setup command failed: ${command}`));
                        }
                        else {
                            resolve();
                        }
                    });
                    cmdStream.on('error', reject);
                });
            }
            console.log(chalk_1.default.green('✓ All setup commands completed'));
        }
    }
    async cleanup() {
        for (const [, container] of this.containers) {
            try {
                await container.stop();
                await container.remove();
            }
            catch (error) {
                // Container might already be stopped
            }
        }
        this.containers.clear();
    }
}
exports.ContainerManager = ContainerManager;
//# sourceMappingURL=container.js.map
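
For orientation, the sketch below shows one way the exported ContainerManager could be driven. It is not part of the published package: the Docker client is assumed to be a dockerode instance (the docker.pull, docker.buildImage, docker.createContainer, and docker.modem.followProgress calls above match dockerode's surface), the require path is a guess at how the package exposes the class, and the config/containerConfig fields are limited to those container.js actually reads; everything else is illustrative.

// Hypothetical usage sketch (CommonJS), not shipped with the package.
const Docker = require('dockerode'); // assumed client; container.js only requires a dockerode-compatible object
const { ContainerManager } = require('claude-code-runner'); // assumed export path; the real entry point may differ

async function main() {
    const docker = new Docker(); // default local Docker socket
    const manager = new ContainerManager(docker, {
        dockerImage: 'claude-code-runner:latest', // tag used by ensureImage()/createContainer() when set
        buildImage: true,                         // ensureImage() builds unless this is explicitly false
        includeUntracked: false,                  // _copyWorkingDirectory() then copies only git-tracked files
        setupCommands: ['npm ci'],                // runSetupCommands() runs these in /workspace as user "claude"
    });
    // start() builds or pulls the image, creates and starts the container, copies the
    // working tree plus .git history into /workspace, checks out branchName, and
    // returns the container id.
    const containerId = await manager.start({
        workDir: process.cwd(),
        branchName: 'claude/example-task',
        credentials: {}, // with no discovered credentials, ANTHROPIC_API_KEY / GITHUB_TOKEN are read from the host environment
    });
    console.log('container started:', containerId);
    // cleanup() stops and removes every container this manager started.
    await manager.cleanup();
}

main().catch((error) => {
    console.error(error);
    process.exit(1);
});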