@vm0/runner 3.0.5 → 3.0.6
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/index.js +140 -118
- package/package.json +1 -1
package/index.js
CHANGED
@@ -1103,7 +1103,7 @@ var FirecrackerVM = class {
       mem_size_mib: this.config.memoryMb
     });
     const networkBootArgs = generateNetworkBootArgs(this.networkConfig);
-    const bootArgs = `console=ttyS0 reboot=k panic=1 pci=off nomodules random.trust_cpu=on quiet loglevel=0 nokaslr audit=0 numa=off mitigations=off noresume init=/sbin/
+    const bootArgs = `console=ttyS0 reboot=k panic=1 pci=off nomodules random.trust_cpu=on quiet loglevel=0 nokaslr audit=0 numa=off mitigations=off noresume init=/sbin/vm-init ${networkBootArgs}`;
     console.log(`[VM ${this.config.vmId}] Boot args: ${bootArgs}`);
     await this.client.setBootSource({
       kernel_image_path: this.config.kernelPath,
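The only change in this hunk appends the generated network parameters to the kernel command line (the removed line is cut off mid-string by the diff viewer, so only the tail differs visibly). generateNetworkBootArgs itself is not shown in this diff; purely as an illustrative sketch, a helper like it could emit the kernel's standard ip= parameter, whose static form is ip=<client>:<server>:<gateway>:<netmask>:<hostname>:<device>:<autoconf>:

    // Hypothetical sketch only: the real generateNetworkBootArgs is defined
    // elsewhere in this bundle and may differ; the field names are invented.
    function sketchNetworkBootArgs(net) {
      // Empty fields fall back to kernel defaults; "off" disables autoconf.
      return `ip=${net.guestIp}::${net.gatewayIp}:${net.netmask}::eth0:off`;
    }
    // -> "ip=172.16.0.2::172.16.0.1:255.255.255.252::eth0:off"
    console.log(sketchNetworkBootArgs({
      guestIp: "172.16.0.2",
      gatewayIp: "172.16.0.1",
      netmask: "255.255.255.252"
    }));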
@@ -1224,16 +1224,79 @@ var FirecrackerVM = class {
 // src/lib/firecracker/vsock.ts
 import * as net from "net";
 import * as fs4 from "fs";
-import * as crypto from "crypto";
 var VSOCK_PORT = 1e3;
 var HEADER_SIZE = 4;
-var MAX_MESSAGE_SIZE = 1024 * 1024;
+var MAX_MESSAGE_SIZE = 16 * 1024 * 1024;
 var DEFAULT_EXEC_TIMEOUT_MS = 3e5;
-
-
+var MSG_READY = 0;
+var MSG_PING = 1;
+var MSG_PONG = 2;
+var MSG_EXEC = 3;
+var MSG_WRITE_FILE = 5;
+var MSG_ERROR = 255;
+var FLAG_SUDO = 1;
+function encode(type, seq, payload = Buffer.alloc(0)) {
+  const body = Buffer.alloc(5 + payload.length);
+  body.writeUInt8(type, 0);
+  body.writeUInt32BE(seq, 1);
+  payload.copy(body, 5);
   const header = Buffer.alloc(HEADER_SIZE);
-  header.writeUInt32BE(
-  return Buffer.concat([header,
+  header.writeUInt32BE(body.length, 0);
+  return Buffer.concat([header, body]);
+}
+function encodeExecPayload(command, timeoutMs) {
+  const cmdBuf = Buffer.from(command, "utf-8");
+  const payload = Buffer.alloc(8 + cmdBuf.length);
+  payload.writeUInt32BE(timeoutMs, 0);
+  payload.writeUInt32BE(cmdBuf.length, 4);
+  cmdBuf.copy(payload, 8);
+  return payload;
+}
+function encodeWriteFilePayload(path6, content, sudo) {
+  const pathBuf = Buffer.from(path6, "utf-8");
+  if (pathBuf.length > 65535) {
+    throw new Error(`Path too long: ${pathBuf.length} bytes (max 65535)`);
+  }
+  const payload = Buffer.alloc(2 + pathBuf.length + 1 + 4 + content.length);
+  let offset = 0;
+  payload.writeUInt16BE(pathBuf.length, offset);
+  offset += 2;
+  pathBuf.copy(payload, offset);
+  offset += pathBuf.length;
+  payload.writeUInt8(sudo ? FLAG_SUDO : 0, offset);
+  offset += 1;
+  payload.writeUInt32BE(content.length, offset);
+  offset += 4;
+  content.copy(payload, offset);
+  return payload;
+}
+function decodeExecResult(payload) {
+  if (payload.length < 8) {
+    return { exitCode: 1, stdout: "", stderr: "Invalid exec_result payload" };
+  }
+  const exitCode = payload.readInt32BE(0);
+  const stdoutLen = payload.readUInt32BE(4);
+  const stdout = payload.subarray(8, 8 + stdoutLen).toString("utf-8");
+  const stderrLenOffset = 8 + stdoutLen;
+  const stderrLen = payload.readUInt32BE(stderrLenOffset);
+  const stderr = payload.subarray(stderrLenOffset + 4, stderrLenOffset + 4 + stderrLen).toString("utf-8");
+  return { exitCode, stdout, stderr };
+}
+function decodeWriteFileResult(payload) {
+  if (payload.length < 3) {
+    return { success: false, error: "Invalid write_file_result payload" };
+  }
+  const success = payload.readUInt8(0) === 1;
+  const errorLen = payload.readUInt16BE(1);
+  const error = payload.subarray(3, 3 + errorLen).toString("utf-8");
+  return { success, error };
+}
+function decodeError(payload) {
+  if (payload.length < 2) {
+    return "Invalid error payload";
+  }
+  const errorLen = payload.readUInt16BE(0);
+  return payload.subarray(2, 2 + errorLen).toString("utf-8");
 }
 var Decoder = class {
   buf = Buffer.alloc(0);
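Taken together, the added helpers define the new wire format: a 4-byte big-endian length prefix, then a 5-byte body header (1-byte message type, 4-byte big-endian sequence number), then the payload. The per-message cap also rises from 1 MiB to 16 MiB. A self-contained sketch of that framing, mirroring the added encode() (not an import from the package):

    // Frame layout: [u32 BE length][u8 type][u32 BE seq][payload], where
    // length counts the 5-byte type+seq prefix plus the payload.
    const HEADER_SIZE = 4;
    function encodeFrame(type, seq, payload = Buffer.alloc(0)) {
      const body = Buffer.alloc(5 + payload.length);
      body.writeUInt8(type, 0);
      body.writeUInt32BE(seq, 1);
      payload.copy(body, 5);
      const header = Buffer.alloc(HEADER_SIZE);
      header.writeUInt32BE(body.length, 0);
      return Buffer.concat([header, body]);
    }
    const frame = encodeFrame(3 /* MSG_EXEC */, 42, Buffer.from("echo hi"));
    console.log(frame.length);          // 4 + 5 + 7 = 16
    console.log(frame.readUInt32BE(0)); // 12 = body length (5 + 7)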
@@ -1241,12 +1304,20 @@ var Decoder = class {
     this.buf = Buffer.concat([this.buf, data]);
     const messages = [];
     while (this.buf.length >= HEADER_SIZE) {
-      const
-      if (
-
+      const length = this.buf.readUInt32BE(0);
+      if (length > MAX_MESSAGE_SIZE) {
+        throw new Error(`Message too large: ${length}`);
+      }
+      if (length < 5) {
+        throw new Error(`Message too small: ${length}`);
+      }
+      const total = HEADER_SIZE + length;
       if (this.buf.length < total) break;
-      const
-
+      const body = this.buf.subarray(HEADER_SIZE, total);
+      const type = body.readUInt8(0);
+      const seq = body.readUInt32BE(1);
+      const payload = body.subarray(5);
+      messages.push({ type, seq, payload });
       this.buf = this.buf.subarray(total);
     }
     return messages;
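Because the vsock connection is a byte stream, the rewritten Decoder accumulates chunks and only emits a message once HEADER_SIZE plus the declared length has arrived, throwing on frames above the 16 MiB cap or below the 5-byte type+seq minimum. A minimal sketch of the same reassembly (the size bounds checks are elided for brevity):

    class MiniDecoder {
      buf = Buffer.alloc(0);
      decode(data) {
        this.buf = Buffer.concat([this.buf, data]);
        const messages = [];
        while (this.buf.length >= 4) {
          const length = this.buf.readUInt32BE(0);
          const total = 4 + length;
          if (this.buf.length < total) break; // wait for the rest of the frame
          const body = this.buf.subarray(4, total);
          messages.push({
            type: body.readUInt8(0),
            seq: body.readUInt32BE(1),
            payload: body.subarray(5)
          });
          this.buf = this.buf.subarray(total);
        }
        return messages;
      }
    }
    const d = new MiniDecoder();
    // A PONG frame (length=5, type=2, seq=7) split across two socket chunks:
    const frame = Buffer.from([0, 0, 0, 5, 2, 0, 0, 0, 7]);
    console.log(d.decode(frame.subarray(0, 6)).length); // 0 (frame incomplete)
    console.log(d.decode(frame.subarray(6)));           // one { type: 2, seq: 7 } message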
@@ -1256,18 +1327,28 @@ var VsockClient = class {
   vsockPath;
   socket = null;
   connected = false;
+  nextSeq = 1;
   pendingRequests = /* @__PURE__ */ new Map();
   constructor(vsockPath) {
     this.vsockPath = vsockPath;
   }
+  /**
+   * Get next sequence number
+   */
+  getNextSeq() {
+    const seq = this.nextSeq;
+    this.nextSeq = this.nextSeq + 1 & 4294967295;
+    if (this.nextSeq === 0) this.nextSeq = 1;
+    return seq;
+  }
   /**
    * Handle incoming message and route to pending request
    */
   handleMessage(msg) {
-    const pending = this.pendingRequests.get(msg.
+    const pending = this.pendingRequests.get(msg.seq);
     if (pending) {
       clearTimeout(pending.timeout);
-      this.pendingRequests.delete(msg.
+      this.pendingRequests.delete(msg.seq);
       pending.resolve(msg);
     }
   }
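The new nextSeq counter replaces what appears to have been a crypto-generated request id (the crypto import was dropped above). The arithmetic wraps at 2^32 and skips 0, so a live request's key can never be 0. The same logic, runnable on its own:

    let nextSeq = 0xFFFFFFFF; // start at the wrap point for demonstration
    function getNextSeq() {
      const seq = nextSeq;
      nextSeq = (nextSeq + 1) >>> 0; // the diff writes `& 4294967295`; `>>> 0` stays unsigned
      if (nextSeq === 0) nextSeq = 1;
      return seq;
    }
    console.log(getNextSeq()); // 4294967295
    console.log(getNextSeq()); // 1 (zero was skipped)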
@@ -1278,19 +1359,14 @@ var VsockClient = class {
     if (!this.connected || !this.socket) {
       throw new Error("Not connected - call waitForGuestConnection() first");
     }
-    const
-    const msg = { type, id, payload };
+    const seq = this.getNextSeq();
     return new Promise((resolve, reject) => {
       const timeout = setTimeout(() => {
-        this.pendingRequests.delete(
-        reject(new Error(`Request timeout: ${type}`));
+        this.pendingRequests.delete(seq);
+        reject(new Error(`Request timeout: type=0x${type.toString(16)}`));
       }, timeoutMs);
-      this.pendingRequests.set(
-
-        reject,
-        timeout
-      });
-      this.socket.write(encode(msg));
+      this.pendingRequests.set(seq, { resolve, reject, timeout });
+      this.socket.write(encode(type, seq, payload));
     });
   }
   /**
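request() now keys each in-flight call by its seq in pendingRequests, and the timeout path deletes the entry so a late reply is silently dropped by handleMessage. Condensed into a standalone sketch (socket I/O omitted):

    const pendingRequests = new Map();
    function request(seq, timeoutMs) {
      return new Promise((resolve, reject) => {
        const timeout = setTimeout(() => {
          pendingRequests.delete(seq); // a late reply will now be ignored
          reject(new Error(`Request timeout: seq=${seq}`));
        }, timeoutMs);
        pendingRequests.set(seq, { resolve, reject, timeout });
      });
    }
    function handleMessage(msg) {
      const pending = pendingRequests.get(msg.seq);
      if (pending) {
        clearTimeout(pending.timeout); // cancel the pending timer
        pendingRequests.delete(msg.seq);
        pending.resolve(msg);
      }
    }
    request(1, 1000).then((msg) => console.log("reply type:", msg.type));
    handleMessage({ type: 2, seq: 1, payload: Buffer.alloc(0) }); // settles it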
@@ -1299,25 +1375,21 @@ var VsockClient = class {
   async exec(command, timeoutMs) {
     const actualTimeout = timeoutMs ?? DEFAULT_EXEC_TIMEOUT_MS;
     try {
+      const payload = encodeExecPayload(command, actualTimeout);
       const response = await this.request(
-
-
+        MSG_EXEC,
+        payload,
         actualTimeout + 5e3
         // Add buffer for network latency
       );
-      if (response.type ===
-        const errorPayload = response.payload;
+      if (response.type === MSG_ERROR) {
         return {
           exitCode: 1,
           stdout: "",
-          stderr:
+          stderr: decodeError(response.payload)
         };
       }
-      return
-        exitCode: response.payload.exitCode,
-        stdout: response.payload.stdout,
-        stderr: response.payload.stderr
-      };
+      return decodeExecResult(response.payload);
     } catch (e) {
       return {
         exitCode: 1,
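exec() now serializes its arguments into a binary payload instead of a structured object. From the added encode/decode helpers, the layouts are: request [u32 timeoutMs][u32 cmdLen][cmd utf-8]; response [i32 exitCode][u32 stdoutLen][stdout][u32 stderrLen][stderr]. A hand-assembled response, decoded the same way decodeExecResult does:

    const stdoutBuf = Buffer.from("ok\n", "utf-8");
    const stderrBuf = Buffer.alloc(0);
    const payload = Buffer.alloc(8 + stdoutBuf.length + 4 + stderrBuf.length);
    payload.writeInt32BE(0, 0);                 // exitCode 0
    payload.writeUInt32BE(stdoutBuf.length, 4); // stdout length
    stdoutBuf.copy(payload, 8);
    payload.writeUInt32BE(stderrBuf.length, 8 + stdoutBuf.length); // stderr length
    const exitCode = payload.readInt32BE(0);
    const stdoutLen = payload.readUInt32BE(4);
    const stdout = payload.subarray(8, 8 + stdoutLen).toString("utf-8");
    console.log(exitCode, JSON.stringify(stdout)); // 0 "ok\n"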
@@ -1341,42 +1413,38 @@ var VsockClient = class {
   /**
    * Write content to a file on the remote VM
    */
-  async
-  const
-
-
-
-
-
-
-
-
-
-
-
+  async writeFileInternal(remotePath, content, sudo) {
+    const contentBuf = Buffer.from(content, "utf-8");
+    if (contentBuf.length > MAX_MESSAGE_SIZE - 1024) {
+      throw new Error(
+        `Content too large: ${contentBuf.length} bytes (max ${MAX_MESSAGE_SIZE - 1024})`
+      );
+    }
+    const payload = encodeWriteFilePayload(remotePath, contentBuf, sudo);
+    const response = await this.request(
+      MSG_WRITE_FILE,
+      payload,
+      DEFAULT_EXEC_TIMEOUT_MS
+    );
+    if (response.type === MSG_ERROR) {
+      throw new Error(`Write file failed: ${decodeError(response.payload)}`);
+    }
+    const result = decodeWriteFileResult(response.payload);
+    if (!result.success) {
+      throw new Error(`Write file failed: ${result.error}`);
     }
   }
+  /**
+   * Write content to a file on the remote VM
+   */
+  async writeFile(remotePath, content) {
+    return this.writeFileInternal(remotePath, content, false);
+  }
   /**
    * Write content to a file on the remote VM using sudo
    */
   async writeFileWithSudo(remotePath, content) {
-
-    const maxChunkSize = 65e3;
-    if (encoded.length <= maxChunkSize) {
-      await this.execOrThrow(
-        `echo '${encoded}' | base64 -d | sudo tee '${remotePath}' > /dev/null`
-      );
-    } else {
-      await this.execOrThrow(`sudo rm -f '${remotePath}'`);
-      for (let i = 0; i < encoded.length; i += maxChunkSize) {
-        const chunk = encoded.slice(i, i + maxChunkSize);
-        const teeFlag = i === 0 ? "" : "-a";
-        await this.execOrThrow(
-          `echo '${chunk}' | base64 -d | sudo tee ${teeFlag} '${remotePath}' > /dev/null`
-        );
-      }
-    }
+    return this.writeFileInternal(remotePath, content, true);
   }
   /**
    * Read a file from the remote VM
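This hunk replaces the shell-based writeFileWithSudo (base64 chunks piped through sudo tee in 65,000-character slices) with a single binary write_file message carrying a sudo flag, sidestepping shell quoting and the chunk loop entirely. The payload layout from encodeWriteFilePayload, as a standalone sketch:

    // [u16 pathLen][path utf-8][u8 flags][u32 contentLen][content bytes]
    function encodeWriteFile(path, content, sudo) {
      const pathBuf = Buffer.from(path, "utf-8");
      const payload = Buffer.alloc(2 + pathBuf.length + 1 + 4 + content.length);
      let offset = 0;
      payload.writeUInt16BE(pathBuf.length, offset); offset += 2;
      pathBuf.copy(payload, offset); offset += pathBuf.length;
      payload.writeUInt8(sudo ? 1 : 0, offset); offset += 1; // FLAG_SUDO
      payload.writeUInt32BE(content.length, offset); offset += 4;
      content.copy(payload, offset);
      return payload;
    }
    const p = encodeWriteFile("/etc/motd", Buffer.from("hello\n"), true);
    console.log(p.length);           // 2 + 9 + 1 + 4 + 6 = 22
    console.log(p.readUInt8(2 + 9)); // 1 (sudo flag set)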
@@ -1457,15 +1525,15 @@ var VsockClient = class {
       State2[State2["Connected"] = 2] = "Connected";
     })(State || (State = {}));
     let state = 0 /* WaitingForReady */;
-    let
+    let pingSeq = 0;
    socket.on("data", (data) => {
      try {
        for (const msg of decoder.decode(data)) {
-          if (state === 0 /* WaitingForReady */ && msg.type ===
+          if (state === 0 /* WaitingForReady */ && msg.type === MSG_READY) {
            state = 1 /* WaitingForPong */;
-
-            socket.write(encode(
-          } else if (state === 1 /* WaitingForPong */ && msg.type ===
+            pingSeq = this.getNextSeq();
+            socket.write(encode(MSG_PING, pingSeq));
+          } else if (state === 1 /* WaitingForPong */ && msg.type === MSG_PONG && msg.seq === pingSeq) {
            if (settled) {
              socket.destroy();
              return;
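The connection handshake also becomes seq-aware: the guest announces READY, the host replies with a PING carrying a fresh seq, and only a PONG echoing that exact seq advances the state machine. A compact sketch of that ordering (transport and the surrounding promise plumbing omitted):

    const MSG_READY = 0, MSG_PING = 1, MSG_PONG = 2;
    let state = "WaitingForReady";
    let pingSeq = 0;
    function onMessage(msg, send) {
      if (state === "WaitingForReady" && msg.type === MSG_READY) {
        state = "WaitingForPong";
        pingSeq = 1; // the diff takes this from getNextSeq()
        send({ type: MSG_PING, seq: pingSeq });
      } else if (state === "WaitingForPong" && msg.type === MSG_PONG && msg.seq === pingSeq) {
        state = "Connected";
      }
    }
    onMessage({ type: MSG_READY, seq: 0 }, (m) => console.log("send PING, seq", m.seq));
    onMessage({ type: MSG_PONG, seq: 1 }, () => {});
    console.log(state); // Connected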
@@ -7954,12 +8022,6 @@ var publicVolumeDownloadContract = c18.router({
   }
 });

-// ../../packages/core/src/sandbox/scripts/dist/bundled.ts
-
var RUN_AGENT_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/run-agent.ts\nimport * as fs7 from "fs";\nimport { spawn, execSync as execSync4 } from "child_process";\nimport * as readline from "readline";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HEARTBEAT_INTERVAL = 60;\nvar TELEMETRY_INTERVAL = 30;\nvar HTTP_CONNECT_TIMEOUT = 10;\nvar HTTP_MAX_TIME = 30;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nvar METRICS_INTERVAL = 5;\nfunction validateConfig() {\n if (!WORKING_DIR) {\n throw new Error("VM0_WORKING_DIR is required but not set");\n }\n return true;\n}\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nimport * as fs2 from "fs";\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpPostJson(url, data, maxRetries = HTTP_MAX_RETRIES) {\n const headers = {\n "Content-Type": "application/json",\n Authorization: `Bearer ${API_TOKEN}`\n };\n if (VERCEL_BYPASS) {\n headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;\n }\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP POST attempt ${attempt}/${maxRetries} to ${url}`);\n try {\n const controller = new AbortController();\n const timeoutId = setTimeout(\n () => controller.abort(),\n HTTP_MAX_TIME * 1e3\n );\n const response = await fetch(url, {\n method: "POST",\n headers,\n body: JSON.stringify(data),\n signal: controller.signal\n });\n clearTimeout(timeoutId);\n if (response.ok) {\n const text = await response.text();\n if (text) {\n return JSON.parse(text);\n }\n return {};\n }\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): HTTP ${response.status}`\n );\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("abort")) {\n logWarn(`HTTP POST failed (attempt ${attempt}/${maxRetries}): Timeout`);\n } else {\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP POST failed after ${maxRetries} attempts to ${url}`);\n return null;\n}\nasync function httpPutPresigned(presignedUrl, filePath, contentType = "application/octet-stream", maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP PUT presigned attempt ${attempt}/${maxRetries}`);\n try {\n const curlCmd = [\n "curl",\n "-f",\n "-X",\n "PUT",\n "-H",\n `Content-Type: ${contentType}`,\n "--data-binary",\n `@${filePath}`,\n "--connect-timeout",\n String(HTTP_CONNECT_TIMEOUT),\n "--max-time",\n String(HTTP_MAX_TIME_UPLOAD),\n "--silent",\n `"${presignedUrl}"`\n ].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP PUT presigned failed after ${maxRetries} attempts`);\n return false;\n}\n\n// src/sandbox/scripts/src/lib/secret-masker.ts\nvar MASK_PLACEHOLDER = "***";\nvar MIN_SECRET_LENGTH = 5;\nvar _masker = null;\nvar SecretMasker = class {\n patterns;\n /**\n * Initialize masker with secret values.\n *\n * @param secretValues - List of secret values to mask\n */\n constructor(secretValues) {\n this.patterns = /* @__PURE__ */ new Set();\n for (const secret of secretValues) {\n if (!secret || secret.length < MIN_SECRET_LENGTH) {\n continue;\n }\n this.patterns.add(secret);\n try {\n const b64 = Buffer.from(secret).toString("base64");\n if (b64.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(b64);\n }\n } catch {\n }\n try {\n const urlEnc = encodeURIComponent(secret);\n if (urlEnc !== secret && urlEnc.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(urlEnc);\n }\n } catch {\n }\n }\n }\n /**\n * Recursively mask all occurrences of secrets in the data.\n *\n * @param data - Data to mask (string, list, dict, or primitive)\n * @returns Masked data with the same structure\n */\n mask(data) {\n return this.deepMask(data);\n }\n deepMask(data) {\n if (typeof data === "string") {\n let result = data;\n for (const pattern of this.patterns) {\n result = result.split(pattern).join(MASK_PLACEHOLDER);\n }\n return result;\n }\n if (Array.isArray(data)) {\n return data.map((item) => this.deepMask(item));\n }\n if (data !== null && typeof data === "object") {\n const result = {};\n for (const [key, value] of Object.entries(\n data\n )) {\n result[key] = this.deepMask(value);\n }\n return result;\n }\n return data;\n }\n};\nfunction createMasker() {\n const secretValuesStr = process.env.VM0_SECRET_VALUES ?? "";\n if (!secretValuesStr) {\n return new SecretMasker([]);\n }\n const secretValues = [];\n for (const encodedValue of secretValuesStr.split(",")) {\n const trimmed = encodedValue.trim();\n if (trimmed) {\n try {\n const decoded = Buffer.from(trimmed, "base64").toString("utf-8");\n if (decoded) {\n secretValues.push(decoded);\n }\n } catch {\n }\n }\n }\n return new SecretMasker(secretValues);\n}\nfunction getMasker() {\n if (_masker === null) {\n _masker = createMasker();\n }\n return _masker;\n}\nfunction maskData(data) {\n return getMasker().mask(data);\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nasync function sendEvent(event, sequenceNumber) {\n const eventType = event.type ?? "";\n const eventSubtype = event.subtype ?? "";\n let sessionId = null;\n if (CLI_AGENT_TYPE === "codex") {\n if (eventType === "thread.started") {\n sessionId = event.thread_id ?? "";\n }\n } else {\n if (eventType === "system" && eventSubtype === "init") {\n sessionId = event.session_id ?? "";\n }\n }\n if (sessionId && !fs2.existsSync(SESSION_ID_FILE)) {\n logInfo(`Captured session ID: ${sessionId}`);\n fs2.writeFileSync(SESSION_ID_FILE, sessionId);\n const homeDir = process.env.HOME ?? "/home/user";\n let sessionHistoryPath;\n if (CLI_AGENT_TYPE === "codex") {\n const codexHome = process.env.CODEX_HOME ?? 
`${homeDir}/.codex`;\n sessionHistoryPath = `CODEX_SEARCH:${codexHome}/sessions:${sessionId}`;\n } else {\n const projectName = WORKING_DIR.replace(/^\\//, "").replace(/\\//g, "-");\n sessionHistoryPath = `${homeDir}/.claude/projects/-${projectName}/${sessionId}.jsonl`;\n }\n fs2.writeFileSync(SESSION_HISTORY_PATH_FILE, sessionHistoryPath);\n logInfo(`Session history will be at: ${sessionHistoryPath}`);\n }\n const eventWithSequence = {\n ...event,\n sequenceNumber\n };\n const maskedEvent = maskData(eventWithSequence);\n const payload = {\n runId: RUN_ID,\n events: [maskedEvent]\n };\n const result = await httpPostJson(WEBHOOK_URL, payload);\n if (result === null) {\n logError("Failed to send event after retries");\n fs2.writeFileSync(EVENT_ERROR_FLAG, "1");\n return false;\n }\n return true;\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nimport * as fs4 from "fs";\nimport * as path2 from "path";\n\n// src/sandbox/scripts/src/lib/direct-upload.ts\nimport * as fs3 from "fs";\nimport * as path from "path";\nimport * as crypto from "crypto";\nimport { execSync as execSync2 } from "child_process";\nfunction computeFileHash(filePath) {\n const hash = crypto.createHash("sha256");\n const buffer = fs3.readFileSync(filePath);\n hash.update(buffer);\n return hash.digest("hex");\n}\nfunction collectFileMetadata(dirPath) {\n const files = [];\n function walkDir(currentPath, relativePath) {\n const items = fs3.readdirSync(currentPath);\n for (const item of items) {\n if (item === ".git" || item === ".vm0") {\n continue;\n }\n const fullPath = path.join(currentPath, item);\n const relPath = relativePath ? path.join(relativePath, item) : item;\n const stat = fs3.statSync(fullPath);\n if (stat.isDirectory()) {\n walkDir(fullPath, relPath);\n } else if (stat.isFile()) {\n try {\n const fileHash = computeFileHash(fullPath);\n files.push({\n path: relPath,\n hash: fileHash,\n size: stat.size\n });\n } catch (error) {\n logWarn(`Could not process file ${relPath}: ${error}`);\n }\n }\n }\n }\n walkDir(dirPath, "");\n return files;\n}\nfunction createArchive(dirPath, tarPath) {\n try {\n execSync2(\n `tar -czf "${tarPath}" --exclude=\'.git\' --exclude=\'.vm0\' -C "${dirPath}" .`,\n { stdio: ["pipe", "pipe", "pipe"] }\n );\n return true;\n } catch (error) {\n logError(`Failed to create archive: ${error}`);\n return false;\n }\n}\nfunction createManifest(files, manifestPath) {\n try {\n const manifest = {\n version: 1,\n files,\n createdAt: (/* @__PURE__ */ new Date()).toISOString()\n };\n fs3.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));\n return true;\n } catch (error) {\n logError(`Failed to create manifest: ${error}`);\n return false;\n }\n}\nasync function createDirectUploadSnapshot(mountPath, storageName, storageType = "artifact", runId, message) {\n logInfo(\n `Creating direct upload snapshot for \'${storageName}\' (type: ${storageType})`\n );\n logInfo("Computing file hashes...");\n const hashStart = Date.now();\n const files = collectFileMetadata(mountPath);\n recordSandboxOp("artifact_hash_compute", Date.now() - hashStart, true);\n logInfo(`Found ${files.length} files`);\n if (files.length === 0) {\n logInfo("No files to upload, creating empty version");\n }\n logInfo("Calling prepare endpoint...");\n const prepareStart = Date.now();\n const preparePayload = {\n storageName,\n storageType,\n files\n };\n if (runId) {\n preparePayload.runId = runId;\n }\n const prepareResponse = await httpPostJson(\n STORAGE_PREPARE_URL,\n preparePayload\n );\n if (!prepareResponse) {\n 
logError("Failed to call prepare endpoint");\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n const versionId = prepareResponse.versionId;\n if (!versionId) {\n logError(`Invalid prepare response: ${JSON.stringify(prepareResponse)}`);\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, true);\n if (prepareResponse.existing) {\n logInfo(`Version already exists (deduplicated): ${versionId.slice(0, 8)}`);\n logInfo("Updating HEAD pointer...");\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse || !commitResponse.success) {\n logError(`Failed to update HEAD: ${JSON.stringify(commitResponse)}`);\n return null;\n }\n return { versionId, deduplicated: true };\n }\n const uploads = prepareResponse.uploads;\n if (!uploads) {\n logError("No upload URLs in prepare response");\n return null;\n }\n const archiveInfo = uploads.archive;\n const manifestInfo = uploads.manifest;\n if (!archiveInfo || !manifestInfo) {\n logError("Missing archive or manifest upload info");\n return null;\n }\n const tempDir = fs3.mkdtempSync(`/tmp/direct-upload-${storageName}-`);\n try {\n logInfo("Creating archive...");\n const archiveStart = Date.now();\n const archivePath = path.join(tempDir, "archive.tar.gz");\n if (!createArchive(mountPath, archivePath)) {\n logError("Failed to create archive");\n recordSandboxOp(\n "artifact_archive_create",\n Date.now() - archiveStart,\n false\n );\n return null;\n }\n recordSandboxOp("artifact_archive_create", Date.now() - archiveStart, true);\n logInfo("Creating manifest...");\n const manifestPath = path.join(tempDir, "manifest.json");\n if (!createManifest(files, manifestPath)) {\n logError("Failed to create manifest");\n return null;\n }\n logInfo("Uploading archive to S3...");\n const s3UploadStart = Date.now();\n if (!await httpPutPresigned(\n archiveInfo.presignedUrl,\n archivePath,\n "application/gzip"\n )) {\n logError("Failed to upload archive to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n logInfo("Uploading manifest to S3...");\n if (!await httpPutPresigned(\n manifestInfo.presignedUrl,\n manifestPath,\n "application/json"\n )) {\n logError("Failed to upload manifest to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, true);\n logInfo("Calling commit endpoint...");\n const commitStart = Date.now();\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n if (message) {\n commitPayload.message = message;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse) {\n logError("Failed to call commit endpoint");\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n if (!commitResponse.success) {\n logError(`Commit failed: ${JSON.stringify(commitResponse)}`);\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, true);\n logInfo(`Direct upload snapshot created: 
${versionId.slice(0, 8)}`);\n return { versionId };\n } finally {\n try {\n fs3.rmSync(tempDir, { recursive: true, force: true });\n } catch {\n }\n }\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nfunction findJsonlFiles(dir) {\n const files = [];\n function walk(currentDir) {\n try {\n const items = fs4.readdirSync(currentDir);\n for (const item of items) {\n const fullPath = path2.join(currentDir, item);\n const stat = fs4.statSync(fullPath);\n if (stat.isDirectory()) {\n walk(fullPath);\n } else if (item.endsWith(".jsonl")) {\n files.push(fullPath);\n }\n }\n } catch {\n }\n }\n walk(dir);\n return files;\n}\nfunction findCodexSessionFile(sessionsDir, sessionId) {\n const files = findJsonlFiles(sessionsDir);\n logInfo(`Searching for Codex session ${sessionId} in ${files.length} files`);\n for (const filepath of files) {\n const filename = path2.basename(filepath);\n if (filename.includes(sessionId) || filename.replace(/-/g, "").includes(sessionId.replace(/-/g, ""))) {\n logInfo(`Found Codex session file: ${filepath}`);\n return filepath;\n }\n }\n if (files.length > 0) {\n files.sort((a, b) => {\n const statA = fs4.statSync(a);\n const statB = fs4.statSync(b);\n return statB.mtimeMs - statA.mtimeMs;\n });\n const mostRecent = files[0] ?? null;\n if (mostRecent) {\n logInfo(\n `Session ID not found in filenames, using most recent: ${mostRecent}`\n );\n }\n return mostRecent;\n }\n return null;\n}\nasync function createCheckpoint() {\n const checkpointStart = Date.now();\n logInfo("Creating checkpoint...");\n const sessionIdStart = Date.now();\n if (!fs4.existsSync(SESSION_ID_FILE)) {\n logError("No session ID found, checkpoint creation failed");\n recordSandboxOp(\n "session_id_read",\n Date.now() - sessionIdStart,\n false,\n "Session ID file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const cliAgentSessionId = fs4.readFileSync(SESSION_ID_FILE, "utf-8").trim();\n recordSandboxOp("session_id_read", Date.now() - sessionIdStart, true);\n const sessionHistoryStart = Date.now();\n if (!fs4.existsSync(SESSION_HISTORY_PATH_FILE)) {\n logError("No session history path found, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history path file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionHistoryPathRaw = fs4.readFileSync(SESSION_HISTORY_PATH_FILE, "utf-8").trim();\n let sessionHistoryPath;\n if (sessionHistoryPathRaw.startsWith("CODEX_SEARCH:")) {\n const parts = sessionHistoryPathRaw.split(":");\n if (parts.length !== 3) {\n logError(`Invalid Codex search marker format: ${sessionHistoryPathRaw}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Invalid Codex search marker"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionsDir = parts[1] ?? "";\n const codexSessionId = parts[2] ?? 
"";\n logInfo(`Searching for Codex session in ${sessionsDir}`);\n const foundPath = findCodexSessionFile(sessionsDir, codexSessionId);\n if (!foundPath) {\n logError(\n `Could not find Codex session file for ${codexSessionId} in ${sessionsDir}`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Codex session file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n sessionHistoryPath = foundPath;\n } else {\n sessionHistoryPath = sessionHistoryPathRaw;\n }\n if (!fs4.existsSync(sessionHistoryPath)) {\n logError(\n `Session history file not found at ${sessionHistoryPath}, checkpoint creation failed`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n let cliAgentSessionHistory;\n try {\n cliAgentSessionHistory = fs4.readFileSync(sessionHistoryPath, "utf-8");\n } catch (error) {\n logError(`Failed to read session history: ${error}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n String(error)\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n if (!cliAgentSessionHistory.trim()) {\n logError("Session history is empty, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history empty"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const lineCount = cliAgentSessionHistory.trim().split("\\n").length;\n logInfo(`Session history loaded (${lineCount} lines)`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n true\n );\n let artifactSnapshot = null;\n if (ARTIFACT_DRIVER && ARTIFACT_VOLUME_NAME) {\n logInfo(`Processing artifact with driver: ${ARTIFACT_DRIVER}`);\n if (ARTIFACT_DRIVER !== "vas") {\n logError(\n `Unknown artifact driver: ${ARTIFACT_DRIVER} (only \'vas\' is supported)`\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n logInfo(\n `Creating VAS snapshot for artifact \'${ARTIFACT_VOLUME_NAME}\' at ${ARTIFACT_MOUNT_PATH}`\n );\n logInfo("Using direct S3 upload...");\n const snapshot = await createDirectUploadSnapshot(\n ARTIFACT_MOUNT_PATH,\n ARTIFACT_VOLUME_NAME,\n "artifact",\n RUN_ID,\n `Checkpoint from run ${RUN_ID}`\n );\n if (!snapshot) {\n logError("Failed to create VAS snapshot for artifact");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const artifactVersion = snapshot.versionId;\n if (!artifactVersion) {\n logError("Failed to extract versionId from snapshot");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n artifactSnapshot = {\n artifactName: ARTIFACT_VOLUME_NAME,\n artifactVersion\n };\n logInfo(\n `VAS artifact snapshot created: ${ARTIFACT_VOLUME_NAME}@${artifactVersion}`\n );\n } else {\n logInfo(\n "No artifact configured, creating checkpoint without artifact snapshot"\n );\n }\n logInfo("Calling checkpoint API...");\n const checkpointPayload = {\n runId: RUN_ID,\n cliAgentType: CLI_AGENT_TYPE,\n cliAgentSessionId,\n cliAgentSessionHistory\n };\n if (artifactSnapshot) {\n checkpointPayload.artifactSnapshot = artifactSnapshot;\n }\n const apiCallStart = Date.now();\n const 
result = await httpPostJson(\n CHECKPOINT_URL,\n checkpointPayload\n );\n if (result && result.checkpointId) {\n const checkpointId = result.checkpointId;\n logInfo(`Checkpoint created successfully: ${checkpointId}`);\n recordSandboxOp("checkpoint_api_call", Date.now() - apiCallStart, true);\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, true);\n return true;\n } else {\n logError(\n `Checkpoint API returned invalid response: ${JSON.stringify(result)}`\n );\n recordSandboxOp(\n "checkpoint_api_call",\n Date.now() - apiCallStart,\n false,\n "Invalid API response"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n}\n\n// src/sandbox/scripts/src/lib/metrics.ts\nimport * as fs5 from "fs";\nimport { execSync as execSync3 } from "child_process";\nvar shutdownRequested = false;\nfunction getCpuPercent() {\n try {\n const content = fs5.readFileSync("/proc/stat", "utf-8");\n const line = content.split("\\n")[0];\n if (!line) {\n return 0;\n }\n const parts = line.split(/\\s+/);\n if (parts[0] !== "cpu") {\n return 0;\n }\n const values = parts.slice(1).map((x) => parseInt(x, 10));\n const idleVal = values[3];\n const iowaitVal = values[4];\n if (idleVal === void 0 || iowaitVal === void 0) {\n return 0;\n }\n const idle = idleVal + iowaitVal;\n const total = values.reduce((a, b) => a + b, 0);\n if (total === 0) {\n return 0;\n }\n const cpuPercent = 100 * (1 - idle / total);\n return Math.round(cpuPercent * 100) / 100;\n } catch (error) {\n logDebug(`Failed to get CPU percent: ${error}`);\n return 0;\n }\n}\nfunction getMemoryInfo() {\n try {\n const result = execSync3("free -b", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n for (const line of lines) {\n if (line.startsWith("Mem:")) {\n const parts = line.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n }\n }\n return [0, 0];\n } catch (error) {\n logDebug(`Failed to get memory info: ${error}`);\n return [0, 0];\n }\n}\nfunction getDiskInfo() {\n try {\n const result = execSync3("df -B1 /", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n if (lines.length < 2) {\n return [0, 0];\n }\n const dataLine = lines[1];\n if (!dataLine) {\n return [0, 0];\n }\n const parts = dataLine.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n } catch (error) {\n logDebug(`Failed to get disk info: ${error}`);\n return [0, 0];\n }\n}\nfunction collectMetrics() {\n const cpu = getCpuPercent();\n const [memUsed, memTotal] = getMemoryInfo();\n const [diskUsed, diskTotal] = getDiskInfo();\n return {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n cpu,\n mem_used: memUsed,\n mem_total: memTotal,\n disk_used: diskUsed,\n disk_total: diskTotal\n };\n}\nfunction metricsCollectorLoop() {\n logInfo(`Metrics collector started, writing to ${METRICS_LOG_FILE}`);\n const writeMetrics = () => {\n if (shutdownRequested) {\n logInfo("Metrics collector stopped");\n return;\n }\n try {\n const metrics = collectMetrics();\n fs5.appendFileSync(METRICS_LOG_FILE, JSON.stringify(metrics) + "\\n");\n logDebug(\n 
`Metrics collected: cpu=${metrics.cpu}%, mem=${metrics.mem_used}/${metrics.mem_total}`\n );\n } catch (error) {\n logError(`Failed to collect/write metrics: ${error}`);\n }\n setTimeout(writeMetrics, METRICS_INTERVAL * 1e3);\n };\n writeMetrics();\n}\nfunction startMetricsCollector() {\n shutdownRequested = false;\n setTimeout(metricsCollectorLoop, 0);\n}\nfunction stopMetricsCollector() {\n shutdownRequested = true;\n}\n\n// src/sandbox/scripts/src/lib/upload-telemetry.ts\nimport * as fs6 from "fs";\nvar shutdownRequested2 = false;\nfunction readFileFromPosition(filePath, posFile) {\n let lastPos = 0;\n if (fs6.existsSync(posFile)) {\n try {\n const content = fs6.readFileSync(posFile, "utf-8").trim();\n lastPos = parseInt(content, 10) || 0;\n } catch {\n lastPos = 0;\n }\n }\n let newContent = "";\n let newPos = lastPos;\n if (fs6.existsSync(filePath)) {\n try {\n const fd = fs6.openSync(filePath, "r");\n const stats = fs6.fstatSync(fd);\n const bufferSize = stats.size - lastPos;\n if (bufferSize > 0) {\n const buffer = Buffer.alloc(bufferSize);\n fs6.readSync(fd, buffer, 0, bufferSize, lastPos);\n newContent = buffer.toString("utf-8");\n newPos = stats.size;\n }\n fs6.closeSync(fd);\n } catch (error) {\n logDebug(`Failed to read ${filePath}: ${error}`);\n }\n }\n return [newContent, newPos];\n}\nfunction savePosition(posFile, position) {\n try {\n fs6.writeFileSync(posFile, String(position));\n } catch (error) {\n logDebug(`Failed to save position to ${posFile}: ${error}`);\n }\n}\nfunction readJsonlFromPosition(filePath, posFile) {\n const [content, newPos] = readFileFromPosition(filePath, posFile);\n const entries = [];\n if (content) {\n for (const line of content.trim().split("\\n")) {\n if (line) {\n try {\n entries.push(JSON.parse(line));\n } catch {\n }\n }\n }\n }\n return [entries, newPos];\n}\nfunction readMetricsFromPosition(posFile) {\n return readJsonlFromPosition(METRICS_LOG_FILE, posFile);\n}\nfunction readNetworkLogsFromPosition(posFile) {\n return readJsonlFromPosition(NETWORK_LOG_FILE, posFile);\n}\nfunction readSandboxOpsFromPosition(posFile) {\n return readJsonlFromPosition(SANDBOX_OPS_LOG_FILE, posFile);\n}\nasync function uploadTelemetry() {\n const [systemLog, logPos] = readFileFromPosition(\n SYSTEM_LOG_FILE,\n TELEMETRY_LOG_POS_FILE\n );\n const [metrics, metricsPos] = readMetricsFromPosition(\n TELEMETRY_METRICS_POS_FILE\n );\n const [networkLogs, networkPos] = readNetworkLogsFromPosition(\n TELEMETRY_NETWORK_POS_FILE\n );\n const [sandboxOps, sandboxOpsPos] = readSandboxOpsFromPosition(\n TELEMETRY_SANDBOX_OPS_POS_FILE\n );\n if (!systemLog && metrics.length === 0 && networkLogs.length === 0 && sandboxOps.length === 0) {\n logDebug("No new telemetry data to upload");\n return true;\n }\n const maskedSystemLog = systemLog ? maskData(systemLog) : "";\n const maskedNetworkLogs = networkLogs.length > 0 ? 
maskData(networkLogs) : [];\n const payload = {\n runId: RUN_ID,\n systemLog: maskedSystemLog,\n metrics,\n // Metrics don\'t contain secrets (just numbers)\n networkLogs: maskedNetworkLogs,\n sandboxOperations: sandboxOps\n // Sandbox ops don\'t contain secrets (just timing data)\n };\n logDebug(\n `Uploading telemetry: ${systemLog.length} bytes log, ${metrics.length} metrics, ${networkLogs.length} network logs, ${sandboxOps.length} sandbox ops`\n );\n const result = await httpPostJson(TELEMETRY_URL, payload, 1);\n if (result) {\n savePosition(TELEMETRY_LOG_POS_FILE, logPos);\n savePosition(TELEMETRY_METRICS_POS_FILE, metricsPos);\n savePosition(TELEMETRY_NETWORK_POS_FILE, networkPos);\n savePosition(TELEMETRY_SANDBOX_OPS_POS_FILE, sandboxOpsPos);\n logDebug(\n `Telemetry uploaded successfully: ${result.id ?? "unknown"}`\n );\n return true;\n } else {\n logWarn("Failed to upload telemetry (will retry next interval)");\n return false;\n }\n}\nasync function telemetryUploadLoop() {\n logInfo(`Telemetry upload started (interval: ${TELEMETRY_INTERVAL}s)`);\n const runUpload = async () => {\n if (shutdownRequested2) {\n logInfo("Telemetry upload stopped");\n return;\n }\n try {\n await uploadTelemetry();\n } catch (error) {\n logError(`Telemetry upload error: ${error}`);\n }\n setTimeout(() => void runUpload(), TELEMETRY_INTERVAL * 1e3);\n };\n await runUpload();\n}\nfunction startTelemetryUpload() {\n shutdownRequested2 = false;\n setTimeout(() => void telemetryUploadLoop(), 0);\n}\nfunction stopTelemetryUpload() {\n shutdownRequested2 = true;\n}\nasync function finalTelemetryUpload() {\n logInfo("Performing final telemetry upload...");\n return uploadTelemetry();\n}\n\n// src/sandbox/scripts/src/run-agent.ts\nvar shutdownRequested3 = false;\nfunction heartbeatLoop() {\n const sendHeartbeat = async () => {\n if (shutdownRequested3) {\n return;\n }\n try {\n if (await httpPostJson(HEARTBEAT_URL, { runId: RUN_ID })) {\n logInfo("Heartbeat sent");\n } else {\n logWarn("Heartbeat failed");\n }\n } catch (error) {\n logWarn(`Heartbeat error: ${error}`);\n }\n setTimeout(() => {\n sendHeartbeat().catch(() => {\n });\n }, HEARTBEAT_INTERVAL * 1e3);\n };\n sendHeartbeat().catch(() => {\n });\n}\nasync function cleanup(exitCode, errorMessage) {\n logInfo("\\u25B7 Cleanup");\n const telemetryStart = Date.now();\n let telemetrySuccess = true;\n try {\n await finalTelemetryUpload();\n } catch (error) {\n telemetrySuccess = false;\n logError(`Final telemetry upload failed: ${error}`);\n }\n recordSandboxOp(\n "final_telemetry_upload",\n Date.now() - telemetryStart,\n telemetrySuccess\n );\n logInfo(`Calling complete API with exitCode=${exitCode}`);\n const completePayload = {\n runId: RUN_ID,\n exitCode\n };\n if (errorMessage) {\n completePayload.error = errorMessage;\n }\n const completeStart = Date.now();\n let completeSuccess = false;\n try {\n if (await httpPostJson(COMPLETE_URL, completePayload)) {\n logInfo("Complete API called successfully");\n completeSuccess = true;\n } else {\n logError("Failed to call complete API (sandbox may not be cleaned up)");\n }\n } catch (error) {\n logError(`Complete API call failed: ${error}`);\n }\n recordSandboxOp(\n "complete_api_call",\n Date.now() - completeStart,\n completeSuccess\n );\n shutdownRequested3 = true;\n stopMetricsCollector();\n stopTelemetryUpload();\n logInfo("Background processes stopped");\n if (exitCode === 0) {\n logInfo("\\u2713 Sandbox finished successfully");\n } else {\n logInfo(`\\u2717 Sandbox failed (exit code ${exitCode})`);\n 
}\n}\nasync function run() {\n validateConfig();\n logInfo(`\\u25B6 VM0 Sandbox ${RUN_ID}`);\n logInfo("\\u25B7 Initialization");\n const initStartTime = Date.now();\n logInfo(`Working directory: ${WORKING_DIR}`);\n const heartbeatStart = Date.now();\n heartbeatLoop();\n logInfo("Heartbeat started");\n recordSandboxOp("heartbeat_start", Date.now() - heartbeatStart, true);\n const metricsStart = Date.now();\n startMetricsCollector();\n logInfo("Metrics collector started");\n recordSandboxOp("metrics_collector_start", Date.now() - metricsStart, true);\n const telemetryStart = Date.now();\n startTelemetryUpload();\n logInfo("Telemetry upload started");\n recordSandboxOp("telemetry_upload_start", Date.now() - telemetryStart, true);\n const workingDirStart = Date.now();\n try {\n fs7.mkdirSync(WORKING_DIR, { recursive: true });\n process.chdir(WORKING_DIR);\n } catch (error) {\n recordSandboxOp(\n "working_dir_setup",\n Date.now() - workingDirStart,\n false,\n String(error)\n );\n throw new Error(\n `Failed to create/change to working directory: ${WORKING_DIR} - ${error}`\n );\n }\n recordSandboxOp("working_dir_setup", Date.now() - workingDirStart, true);\n if (CLI_AGENT_TYPE === "codex") {\n const homeDir = process.env.HOME ?? "/home/user";\n const codexHome = `${homeDir}/.codex`;\n fs7.mkdirSync(codexHome, { recursive: true });\n process.env.CODEX_HOME = codexHome;\n logInfo(`Codex home directory: ${codexHome}`);\n const codexLoginStart = Date.now();\n let codexLoginSuccess = false;\n const apiKey = process.env.OPENAI_API_KEY ?? "";\n if (apiKey) {\n try {\n execSync4("codex login --with-api-key", {\n input: apiKey,\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n logInfo("Codex authenticated with API key");\n codexLoginSuccess = true;\n } catch (error) {\n logError(`Codex login failed: ${error}`);\n }\n } else {\n logError("OPENAI_API_KEY not set");\n }\n recordSandboxOp(\n "codex_login",\n Date.now() - codexLoginStart,\n codexLoginSuccess\n );\n }\n const initDurationMs = Date.now() - initStartTime;\n recordSandboxOp("init_total", initDurationMs, true);\n logInfo(`\\u2713 Initialization complete (${Math.floor(initDurationMs / 1e3)}s)`);\n logInfo("\\u25B7 Execution");\n const execStartTime = Date.now();\n logInfo(`Starting ${CLI_AGENT_TYPE} execution...`);\n logInfo(`Prompt: ${PROMPT}`);\n const useMock = process.env.USE_MOCK_CLAUDE === "true";\n let cmd;\n if (CLI_AGENT_TYPE === "codex") {\n if (useMock) {\n throw new Error("Mock mode not supported for Codex");\n }\n const codexArgs = [\n "exec",\n "--json",\n "--dangerously-bypass-approvals-and-sandbox",\n "--skip-git-repo-check",\n "-C",\n WORKING_DIR\n ];\n if (OPENAI_MODEL) {\n codexArgs.push("-m", OPENAI_MODEL);\n }\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n codexArgs.push("resume", RESUME_SESSION_ID, PROMPT);\n } else {\n logInfo("Starting new session");\n codexArgs.push(PROMPT);\n }\n cmd = ["codex", ...codexArgs];\n } else {\n const claudeArgs = [\n "--print",\n "--verbose",\n "--output-format",\n "stream-json",\n "--dangerously-skip-permissions"\n ];\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n claudeArgs.push("--resume", RESUME_SESSION_ID);\n } else {\n logInfo("Starting new session");\n }\n const claudeBin = useMock ? 
"/usr/local/bin/vm0-agent/mock-claude.mjs" : "claude";\n if (useMock) {\n logInfo("Using mock-claude for testing");\n }\n cmd = [claudeBin, ...claudeArgs, PROMPT];\n }\n let agentExitCode = 0;\n const stderrLines = [];\n let logFile = null;\n try {\n logFile = fs7.createWriteStream(AGENT_LOG_FILE);\n const cmdExe = cmd[0];\n if (!cmdExe) {\n throw new Error("Empty command");\n }\n const proc = spawn(cmdExe, cmd.slice(1), {\n stdio: ["ignore", "pipe", "pipe"]\n });\n const exitPromise = new Promise((resolve) => {\n let resolved = false;\n proc.on("error", (err) => {\n if (!resolved) {\n resolved = true;\n logError(`Failed to spawn ${CLI_AGENT_TYPE}: ${err.message}`);\n stderrLines.push(`Spawn error: ${err.message}`);\n resolve(1);\n }\n });\n proc.on("close", (code) => {\n if (!resolved) {\n resolved = true;\n resolve(code ?? 1);\n }\n });\n });\n if (proc.stderr) {\n const stderrRl = readline.createInterface({ input: proc.stderr });\n stderrRl.on("line", (line) => {\n stderrLines.push(line);\n if (logFile && !logFile.destroyed) {\n logFile.write(`[STDERR] ${line}\n`);\n }\n });\n }\n if (proc.stdout) {\n const stdoutRl = readline.createInterface({ input: proc.stdout });\n let eventSequence = 0;\n for await (const line of stdoutRl) {\n if (logFile && !logFile.destroyed) {\n logFile.write(line + "\\n");\n }\n const stripped = line.trim();\n if (!stripped) {\n continue;\n }\n try {\n const event = JSON.parse(stripped);\n await sendEvent(event, eventSequence);\n eventSequence++;\n if (event.type === "result") {\n const resultContent = event.result;\n if (resultContent) {\n console.log(resultContent);\n }\n }\n } catch {\n logDebug(`Non-JSON line from agent: ${stripped.slice(0, 100)}`);\n }\n }\n }\n agentExitCode = await exitPromise;\n } catch (error) {\n logError(`Failed to execute ${CLI_AGENT_TYPE}: ${error}`);\n agentExitCode = 1;\n } finally {\n if (logFile && !logFile.destroyed) {\n logFile.end();\n }\n }\n console.log();\n let finalExitCode = agentExitCode;\n let errorMessage = "";\n if (fs7.existsSync(EVENT_ERROR_FLAG)) {\n logError("Some events failed to send, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Some events failed to send";\n }\n const execDurationMs = Date.now() - execStartTime;\n recordSandboxOp("cli_execution", execDurationMs, agentExitCode === 0);\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`\\u2713 Execution complete (${Math.floor(execDurationMs / 1e3)}s)`);\n } else {\n logInfo(`\\u2717 Execution failed (${Math.floor(execDurationMs / 1e3)}s)`);\n }\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`${CLI_AGENT_TYPE} completed successfully`);\n logInfo("\\u25B7 Checkpoint");\n const checkpointStartTime = Date.now();\n const checkpointSuccess = await createCheckpoint();\n const checkpointDuration = Math.floor(\n (Date.now() - checkpointStartTime) / 1e3\n );\n if (checkpointSuccess) {\n logInfo(`\\u2713 Checkpoint complete (${checkpointDuration}s)`);\n } else {\n logInfo(`\\u2717 Checkpoint failed (${checkpointDuration}s)`);\n }\n if (!checkpointSuccess) {\n logError("Checkpoint creation failed, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Checkpoint creation failed";\n }\n } else {\n if (agentExitCode !== 0) {\n logInfo(`${CLI_AGENT_TYPE} failed with exit code ${agentExitCode}`);\n if (stderrLines.length > 0) {\n errorMessage = stderrLines.map((line) => line.trim()).join(" ");\n logInfo(`Captured stderr: ${errorMessage}`);\n } else {\n errorMessage = `Agent exited with code ${agentExitCode}`;\n }\n }\n }\n 
return [finalExitCode, errorMessage];\n}\nasync function main() {\n let exitCode = 1;\n let errorMessage = "Unexpected termination";\n try {\n [exitCode, errorMessage] = await run();\n } catch (error) {\n if (error instanceof Error) {\n exitCode = 1;\n errorMessage = error.message;\n logError(`Error: ${errorMessage}`);\n } else {\n exitCode = 1;\n errorMessage = `Unexpected error: ${error}`;\n logError(errorMessage);\n }\n } finally {\n await cleanup(exitCode, errorMessage);\n }\n return exitCode;\n}\nmain().then((code) => process.exit(code)).catch((error) => {\n console.error("Fatal error:", error);\n process.exit(1);\n});\n';
-
var DOWNLOAD_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/download.ts\nimport * as fs2 from "fs";\nimport * as path from "path";\nimport * as os from "os";\nimport { execSync as execSync2 } from "child_process";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpDownload(url, destPath, maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP download attempt ${attempt}/${maxRetries} from ${url}`);\n try {\n const curlCmd = ["curl", "-fsSL", "-o", destPath, `"${url}"`].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP download failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP download failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP download failed after ${maxRetries} attempts from ${url}`);\n return false;\n}\n\n// src/sandbox/scripts/src/download.ts\nasync function downloadStorage(mountPath, archiveUrl) {\n logInfo(`Downloading storage to ${mountPath}`);\n const tempTar = path.join(\n os.tmpdir(),\n `storage-${Date.now()}-${Math.random().toString(36).slice(2)}.tar.gz`\n );\n try {\n if (!await httpDownload(archiveUrl, tempTar)) {\n logError(`Failed to download archive for ${mountPath}`);\n return false;\n }\n fs2.mkdirSync(mountPath, { recursive: true });\n try {\n execSync2(`tar -xzf "${tempTar}" -C "${mountPath}"`, {\n stdio: ["pipe", "pipe", "pipe"]\n });\n } catch {\n logInfo(`Archive appears empty for ${mountPath}`);\n }\n logInfo(`Successfully extracted to ${mountPath}`);\n return true;\n } finally {\n try {\n fs2.unlinkSync(tempTar);\n } catch {\n }\n }\n}\nasync function main() {\n const args = process.argv.slice(2);\n if (args.length < 1) {\n logError("Usage: node download.mjs <manifest_path>");\n process.exit(1);\n }\n const manifestPath = args[0] ?? "";\n if (!manifestPath || !fs2.existsSync(manifestPath)) {\n logError(`Manifest file not found: ${manifestPath}`);\n process.exit(1);\n }\n logInfo(`Starting storage download from manifest: ${manifestPath}`);\n let manifest;\n try {\n const content = fs2.readFileSync(manifestPath, "utf-8");\n manifest = JSON.parse(content);\n } catch (error) {\n logError(`Failed to load manifest: ${error}`);\n process.exit(1);\n }\n const storages = manifest.storages ?? 
[];\n const artifact = manifest.artifact;\n const storageCount = storages.length;\n const hasArtifact = artifact !== void 0;\n logInfo(`Found ${storageCount} storages, artifact: ${hasArtifact}`);\n const downloadTotalStart = Date.now();\n let downloadSuccess = true;\n for (const storage of storages) {\n const mountPath = storage.mountPath;\n const archiveUrl = storage.archiveUrl;\n if (archiveUrl && archiveUrl !== "null") {\n const storageStart = Date.now();\n const success = await downloadStorage(mountPath, archiveUrl);\n recordSandboxOp("storage_download", Date.now() - storageStart, success);\n if (!success) {\n downloadSuccess = false;\n }\n }\n }\n if (artifact) {\n const artifactMount = artifact.mountPath;\n const artifactUrl = artifact.archiveUrl;\n if (artifactUrl && artifactUrl !== "null") {\n const artifactStart = Date.now();\n const success = await downloadStorage(artifactMount, artifactUrl);\n recordSandboxOp("artifact_download", Date.now() - artifactStart, success);\n if (!success) {\n downloadSuccess = false;\n }\n }\n }\n recordSandboxOp(\n "download_total",\n Date.now() - downloadTotalStart,\n downloadSuccess\n );\n logInfo("All storages downloaded successfully");\n}\nmain().catch((error) => {\n logError(`Fatal error: ${error}`);\n process.exit(1);\n});\n';
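For reference, the manifest shape the removed download.mjs consumes, inferred from the script above; all paths and URLs here are placeholders:

```js
// Manifest shape inferred from download.mjs: a storages array plus an
// optional artifact, each entry carrying a mountPath and a pre-signed
// archiveUrl (entries with a missing or "null" URL are skipped).
const manifest = {
  storages: [
    {
      mountPath: "/home/user/workspace",
      archiveUrl: "https://storage.example.com/run-123/workspace.tar.gz"
    }
  ],
  artifact: {
    mountPath: "/home/user/artifact",
    archiveUrl: "https://storage.example.com/run-123/artifact.tar.gz"
  }
};
// Invocation: node download.mjs /path/to/manifest.json
// Each archive is fetched with curl (up to 3 attempts) and extracted with
// tar -xzf into its mountPath.
```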
-
var MOCK_CLAUDE_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/mock-claude.ts\nimport * as fs from "fs";\nimport * as path from "path";\nimport { execSync } from "child_process";\nfunction parseArgs(args) {\n const result = {\n outputFormat: "text",\n print: false,\n verbose: false,\n dangerouslySkipPermissions: false,\n resume: null,\n prompt: ""\n };\n const remaining = [];\n let i = 0;\n while (i < args.length) {\n const arg = args[i];\n if (arg === "--output-format" && i + 1 < args.length) {\n result.outputFormat = args[i + 1] ?? "text";\n i += 2;\n } else if (arg === "--print") {\n result.print = true;\n i++;\n } else if (arg === "--verbose") {\n result.verbose = true;\n i++;\n } else if (arg === "--dangerously-skip-permissions") {\n result.dangerouslySkipPermissions = true;\n i++;\n } else if (arg === "--resume" && i + 1 < args.length) {\n result.resume = args[i + 1] ?? null;\n i += 2;\n } else if (arg) {\n remaining.push(arg);\n i++;\n } else {\n i++;\n }\n }\n if (remaining.length > 0) {\n result.prompt = remaining[0] ?? "";\n }\n return result;\n}\nfunction createSessionHistory(sessionId, cwd) {\n const projectName = cwd.replace(/^\\//, "").replace(/\\//g, "-");\n const homeDir = process.env.HOME ?? "/home/user";\n const sessionDir = `${homeDir}/.claude/projects/-${projectName}`;\n fs.mkdirSync(sessionDir, { recursive: true });\n return path.join(sessionDir, `${sessionId}.jsonl`);\n}\nfunction main() {\n const sessionId = `mock-${Date.now() * 1e3 + Math.floor(Math.random() * 1e3)}`;\n const args = parseArgs(process.argv.slice(2));\n const prompt = args.prompt;\n const outputFormat = args.outputFormat;\n if (prompt.startsWith("@fail:")) {\n const errorMsg = prompt.slice(6);\n console.error(errorMsg);\n process.exit(1);\n }\n const cwd = process.cwd();\n if (outputFormat === "stream-json") {\n const sessionHistoryFile = createSessionHistory(sessionId, cwd);\n const events = [];\n const initEvent = {\n type: "system",\n subtype: "init",\n cwd,\n session_id: sessionId,\n tools: ["Bash"],\n model: "mock-claude"\n };\n console.log(JSON.stringify(initEvent));\n events.push(initEvent);\n const textEvent = {\n type: "assistant",\n message: {\n role: "assistant",\n content: [{ type: "text", text: "Executing command..." }]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(textEvent));\n events.push(textEvent);\n const toolUseEvent = {\n type: "assistant",\n message: {\n role: "assistant",\n content: [\n {\n type: "tool_use",\n id: "toolu_mock_001",\n name: "Bash",\n input: { command: prompt }\n }\n ]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(toolUseEvent));\n events.push(toolUseEvent);\n let output;\n let exitCode;\n try {\n output = execSync(`bash -c ${JSON.stringify(prompt)}`, {\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n exitCode = 0;\n } catch (error) {\n const execError = error;\n output = (execError.stdout ?? "") + (execError.stderr ?? "");\n exitCode = execError.status ?? 1;\n }\n const isError = exitCode !== 0;\n const toolResultEvent = {\n type: "user",\n message: {\n role: "user",\n content: [\n {\n type: "tool_result",\n tool_use_id: "toolu_mock_001",\n content: output,\n is_error: isError\n }\n ]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(toolResultEvent));\n events.push(toolResultEvent);\n const resultEvent = {\n type: "result",\n subtype: exitCode === 0 ? 
"success" : "error",\n is_error: exitCode !== 0,\n duration_ms: 100,\n num_turns: 1,\n result: output,\n session_id: sessionId,\n total_cost_usd: 0,\n usage: { input_tokens: 0, output_tokens: 0 }\n };\n console.log(JSON.stringify(resultEvent));\n events.push(resultEvent);\n const historyContent = events.map((e) => JSON.stringify(e)).join("\\n") + "\\n";\n fs.writeFileSync(sessionHistoryFile, historyContent);\n process.exit(exitCode);\n } else {\n try {\n execSync(`bash -c ${JSON.stringify(prompt)}`, {\n stdio: "inherit"\n });\n process.exit(0);\n } catch (error) {\n const execError = error;\n process.exit(execError.status ?? 1);\n }\n }\n}\nvar isMainModule = process.argv[1]?.endsWith("mock-claude.mjs") || process.argv[1]?.endsWith("mock-claude.ts");\nif (isMainModule) {\n main();\n}\nexport {\n createSessionHistory,\n parseArgs\n};\n';
-
var ENV_LOADER_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/env-loader.ts\nimport * as fs from "fs";\nimport { spawn } from "child_process";\nvar ENV_JSON_PATH = "/tmp/vm0-env.json";\nconsole.log("[env-loader] Starting...");\nif (fs.existsSync(ENV_JSON_PATH)) {\n console.log(`[env-loader] Loading environment from ${ENV_JSON_PATH}`);\n try {\n const content = fs.readFileSync(ENV_JSON_PATH, "utf-8");\n const envData = JSON.parse(content);\n for (const [key, value] of Object.entries(envData)) {\n process.env[key] = value;\n }\n console.log(\n `[env-loader] Loaded ${Object.keys(envData).length} environment variables`\n );\n } catch (error) {\n console.error(`[env-loader] ERROR loading JSON: ${error}`);\n process.exit(1);\n }\n} else {\n console.error(\n `[env-loader] ERROR: Environment file not found: ${ENV_JSON_PATH}`\n );\n process.exit(1);\n}\nvar criticalVars = [\n "VM0_RUN_ID",\n "VM0_API_URL",\n "VM0_WORKING_DIR",\n "VM0_PROMPT"\n];\nfor (const varName of criticalVars) {\n const val = process.env[varName] ?? "";\n if (val) {\n const display = val.length > 50 ? val.substring(0, 50) + "..." : val;\n console.log(`[env-loader] ${varName}=${display}`);\n } else {\n console.log(`[env-loader] WARNING: ${varName} is empty`);\n }\n}\nvar runAgentPath = "/usr/local/bin/vm0-agent/run-agent.mjs";\nconsole.log(`[env-loader] Executing ${runAgentPath}`);\nvar child = spawn("node", [runAgentPath], {\n stdio: "inherit",\n env: process.env\n});\nchild.on("close", (code) => {\n process.exit(code ?? 1);\n});\n';
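The contract for the removed env-loader.mjs is a flat JSON object at a fixed path; a sketch with placeholder values:

```js
// env-loader.mjs reads /tmp/vm0-env.json as a flat string-to-string map and
// copies every entry into process.env before spawning run-agent.mjs. The
// values below are placeholders for illustration only.
import * as fs from "fs";

fs.writeFileSync(
  "/tmp/vm0-env.json",
  JSON.stringify({
    VM0_RUN_ID: "run-123",
    VM0_API_URL: "https://api.example.com",
    VM0_WORKING_DIR: "/home/user/workspace",
    VM0_PROMPT: "fix the failing test"
  })
);
// Then: node env-loader.mjs
// -> logs each critical var (values truncated to 50 chars) and spawns
//    node /usr/local/bin/vm0-agent/run-agent.mjs with the merged environment.
```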
-
 // ../../packages/core/src/sandbox/scripts/index.ts
 var SCRIPT_PATHS = {
   /** Base directory for agent scripts */
@@ -7984,10 +8046,6 @@ var FEATURE_SWITCHES = {
     maintainer: "ethan@vm0.ai",
     enabled: true
   },
-  ["platformOnboarding" /* PlatformOnboarding */]: {
-    maintainer: "ethan@vm0.ai",
-    enabled: false
-  },
   ["platformAgents" /* PlatformAgents */]: {
     maintainer: "ethan@vm0.ai",
     enabled: false
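The hunk above drops the platformOnboarding switch. A hedged sketch of how a switch map of this shape is typically consulted; `isEnabled` is a hypothetical helper, not part of the package:

```js
// Illustrative lookup against a FEATURE_SWITCHES-style map. Only the entry
// shape visible in the diff is assumed; isEnabled is hypothetical.
const FEATURE_SWITCHES = {
  platformAgents: { maintainer: "ethan@vm0.ai", enabled: false }
};

function isEnabled(name) {
  return FEATURE_SWITCHES[name]?.enabled ?? false;
}

console.log(isEnabled("platformAgents"));     // false
console.log(isEnabled("platformOnboarding")); // false: a deleted switch falls back to disabled
```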
@@ -9117,29 +9175,6 @@ async function uploadNetworkLogs(apiUrl, sandboxToken, runId) {
 
 // src/lib/vm-setup/vm-setup.ts
 import fs8 from "fs";
-
-// src/lib/scripts/utils.ts
-function getAllScripts() {
-  return [
-    { content: RUN_AGENT_SCRIPT, path: SCRIPT_PATHS.runAgent },
-    { content: DOWNLOAD_SCRIPT, path: SCRIPT_PATHS.download },
-    { content: MOCK_CLAUDE_SCRIPT, path: SCRIPT_PATHS.mockClaude },
-    // Env loader is runner-specific (loads env from JSON before executing run-agent.mjs)
-    { content: ENV_LOADER_SCRIPT, path: ENV_LOADER_PATH }
-  ];
-}
-
-// src/lib/vm-setup/vm-setup.ts
-async function uploadScripts(guest) {
-  const scripts = getAllScripts();
-  await guest.execOrThrow(`sudo mkdir -p ${SCRIPT_PATHS.baseDir}`);
-  for (const script of scripts) {
-    await guest.writeFileWithSudo(script.path, script.content);
-  }
-  await guest.execOrThrow(
-    `sudo chmod +x ${SCRIPT_PATHS.baseDir}/*.mjs 2>/dev/null || true`
-  );
-}
 async function downloadStorages(guest, manifest) {
   const totalArchives = manifest.storages.filter((s) => s.archiveUrl).length + (manifest.artifact?.archiveUrl ? 1 : 0);
   if (totalArchives === 0) {
@@ -9194,14 +9229,6 @@ async function installProxyCA(guest, caCertPath) {
   await guest.execOrThrow("sudo update-ca-certificates");
   console.log(`[Executor] Proxy CA certificate installed successfully`);
 }
-async function configureDNS(guest) {
-  const dnsConfig = `nameserver 8.8.8.8
-nameserver 8.8.4.4
-nameserver 1.1.1.1`;
-  await guest.execOrThrow(
-    `sudo sh -c 'rm -f /etc/resolv.conf && echo "${dnsConfig}" > /etc/resolv.conf'`
-  );
-}
 
 // src/lib/executor.ts
 function getVmIdFromRunId(runId) {
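One subtlety in the removed configureDNS: the multi-line template literal embeds real newlines inside the single-quoted `sh -c` string, so the double-quoted `echo` writes all three nameserver lines. A local demonstration of the same quoting, using a temp file instead of /etc/resolv.conf:

```js
// Demonstrates the quoting the removed configureDNS relied on: newlines in
// the JS template literal survive inside the shell's single quotes, and
// echo "..." preserves them, producing a three-line file.
import { execSync } from "child_process";

const dnsConfig = `nameserver 8.8.8.8
nameserver 8.8.4.4
nameserver 1.1.1.1`;

const out = execSync(
  `sh -c 'echo "${dnsConfig}" > /tmp/resolv.conf.demo && cat /tmp/resolv.conf.demo'`,
  { encoding: "utf-8" }
);
console.log(out); // three nameserver lines, newlines intact
```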
@@ -9312,11 +9339,6 @@ async function executeJob(context, config, options = {}) {
       await installProxyCA(guest, caCertPath);
     }
   }
-  log(`[Executor] Configuring DNS...`);
-  await configureDNS(guest);
-  log(`[Executor] Uploading scripts...`);
-  await withSandboxTiming("script_upload", () => uploadScripts(guest));
-  log(`[Executor] Scripts uploaded to ${SCRIPT_PATHS.baseDir}`);
   if (context.storageManifest) {
     await withSandboxTiming(
       "storage_download",
@@ -10222,7 +10244,7 @@ async function confirm(message) {
 
 // src/commands/benchmark.ts
 import { Command as Command4 } from "commander";
-import
+import crypto from "crypto";
 
 // src/lib/timing.ts
 var Timer = class {
@@ -10257,7 +10279,7 @@ var Timer = class {
 // src/commands/benchmark.ts
 function createBenchmarkContext(prompt, options) {
   return {
-    runId:
+    runId: crypto.randomUUID(),
     prompt,
     agentComposeVersionId: "benchmark-local",
     vars: null,
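The replacement shown here uses Node's built-in UUID generator rather than an external dependency; a quick check of what it produces:

```js
// crypto.randomUUID() has been built into Node since v14.17 and returns an
// RFC 4122 version 4 UUID string, so the benchmark command needs no extra
// ID package. The printed value below is only an example.
import crypto from "crypto";

const runId = crypto.randomUUID();
console.log(runId); // e.g. "9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d"
```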
@@ -10312,7 +10334,7 @@ var benchmarkCommand = new Command4("benchmark").description(
   });
 
 // src/index.ts
-var version = true ? "3.0.5" : "0.1.0";
+var version = true ? "3.0.6" : "0.1.0";
 program.name("vm0-runner").version(version).description("Self-hosted runner for VM0 agents");
 program.addCommand(startCommand);
 program.addCommand(doctorCommand);