@vm0/runner 2.14.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +317 -167
- package/package.json +1 -1
package/index.js
CHANGED
@@ -189,7 +189,7 @@ async function completeJob(apiUrl, context, exitCode, error) {
 }
 
 // src/lib/executor.ts
-import
+import path4 from "path";
 
 // src/lib/firecracker/vm.ts
 import { execSync as execSync2, spawn } from "child_process";
@@ -207,7 +207,7 @@ var FirecrackerClient = class {
   /**
    * Make HTTP request to Firecracker API
    */
-  async request(method,
+  async request(method, path6, body) {
     return new Promise((resolve, reject) => {
       const bodyStr = body !== void 0 ? JSON.stringify(body) : void 0;
       const headers = {
@@ -220,11 +220,11 @@ var FirecrackerClient = class {
         headers["Content-Length"] = Buffer.byteLength(bodyStr);
       }
       console.log(
-        `[FC API] ${method} ${
+        `[FC API] ${method} ${path6}${bodyStr ? ` (${Buffer.byteLength(bodyStr)} bytes)` : ""}`
       );
       const options = {
         socketPath: this.socketPath,
-        path:
+        path: path6,
         method,
         headers,
         // Disable agent to ensure fresh connection for each request
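The renamed `path6` parameter above is the Firecracker API route, not a filesystem path: the client speaks plain HTTP over Firecracker's unix domain socket. For context, a minimal standalone sketch of the same pattern, assuming Node 18+; `fcRequest` is a hypothetical helper, not part of this package:

```js
import http from "http";

// Hedged sketch: HTTP over a unix domain socket, as FirecrackerClient does.
// `socketPath` routes the request to the UDS instead of a TCP host/port.
function fcRequest(socketPath, method, path, body) {
  return new Promise((resolve, reject) => {
    const payload = body !== undefined ? JSON.stringify(body) : undefined;
    const req = http.request(
      {
        socketPath,
        path, // Firecracker API route, e.g. "/machine-config"
        method,
        headers: payload
          ? {
              "Content-Type": "application/json",
              "Content-Length": Buffer.byteLength(payload)
            }
          : {},
        agent: false // fresh connection per request, as in the diff above
      },
      (res) => {
        let data = "";
        res.on("data", (chunk) => (data += chunk));
        res.on("end", () => resolve({ status: res.statusCode, body: data }));
      }
    );
    req.on("error", reject);
    if (payload) req.write(payload);
    req.end();
  });
}
```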
@@ -969,11 +969,14 @@ var FirecrackerVM = class {
   socketPath;
   vmOverlayPath;
   // Per-VM sparse overlay for writes
+  vsockPath;
+  // Vsock UDS path for host-guest communication
   constructor(config) {
     this.config = config;
     this.workDir = config.workDir || `/tmp/vm0-vm-${config.vmId}`;
     this.socketPath = path2.join(this.workDir, "firecracker.sock");
     this.vmOverlayPath = path2.join(this.workDir, "overlay.ext4");
+    this.vsockPath = path2.join(this.workDir, "vsock.sock");
   }
   /**
    * Get current VM state
@@ -999,6 +1002,12 @@ var FirecrackerVM = class {
   getSocketPath() {
     return this.socketPath;
   }
+  /**
+   * Get the vsock UDS path for host-guest communication
+   */
+  getVsockPath() {
+    return this.vsockPath;
+  }
   /**
    * Start the VM
    * This spawns Firecracker, configures it via API, and boots the VM
@@ -1124,6 +1133,12 @@ var FirecrackerVM = class {
       guest_mac: this.networkConfig.guestMac,
       host_dev_name: this.networkConfig.tapDevice
     });
+    console.log(`[VM ${this.config.vmId}] Vsock: ${this.vsockPath}`);
+    await this.client.setVsock({
+      vsock_id: "vsock0",
+      guest_cid: 3,
+      uds_path: this.vsockPath
+    });
   }
   /**
    * Stop the VM gracefully
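The new `setVsock` call registers a virtio-vsock device before boot. Firecracker configures vsock via `PUT /vsock`, and CIDs 0-2 are reserved (2 is the host), so 3 is the first CID usable by a guest. Assuming `setVsock` is a thin wrapper over that endpoint (the package does not show its body here), the equivalent raw call with the hypothetical `fcRequest` sketch above would be:

```js
// Hedged sketch: the raw Firecracker API call that setVsock presumably wraps.
// Must be issued before the VM boots; Firecracker then binds uds_path as the
// host side of the vsock device. Paths are illustrative.
await fcRequest("/tmp/vm0-vm-demo/firecracker.sock", "PUT", "/vsock", {
  vsock_id: "vsock0",
  guest_cid: 3,
  uds_path: "/tmp/vm0-vm-demo/vsock.sock"
});
```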
@@ -1206,80 +1221,194 @@ var FirecrackerVM = class {
   }
 };
 
-// src/lib/firecracker/
-import
-import
-import
-
-
-var
-var
-
-
-"-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/lib/firecracker/vsock.ts
+import * as net from "net";
+import * as fs4 from "fs";
+import * as crypto from "crypto";
+var VSOCK_PORT = 1e3;
+var CONNECT_TIMEOUT_MS = 5e3;
+var HEADER_SIZE = 4;
+var MAX_MESSAGE_SIZE = 1024 * 1024;
+var DEFAULT_EXEC_TIMEOUT_MS = 3e5;
+function encode(msg) {
+  const json = Buffer.from(JSON.stringify(msg), "utf-8");
+  const header = Buffer.alloc(HEADER_SIZE);
+  header.writeUInt32BE(json.length, 0);
+  return Buffer.concat([header, json]);
+}
+var Decoder = class {
+  buf = Buffer.alloc(0);
+  decode(data) {
+    this.buf = Buffer.concat([this.buf, data]);
+    const messages = [];
+    while (this.buf.length >= HEADER_SIZE) {
+      const len = this.buf.readUInt32BE(0);
+      if (len > MAX_MESSAGE_SIZE) throw new Error(`Message too large: ${len}`);
+      const total = HEADER_SIZE + len;
+      if (this.buf.length < total) break;
+      const json = this.buf.subarray(HEADER_SIZE, total);
+      messages.push(JSON.parse(json.toString("utf-8")));
+      this.buf = this.buf.subarray(total);
+    }
+    return messages;
+  }
+};
+var VsockClient = class {
+  vsockPath;
+  socket = null;
+  connected = false;
+  pendingRequests = /* @__PURE__ */ new Map();
+  constructor(vsockPath) {
+    this.vsockPath = vsockPath;
+  }
+  /**
+   * Connect to the guest agent via vsock
+   */
+  async connect() {
+    if (this.connected && this.socket) {
+      return;
     }
-
-
-
-
+    return new Promise((resolve, reject) => {
+      if (!fs4.existsSync(this.vsockPath)) {
+        reject(new Error(`Vsock socket not found: ${this.vsockPath}`));
+        return;
       }
+      const socket = net.createConnection(this.vsockPath);
+      const decoder = new Decoder();
+      let fcConnected = false;
+      let gotReady = false;
+      let pingId = null;
+      let connectionEstablished = false;
+      const timeout = setTimeout(() => {
+        socket.destroy();
+        reject(new Error("Vsock connection timeout"));
+      }, CONNECT_TIMEOUT_MS);
+      socket.on("connect", () => {
+        socket.write(`CONNECT ${VSOCK_PORT}
+`);
+      });
+      socket.on("data", (data) => {
+        if (!fcConnected) {
+          const str = data.toString();
+          if (str.startsWith("OK ")) {
+            fcConnected = true;
+          } else {
+            clearTimeout(timeout);
+            socket.destroy();
+            reject(new Error(`Firecracker connect failed: ${str.trim()}`));
+          }
+          return;
+        }
+        try {
+          for (const msg of decoder.decode(data)) {
+            if (!connectionEstablished) {
+              if (!gotReady && msg.type === "ready") {
+                gotReady = true;
+                pingId = crypto.randomUUID();
+                const ping = { type: "ping", id: pingId, payload: {} };
+                socket.write(encode(ping));
+              } else if (msg.type === "pong" && msg.id === pingId) {
+                clearTimeout(timeout);
+                this.socket = socket;
+                this.connected = true;
+                connectionEstablished = true;
+                resolve();
+              }
+            } else {
+              this.handleMessage(msg);
+            }
+          }
+        } catch (e) {
+          clearTimeout(timeout);
+          socket.destroy();
+          reject(new Error(`Failed to parse message: ${e}`));
+        }
+      });
+      socket.on("error", (err) => {
+        clearTimeout(timeout);
+        this.connected = false;
+        this.socket = null;
+        reject(new Error(`Vsock error: ${err.message}`));
+      });
+      socket.on("close", () => {
+        clearTimeout(timeout);
+        this.connected = false;
+        this.socket = null;
+        if (!gotReady) {
+          reject(new Error("Vsock closed before ready"));
+        }
+        for (const [id, req] of this.pendingRequests) {
+          clearTimeout(req.timeout);
+          req.reject(new Error("Connection closed"));
+          this.pendingRequests.delete(id);
+        }
+      });
+    });
+  }
+  /**
+   * Handle incoming message and route to pending request
+   */
+  handleMessage(msg) {
+    const pending = this.pendingRequests.get(msg.id);
+    if (pending) {
+      clearTimeout(pending.timeout);
+      this.pendingRequests.delete(msg.id);
+      pending.resolve(msg);
     }
   }
   /**
-   *
+   * Send a request and wait for response
    */
-
-
-
-
-
-
+  async request(type, payload, timeoutMs) {
+    await this.connect();
+    if (!this.socket) {
+      throw new Error("Not connected");
+    }
+    const id = crypto.randomUUID();
+    const msg = { type, id, payload };
+    return new Promise((resolve, reject) => {
+      const timeout = setTimeout(() => {
+        this.pendingRequests.delete(id);
+        reject(new Error(`Request timeout: ${type}`));
+      }, timeoutMs);
+      this.pendingRequests.set(id, {
+        resolve,
+        reject,
+        timeout
+      });
+      this.socket.write(encode(msg));
+    });
   }
   /**
    * Execute a command on the remote VM
-   * @param command - The command to execute
-   * @param timeoutMs - Optional timeout in milliseconds (default: 300000ms = 5 minutes)
    */
   async exec(command, timeoutMs) {
-    const
-    const escapedCommand = command.replace(/'/g, "'\\''");
-    const fullCmd = [...sshCmd, `'${escapedCommand}'`].join(" ");
+    const actualTimeout = timeoutMs ?? DEFAULT_EXEC_TIMEOUT_MS;
     try {
-      const
-
-
-
-      //
-
+      const response = await this.request(
+        "exec",
+        { command, timeoutMs: actualTimeout },
+        actualTimeout + 5e3
+        // Add buffer for network latency
+      );
+      if (response.type === "error") {
+        const errorPayload = response.payload;
+        return {
+          exitCode: 1,
+          stdout: "",
+          stderr: errorPayload.message
+        };
+      }
       return {
-        exitCode:
-        stdout: stdout
-        stderr: stderr
+        exitCode: response.payload.exitCode,
+        stdout: response.payload.stdout,
+        stderr: response.payload.stderr
       };
-    } catch (
-      const execError = error;
+    } catch (e) {
       return {
-        exitCode:
-        stdout:
-        stderr:
+        exitCode: 1,
+        stdout: "",
+        stderr: e instanceof Error ? e.message : String(e)
      };
    }
  }
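Two details of the new vsock transport are worth calling out. First, the handshake: for host-initiated vsock connections, Firecracker has the host connect to the UDS and send `CONNECT <port>\n`, replying `OK <port>\n` once the guest accepts; that is what the `fcConnected` flag above tracks before any JSON flows. Second, messages are framed as a 4-byte big-endian length prefix followed by UTF-8 JSON. A small round-trip sketch of that framing, reusing `encode` and `Decoder` from the module above:

```js
// Frame a message, then feed it to a Decoder one byte at a time to show
// that partial reads are buffered until a complete frame has arrived.
const frame = encode({ type: "ping", id: "abc", payload: {} });
console.log(frame.length); // 4-byte header + length of the JSON payload

const decoder = new Decoder();
for (let i = 0; i < frame.length; i++) {
  const msgs = decoder.decode(frame.subarray(i, i + 1));
  if (msgs.length > 0) {
    console.log(msgs[0]); // { type: "ping", id: "abc", payload: {} }
  }
}
```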
@@ -1297,7 +1426,6 @@ var SSHClient = class {
   }
   /**
    * Write content to a file on the remote VM
-   * Uses base64 encoding to safely transfer any content
    */
   async writeFile(remotePath, content) {
     const encoded = Buffer.from(content).toString("base64");
@@ -1317,7 +1445,6 @@ var SSHClient = class {
   }
   /**
    * Write content to a file on the remote VM using sudo
-   * Used for writing to privileged locations like /usr/local/bin
    */
   async writeFileWithSudo(remotePath, content) {
     const encoded = Buffer.from(content).toString("base64");
@@ -1348,8 +1475,7 @@ var SSHClient = class {
     return result.stdout;
   }
   /**
-   * Check if
-   * Uses a short timeout (15s) to ensure waitUntilReachable() respects its outer timeout
+   * Check if vsock connection is available
    */
   async isReachable() {
     try {
@@ -1360,7 +1486,7 @@ var SSHClient = class {
     }
   }
   /**
-   * Wait for
+   * Wait for vsock to become available
    */
   async waitUntilReachable(timeoutMs = 12e4, intervalMs = 2e3) {
     const start = Date.now();
@@ -1377,12 +1503,10 @@ var SSHClient = class {
       }
      });
    }
-    throw new Error(
-      `SSH not reachable after ${timeoutMs}ms at ${this.config.host}`
-    );
+    throw new Error(`Vsock not reachable after ${timeoutMs}ms`);
   }
   /**
-   * Create a directory on the remote VM
+   * Create a directory on the remote VM
    */
   async mkdir(remotePath) {
     await this.execOrThrow(`mkdir -p '${remotePath}'`);
@@ -1395,30 +1519,27 @@ var SSHClient = class {
     return result.exitCode === 0;
   }
   /**
-   * Get the
+   * Get the vsock path (for logging/debugging)
    */
-
-    return this.
-  }
-};
-function createVMSSHClient(guestIp, user = "root", privateKeyPath) {
-  return new SSHClient({
-    host: guestIp,
-    user,
-    privateKeyPath
-  });
-}
-function getRunnerSSHKeyPath() {
-  const runnerKeyPath = "/opt/vm0-runner/ssh/id_rsa";
-  if (fs4.existsSync(runnerKeyPath)) {
-    return runnerKeyPath;
+  getVsockPath() {
+    return this.vsockPath;
   }
-
-
-
+  /**
+   * Close the connection
+   */
+  close() {
+    if (this.socket) {
+      this.socket.end();
+      this.socket = null;
+    }
+    this.connected = false;
+    for (const [id, req] of this.pendingRequests) {
+      clearTimeout(req.timeout);
+      req.reject(new Error("Connection closed"));
+      this.pendingRequests.delete(id);
+    }
   }
-
-}
+};
 
 // ../../node_modules/.pnpm/@ts-rest+core@3.53.0-rc.1_@types+node@24.3.0/node_modules/@ts-rest/core/index.esm.mjs
 var util;
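With these hunks, the vsock client exposes the same surface the removed SSH client had (`exec`, `writeFile`, `isReachable`, `waitUntilReachable`, `mkdir`) plus an explicit `close()`. A hedged usage sketch of the lifecycle, with an illustrative socket path:

```js
const client = new VsockClient("/tmp/vm0-vm-demo/vsock.sock");
try {
  // Poll until the guest agent completes the ready/ping handshake.
  await client.waitUntilReachable();
  // exec falls back to DEFAULT_EXEC_TIMEOUT_MS (3e5 ms = 5 minutes)
  // when no timeout is given.
  const result = await client.exec("uname -a");
  console.log(result.exitCode, result.stdout.trim());
} finally {
  client.close(); // rejects any still-pending requests
}
```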
@@ -1768,8 +1889,8 @@ function getErrorMap() {
   return overrideErrorMap;
 }
 var makeIssue = (params) => {
-  const { data, path:
-  const fullPath = [...
+  const { data, path: path6, errorMaps, issueData } = params;
+  const fullPath = [...path6, ...issueData.path || []];
   const fullIssue = {
     ...issueData,
     path: fullPath
@@ -1868,11 +1989,11 @@ var errorUtil;
   errorUtil2.toString = (message) => typeof message === "string" ? message : message === null || message === void 0 ? void 0 : message.message;
 })(errorUtil || (errorUtil = {}));
 var ParseInputLazyPath = class {
-  constructor(parent, value,
+  constructor(parent, value, path6, key) {
     this._cachedPath = [];
     this.parent = parent;
     this.data = value;
-    this._path =
+    this._path = path6;
     this._key = key;
   }
   get path() {
@@ -5744,7 +5865,7 @@ var runEventsContract = c3.router({
       id: z6.string().min(1, "Run ID is required")
     }),
     query: z6.object({
-      since: z6.coerce.number().default(
+      since: z6.coerce.number().default(-1),
       limit: z6.coerce.number().default(100)
     }),
     responses: {
@@ -6127,7 +6248,7 @@ import { z as z8 } from "zod";
 var c5 = initContract();
 var agentEventSchema = z8.object({
   type: z8.string(),
-  sequenceNumber: z8.number().int().
+  sequenceNumber: z8.number().int().nonnegative()
 }).passthrough();
 var artifactSnapshotSchema = z8.object({
   artifactName: z8.string(),
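The tightened `sequenceNumber` validator rejects negative values, while `.passthrough()` continues to preserve unknown event fields. An illustrative sketch of the parse behavior (example payloads are hypothetical):

```js
// Valid: non-negative sequence number; the extra field survives parsing.
agentEventSchema.parse({ type: "system", sequenceNumber: 0, extra: "kept" });
// Throws a ZodError: -1 fails .nonnegative().
agentEventSchema.parse({ type: "system", sequenceNumber: -1 });
```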
@@ -7762,7 +7883,7 @@ var publicVolumeDownloadContract = c18.router({
 });
 
 // ../../packages/core/src/sandbox/scripts/dist/bundled.ts
-
var RUN_AGENT_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/run-agent.ts\nimport * as fs7 from "fs";\nimport { spawn, execSync as execSync4 } from "child_process";\nimport * as readline from "readline";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HEARTBEAT_INTERVAL = 60;\nvar TELEMETRY_INTERVAL = 30;\nvar HTTP_CONNECT_TIMEOUT = 10;\nvar HTTP_MAX_TIME = 30;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nvar METRICS_INTERVAL = 5;\nfunction validateConfig() {\n if (!WORKING_DIR) {\n throw new Error("VM0_WORKING_DIR is required but not set");\n }\n return true;\n}\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nimport * as fs2 from "fs";\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpPostJson(url, data, maxRetries = HTTP_MAX_RETRIES) {\n const headers = {\n "Content-Type": "application/json",\n Authorization: `Bearer ${API_TOKEN}`\n };\n if (VERCEL_BYPASS) {\n headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;\n }\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP POST attempt ${attempt}/${maxRetries} to ${url}`);\n try {\n const controller = new AbortController();\n const timeoutId = setTimeout(\n () => controller.abort(),\n HTTP_MAX_TIME * 1e3\n );\n const response = await fetch(url, {\n method: "POST",\n headers,\n body: JSON.stringify(data),\n signal: controller.signal\n });\n clearTimeout(timeoutId);\n if (response.ok) {\n const text = await response.text();\n if (text) {\n return JSON.parse(text);\n }\n return {};\n }\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): HTTP ${response.status}`\n );\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("abort")) {\n logWarn(`HTTP POST failed (attempt ${attempt}/${maxRetries}): Timeout`);\n } else {\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP POST failed after ${maxRetries} attempts to ${url}`);\n return null;\n}\nasync function httpPutPresigned(presignedUrl, filePath, contentType = "application/octet-stream", maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP PUT presigned attempt ${attempt}/${maxRetries}`);\n try {\n const curlCmd = [\n "curl",\n "-f",\n "-X",\n "PUT",\n "-H",\n `Content-Type: ${contentType}`,\n "--data-binary",\n `@${filePath}`,\n "--connect-timeout",\n String(HTTP_CONNECT_TIMEOUT),\n "--max-time",\n String(HTTP_MAX_TIME_UPLOAD),\n "--silent",\n `"${presignedUrl}"`\n ].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP PUT presigned failed after ${maxRetries} attempts`);\n return false;\n}\n\n// src/sandbox/scripts/src/lib/secret-masker.ts\nvar MASK_PLACEHOLDER = "***";\nvar MIN_SECRET_LENGTH = 5;\nvar _masker = null;\nvar SecretMasker = class {\n patterns;\n /**\n * Initialize masker with secret values.\n *\n * @param secretValues - List of secret values to mask\n */\n constructor(secretValues) {\n this.patterns = /* @__PURE__ */ new Set();\n for (const secret of secretValues) {\n if (!secret || secret.length < MIN_SECRET_LENGTH) {\n continue;\n }\n this.patterns.add(secret);\n try {\n const b64 = Buffer.from(secret).toString("base64");\n if (b64.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(b64);\n }\n } catch {\n }\n try {\n const urlEnc = encodeURIComponent(secret);\n if (urlEnc !== secret && urlEnc.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(urlEnc);\n }\n } catch {\n }\n }\n }\n /**\n * Recursively mask all occurrences of secrets in the data.\n *\n * @param data - Data to mask (string, list, dict, or primitive)\n * @returns Masked data with the same structure\n */\n mask(data) {\n return this.deepMask(data);\n }\n deepMask(data) {\n if (typeof data === "string") {\n let result = data;\n for (const pattern of this.patterns) {\n result = result.split(pattern).join(MASK_PLACEHOLDER);\n }\n return result;\n }\n if (Array.isArray(data)) {\n return data.map((item) => this.deepMask(item));\n }\n if (data !== null && typeof data === "object") {\n const result = {};\n for (const [key, value] of Object.entries(\n data\n )) {\n result[key] = this.deepMask(value);\n }\n return result;\n }\n return data;\n }\n};\nfunction createMasker() {\n const secretValuesStr = process.env.VM0_SECRET_VALUES ?? "";\n if (!secretValuesStr) {\n return new SecretMasker([]);\n }\n const secretValues = [];\n for (const encodedValue of secretValuesStr.split(",")) {\n const trimmed = encodedValue.trim();\n if (trimmed) {\n try {\n const decoded = Buffer.from(trimmed, "base64").toString("utf-8");\n if (decoded) {\n secretValues.push(decoded);\n }\n } catch {\n }\n }\n }\n return new SecretMasker(secretValues);\n}\nfunction getMasker() {\n if (_masker === null) {\n _masker = createMasker();\n }\n return _masker;\n}\nfunction maskData(data) {\n return getMasker().mask(data);\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nasync function sendEvent(event, sequenceNumber) {\n const eventType = event.type ?? "";\n const eventSubtype = event.subtype ?? "";\n let sessionId = null;\n if (CLI_AGENT_TYPE === "codex") {\n if (eventType === "thread.started") {\n sessionId = event.thread_id ?? "";\n }\n } else {\n if (eventType === "system" && eventSubtype === "init") {\n sessionId = event.session_id ?? "";\n }\n }\n if (sessionId && !fs2.existsSync(SESSION_ID_FILE)) {\n logInfo(`Captured session ID: ${sessionId}`);\n fs2.writeFileSync(SESSION_ID_FILE, sessionId);\n const homeDir = process.env.HOME ?? "/home/user";\n let sessionHistoryPath;\n if (CLI_AGENT_TYPE === "codex") {\n const codexHome = process.env.CODEX_HOME ?? 
`${homeDir}/.codex`;\n sessionHistoryPath = `CODEX_SEARCH:${codexHome}/sessions:${sessionId}`;\n } else {\n const projectName = WORKING_DIR.replace(/^\\//, "").replace(/\\//g, "-");\n sessionHistoryPath = `${homeDir}/.claude/projects/-${projectName}/${sessionId}.jsonl`;\n }\n fs2.writeFileSync(SESSION_HISTORY_PATH_FILE, sessionHistoryPath);\n logInfo(`Session history will be at: ${sessionHistoryPath}`);\n }\n const eventWithSequence = {\n ...event,\n sequenceNumber\n };\n const maskedEvent = maskData(eventWithSequence);\n const payload = {\n runId: RUN_ID,\n events: [maskedEvent]\n };\n const result = await httpPostJson(WEBHOOK_URL, payload);\n if (result === null) {\n logError("Failed to send event after retries");\n fs2.writeFileSync(EVENT_ERROR_FLAG, "1");\n return false;\n }\n return true;\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nimport * as fs4 from "fs";\nimport * as path2 from "path";\n\n// src/sandbox/scripts/src/lib/direct-upload.ts\nimport * as fs3 from "fs";\nimport * as path from "path";\nimport * as crypto from "crypto";\nimport { execSync as execSync2 } from "child_process";\nfunction computeFileHash(filePath) {\n const hash = crypto.createHash("sha256");\n const buffer = fs3.readFileSync(filePath);\n hash.update(buffer);\n return hash.digest("hex");\n}\nfunction collectFileMetadata(dirPath) {\n const files = [];\n function walkDir(currentPath, relativePath) {\n const items = fs3.readdirSync(currentPath);\n for (const item of items) {\n if (item === ".git" || item === ".vm0") {\n continue;\n }\n const fullPath = path.join(currentPath, item);\n const relPath = relativePath ? path.join(relativePath, item) : item;\n const stat = fs3.statSync(fullPath);\n if (stat.isDirectory()) {\n walkDir(fullPath, relPath);\n } else if (stat.isFile()) {\n try {\n const fileHash = computeFileHash(fullPath);\n files.push({\n path: relPath,\n hash: fileHash,\n size: stat.size\n });\n } catch (error) {\n logWarn(`Could not process file ${relPath}: ${error}`);\n }\n }\n }\n }\n walkDir(dirPath, "");\n return files;\n}\nfunction createArchive(dirPath, tarPath) {\n try {\n execSync2(\n `tar -czf "${tarPath}" --exclude=\'.git\' --exclude=\'.vm0\' -C "${dirPath}" .`,\n { stdio: ["pipe", "pipe", "pipe"] }\n );\n return true;\n } catch (error) {\n logError(`Failed to create archive: ${error}`);\n return false;\n }\n}\nfunction createManifest(files, manifestPath) {\n try {\n const manifest = {\n version: 1,\n files,\n createdAt: (/* @__PURE__ */ new Date()).toISOString()\n };\n fs3.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));\n return true;\n } catch (error) {\n logError(`Failed to create manifest: ${error}`);\n return false;\n }\n}\nasync function createDirectUploadSnapshot(mountPath, storageName, storageType = "artifact", runId, message) {\n logInfo(\n `Creating direct upload snapshot for \'${storageName}\' (type: ${storageType})`\n );\n logInfo("Computing file hashes...");\n const hashStart = Date.now();\n const files = collectFileMetadata(mountPath);\n recordSandboxOp("artifact_hash_compute", Date.now() - hashStart, true);\n logInfo(`Found ${files.length} files`);\n if (files.length === 0) {\n logInfo("No files to upload, creating empty version");\n }\n logInfo("Calling prepare endpoint...");\n const prepareStart = Date.now();\n const preparePayload = {\n storageName,\n storageType,\n files\n };\n if (runId) {\n preparePayload.runId = runId;\n }\n const prepareResponse = await httpPostJson(\n STORAGE_PREPARE_URL,\n preparePayload\n );\n if (!prepareResponse) {\n 
logError("Failed to call prepare endpoint");\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n const versionId = prepareResponse.versionId;\n if (!versionId) {\n logError(`Invalid prepare response: ${JSON.stringify(prepareResponse)}`);\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, true);\n if (prepareResponse.existing) {\n logInfo(`Version already exists (deduplicated): ${versionId.slice(0, 8)}`);\n logInfo("Updating HEAD pointer...");\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse || !commitResponse.success) {\n logError(`Failed to update HEAD: ${JSON.stringify(commitResponse)}`);\n return null;\n }\n return { versionId, deduplicated: true };\n }\n const uploads = prepareResponse.uploads;\n if (!uploads) {\n logError("No upload URLs in prepare response");\n return null;\n }\n const archiveInfo = uploads.archive;\n const manifestInfo = uploads.manifest;\n if (!archiveInfo || !manifestInfo) {\n logError("Missing archive or manifest upload info");\n return null;\n }\n const tempDir = fs3.mkdtempSync(`/tmp/direct-upload-${storageName}-`);\n try {\n logInfo("Creating archive...");\n const archiveStart = Date.now();\n const archivePath = path.join(tempDir, "archive.tar.gz");\n if (!createArchive(mountPath, archivePath)) {\n logError("Failed to create archive");\n recordSandboxOp(\n "artifact_archive_create",\n Date.now() - archiveStart,\n false\n );\n return null;\n }\n recordSandboxOp("artifact_archive_create", Date.now() - archiveStart, true);\n logInfo("Creating manifest...");\n const manifestPath = path.join(tempDir, "manifest.json");\n if (!createManifest(files, manifestPath)) {\n logError("Failed to create manifest");\n return null;\n }\n logInfo("Uploading archive to S3...");\n const s3UploadStart = Date.now();\n if (!await httpPutPresigned(\n archiveInfo.presignedUrl,\n archivePath,\n "application/gzip"\n )) {\n logError("Failed to upload archive to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n logInfo("Uploading manifest to S3...");\n if (!await httpPutPresigned(\n manifestInfo.presignedUrl,\n manifestPath,\n "application/json"\n )) {\n logError("Failed to upload manifest to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, true);\n logInfo("Calling commit endpoint...");\n const commitStart = Date.now();\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n if (message) {\n commitPayload.message = message;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse) {\n logError("Failed to call commit endpoint");\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n if (!commitResponse.success) {\n logError(`Commit failed: ${JSON.stringify(commitResponse)}`);\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, true);\n logInfo(`Direct upload snapshot created: 
${versionId.slice(0, 8)}`);\n return { versionId };\n } finally {\n try {\n fs3.rmSync(tempDir, { recursive: true, force: true });\n } catch {\n }\n }\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nfunction findJsonlFiles(dir) {\n const files = [];\n function walk(currentDir) {\n try {\n const items = fs4.readdirSync(currentDir);\n for (const item of items) {\n const fullPath = path2.join(currentDir, item);\n const stat = fs4.statSync(fullPath);\n if (stat.isDirectory()) {\n walk(fullPath);\n } else if (item.endsWith(".jsonl")) {\n files.push(fullPath);\n }\n }\n } catch {\n }\n }\n walk(dir);\n return files;\n}\nfunction findCodexSessionFile(sessionsDir, sessionId) {\n const files = findJsonlFiles(sessionsDir);\n logInfo(`Searching for Codex session ${sessionId} in ${files.length} files`);\n for (const filepath of files) {\n const filename = path2.basename(filepath);\n if (filename.includes(sessionId) || filename.replace(/-/g, "").includes(sessionId.replace(/-/g, ""))) {\n logInfo(`Found Codex session file: ${filepath}`);\n return filepath;\n }\n }\n if (files.length > 0) {\n files.sort((a, b) => {\n const statA = fs4.statSync(a);\n const statB = fs4.statSync(b);\n return statB.mtimeMs - statA.mtimeMs;\n });\n const mostRecent = files[0] ?? null;\n if (mostRecent) {\n logInfo(\n `Session ID not found in filenames, using most recent: ${mostRecent}`\n );\n }\n return mostRecent;\n }\n return null;\n}\nasync function createCheckpoint() {\n const checkpointStart = Date.now();\n logInfo("Creating checkpoint...");\n const sessionIdStart = Date.now();\n if (!fs4.existsSync(SESSION_ID_FILE)) {\n logError("No session ID found, checkpoint creation failed");\n recordSandboxOp(\n "session_id_read",\n Date.now() - sessionIdStart,\n false,\n "Session ID file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const cliAgentSessionId = fs4.readFileSync(SESSION_ID_FILE, "utf-8").trim();\n recordSandboxOp("session_id_read", Date.now() - sessionIdStart, true);\n const sessionHistoryStart = Date.now();\n if (!fs4.existsSync(SESSION_HISTORY_PATH_FILE)) {\n logError("No session history path found, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history path file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionHistoryPathRaw = fs4.readFileSync(SESSION_HISTORY_PATH_FILE, "utf-8").trim();\n let sessionHistoryPath;\n if (sessionHistoryPathRaw.startsWith("CODEX_SEARCH:")) {\n const parts = sessionHistoryPathRaw.split(":");\n if (parts.length !== 3) {\n logError(`Invalid Codex search marker format: ${sessionHistoryPathRaw}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Invalid Codex search marker"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionsDir = parts[1] ?? "";\n const codexSessionId = parts[2] ?? 
"";\n logInfo(`Searching for Codex session in ${sessionsDir}`);\n const foundPath = findCodexSessionFile(sessionsDir, codexSessionId);\n if (!foundPath) {\n logError(\n `Could not find Codex session file for ${codexSessionId} in ${sessionsDir}`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Codex session file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n sessionHistoryPath = foundPath;\n } else {\n sessionHistoryPath = sessionHistoryPathRaw;\n }\n if (!fs4.existsSync(sessionHistoryPath)) {\n logError(\n `Session history file not found at ${sessionHistoryPath}, checkpoint creation failed`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n let cliAgentSessionHistory;\n try {\n cliAgentSessionHistory = fs4.readFileSync(sessionHistoryPath, "utf-8");\n } catch (error) {\n logError(`Failed to read session history: ${error}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n String(error)\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n if (!cliAgentSessionHistory.trim()) {\n logError("Session history is empty, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history empty"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const lineCount = cliAgentSessionHistory.trim().split("\\n").length;\n logInfo(`Session history loaded (${lineCount} lines)`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n true\n );\n let artifactSnapshot = null;\n if (ARTIFACT_DRIVER && ARTIFACT_VOLUME_NAME) {\n logInfo(`Processing artifact with driver: ${ARTIFACT_DRIVER}`);\n if (ARTIFACT_DRIVER !== "vas") {\n logError(\n `Unknown artifact driver: ${ARTIFACT_DRIVER} (only \'vas\' is supported)`\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n logInfo(\n `Creating VAS snapshot for artifact \'${ARTIFACT_VOLUME_NAME}\' at ${ARTIFACT_MOUNT_PATH}`\n );\n logInfo("Using direct S3 upload...");\n const snapshot = await createDirectUploadSnapshot(\n ARTIFACT_MOUNT_PATH,\n ARTIFACT_VOLUME_NAME,\n "artifact",\n RUN_ID,\n `Checkpoint from run ${RUN_ID}`\n );\n if (!snapshot) {\n logError("Failed to create VAS snapshot for artifact");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const artifactVersion = snapshot.versionId;\n if (!artifactVersion) {\n logError("Failed to extract versionId from snapshot");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n artifactSnapshot = {\n artifactName: ARTIFACT_VOLUME_NAME,\n artifactVersion\n };\n logInfo(\n `VAS artifact snapshot created: ${ARTIFACT_VOLUME_NAME}@${artifactVersion}`\n );\n } else {\n logInfo(\n "No artifact configured, creating checkpoint without artifact snapshot"\n );\n }\n logInfo("Calling checkpoint API...");\n const checkpointPayload = {\n runId: RUN_ID,\n cliAgentType: CLI_AGENT_TYPE,\n cliAgentSessionId,\n cliAgentSessionHistory\n };\n if (artifactSnapshot) {\n checkpointPayload.artifactSnapshot = artifactSnapshot;\n }\n const apiCallStart = Date.now();\n const 
result = await httpPostJson(\n CHECKPOINT_URL,\n checkpointPayload\n );\n if (result && result.checkpointId) {\n const checkpointId = result.checkpointId;\n logInfo(`Checkpoint created successfully: ${checkpointId}`);\n recordSandboxOp("checkpoint_api_call", Date.now() - apiCallStart, true);\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, true);\n return true;\n } else {\n logError(\n `Checkpoint API returned invalid response: ${JSON.stringify(result)}`\n );\n recordSandboxOp(\n "checkpoint_api_call",\n Date.now() - apiCallStart,\n false,\n "Invalid API response"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n}\n\n// src/sandbox/scripts/src/lib/metrics.ts\nimport * as fs5 from "fs";\nimport { execSync as execSync3 } from "child_process";\nvar shutdownRequested = false;\nfunction getCpuPercent() {\n try {\n const content = fs5.readFileSync("/proc/stat", "utf-8");\n const line = content.split("\\n")[0];\n if (!line) {\n return 0;\n }\n const parts = line.split(/\\s+/);\n if (parts[0] !== "cpu") {\n return 0;\n }\n const values = parts.slice(1).map((x) => parseInt(x, 10));\n const idleVal = values[3];\n const iowaitVal = values[4];\n if (idleVal === void 0 || iowaitVal === void 0) {\n return 0;\n }\n const idle = idleVal + iowaitVal;\n const total = values.reduce((a, b) => a + b, 0);\n if (total === 0) {\n return 0;\n }\n const cpuPercent = 100 * (1 - idle / total);\n return Math.round(cpuPercent * 100) / 100;\n } catch (error) {\n logDebug(`Failed to get CPU percent: ${error}`);\n return 0;\n }\n}\nfunction getMemoryInfo() {\n try {\n const result = execSync3("free -b", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n for (const line of lines) {\n if (line.startsWith("Mem:")) {\n const parts = line.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n }\n }\n return [0, 0];\n } catch (error) {\n logDebug(`Failed to get memory info: ${error}`);\n return [0, 0];\n }\n}\nfunction getDiskInfo() {\n try {\n const result = execSync3("df -B1 /", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n if (lines.length < 2) {\n return [0, 0];\n }\n const dataLine = lines[1];\n if (!dataLine) {\n return [0, 0];\n }\n const parts = dataLine.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n } catch (error) {\n logDebug(`Failed to get disk info: ${error}`);\n return [0, 0];\n }\n}\nfunction collectMetrics() {\n const cpu = getCpuPercent();\n const [memUsed, memTotal] = getMemoryInfo();\n const [diskUsed, diskTotal] = getDiskInfo();\n return {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n cpu,\n mem_used: memUsed,\n mem_total: memTotal,\n disk_used: diskUsed,\n disk_total: diskTotal\n };\n}\nfunction metricsCollectorLoop() {\n logInfo(`Metrics collector started, writing to ${METRICS_LOG_FILE}`);\n const writeMetrics = () => {\n if (shutdownRequested) {\n logInfo("Metrics collector stopped");\n return;\n }\n try {\n const metrics = collectMetrics();\n fs5.appendFileSync(METRICS_LOG_FILE, JSON.stringify(metrics) + "\\n");\n logDebug(\n 
`Metrics collected: cpu=${metrics.cpu}%, mem=${metrics.mem_used}/${metrics.mem_total}`\n );\n } catch (error) {\n logError(`Failed to collect/write metrics: ${error}`);\n }\n setTimeout(writeMetrics, METRICS_INTERVAL * 1e3);\n };\n writeMetrics();\n}\nfunction startMetricsCollector() {\n shutdownRequested = false;\n setTimeout(metricsCollectorLoop, 0);\n}\nfunction stopMetricsCollector() {\n shutdownRequested = true;\n}\n\n// src/sandbox/scripts/src/lib/upload-telemetry.ts\nimport * as fs6 from "fs";\nvar shutdownRequested2 = false;\nfunction readFileFromPosition(filePath, posFile) {\n let lastPos = 0;\n if (fs6.existsSync(posFile)) {\n try {\n const content = fs6.readFileSync(posFile, "utf-8").trim();\n lastPos = parseInt(content, 10) || 0;\n } catch {\n lastPos = 0;\n }\n }\n let newContent = "";\n let newPos = lastPos;\n if (fs6.existsSync(filePath)) {\n try {\n const fd = fs6.openSync(filePath, "r");\n const stats = fs6.fstatSync(fd);\n const bufferSize = stats.size - lastPos;\n if (bufferSize > 0) {\n const buffer = Buffer.alloc(bufferSize);\n fs6.readSync(fd, buffer, 0, bufferSize, lastPos);\n newContent = buffer.toString("utf-8");\n newPos = stats.size;\n }\n fs6.closeSync(fd);\n } catch (error) {\n logDebug(`Failed to read ${filePath}: ${error}`);\n }\n }\n return [newContent, newPos];\n}\nfunction savePosition(posFile, position) {\n try {\n fs6.writeFileSync(posFile, String(position));\n } catch (error) {\n logDebug(`Failed to save position to ${posFile}: ${error}`);\n }\n}\nfunction readJsonlFromPosition(filePath, posFile) {\n const [content, newPos] = readFileFromPosition(filePath, posFile);\n const entries = [];\n if (content) {\n for (const line of content.trim().split("\\n")) {\n if (line) {\n try {\n entries.push(JSON.parse(line));\n } catch {\n }\n }\n }\n }\n return [entries, newPos];\n}\nfunction readMetricsFromPosition(posFile) {\n return readJsonlFromPosition(METRICS_LOG_FILE, posFile);\n}\nfunction readNetworkLogsFromPosition(posFile) {\n return readJsonlFromPosition(NETWORK_LOG_FILE, posFile);\n}\nfunction readSandboxOpsFromPosition(posFile) {\n return readJsonlFromPosition(SANDBOX_OPS_LOG_FILE, posFile);\n}\nasync function uploadTelemetry() {\n const [systemLog, logPos] = readFileFromPosition(\n SYSTEM_LOG_FILE,\n TELEMETRY_LOG_POS_FILE\n );\n const [metrics, metricsPos] = readMetricsFromPosition(\n TELEMETRY_METRICS_POS_FILE\n );\n const [networkLogs, networkPos] = readNetworkLogsFromPosition(\n TELEMETRY_NETWORK_POS_FILE\n );\n const [sandboxOps, sandboxOpsPos] = readSandboxOpsFromPosition(\n TELEMETRY_SANDBOX_OPS_POS_FILE\n );\n if (!systemLog && metrics.length === 0 && networkLogs.length === 0 && sandboxOps.length === 0) {\n logDebug("No new telemetry data to upload");\n return true;\n }\n const maskedSystemLog = systemLog ? maskData(systemLog) : "";\n const maskedNetworkLogs = networkLogs.length > 0 ? 
maskData(networkLogs) : [];\n const payload = {\n runId: RUN_ID,\n systemLog: maskedSystemLog,\n metrics,\n // Metrics don\'t contain secrets (just numbers)\n networkLogs: maskedNetworkLogs,\n sandboxOperations: sandboxOps\n // Sandbox ops don\'t contain secrets (just timing data)\n };\n logDebug(\n `Uploading telemetry: ${systemLog.length} bytes log, ${metrics.length} metrics, ${networkLogs.length} network logs, ${sandboxOps.length} sandbox ops`\n );\n const result = await httpPostJson(TELEMETRY_URL, payload, 1);\n if (result) {\n savePosition(TELEMETRY_LOG_POS_FILE, logPos);\n savePosition(TELEMETRY_METRICS_POS_FILE, metricsPos);\n savePosition(TELEMETRY_NETWORK_POS_FILE, networkPos);\n savePosition(TELEMETRY_SANDBOX_OPS_POS_FILE, sandboxOpsPos);\n logDebug(\n `Telemetry uploaded successfully: ${result.id ?? "unknown"}`\n );\n return true;\n } else {\n logWarn("Failed to upload telemetry (will retry next interval)");\n return false;\n }\n}\nasync function telemetryUploadLoop() {\n logInfo(`Telemetry upload started (interval: ${TELEMETRY_INTERVAL}s)`);\n const runUpload = async () => {\n if (shutdownRequested2) {\n logInfo("Telemetry upload stopped");\n return;\n }\n try {\n await uploadTelemetry();\n } catch (error) {\n logError(`Telemetry upload error: ${error}`);\n }\n setTimeout(() => void runUpload(), TELEMETRY_INTERVAL * 1e3);\n };\n await runUpload();\n}\nfunction startTelemetryUpload() {\n shutdownRequested2 = false;\n setTimeout(() => void telemetryUploadLoop(), 0);\n}\nfunction stopTelemetryUpload() {\n shutdownRequested2 = true;\n}\nasync function finalTelemetryUpload() {\n logInfo("Performing final telemetry upload...");\n return uploadTelemetry();\n}\n\n// src/sandbox/scripts/src/run-agent.ts\nvar shutdownRequested3 = false;\nfunction heartbeatLoop() {\n const sendHeartbeat = async () => {\n if (shutdownRequested3) {\n return;\n }\n try {\n if (await httpPostJson(HEARTBEAT_URL, { runId: RUN_ID })) {\n logInfo("Heartbeat sent");\n } else {\n logWarn("Heartbeat failed");\n }\n } catch (error) {\n logWarn(`Heartbeat error: ${error}`);\n }\n setTimeout(() => {\n sendHeartbeat().catch(() => {\n });\n }, HEARTBEAT_INTERVAL * 1e3);\n };\n sendHeartbeat().catch(() => {\n });\n}\nasync function cleanup(exitCode, errorMessage) {\n logInfo("\\u25B7 Cleanup");\n const telemetryStart = Date.now();\n let telemetrySuccess = true;\n try {\n await finalTelemetryUpload();\n } catch (error) {\n telemetrySuccess = false;\n logError(`Final telemetry upload failed: ${error}`);\n }\n recordSandboxOp(\n "final_telemetry_upload",\n Date.now() - telemetryStart,\n telemetrySuccess\n );\n logInfo(`Calling complete API with exitCode=${exitCode}`);\n const completePayload = {\n runId: RUN_ID,\n exitCode\n };\n if (errorMessage) {\n completePayload.error = errorMessage;\n }\n const completeStart = Date.now();\n let completeSuccess = false;\n try {\n if (await httpPostJson(COMPLETE_URL, completePayload)) {\n logInfo("Complete API called successfully");\n completeSuccess = true;\n } else {\n logError("Failed to call complete API (sandbox may not be cleaned up)");\n }\n } catch (error) {\n logError(`Complete API call failed: ${error}`);\n }\n recordSandboxOp(\n "complete_api_call",\n Date.now() - completeStart,\n completeSuccess\n );\n shutdownRequested3 = true;\n stopMetricsCollector();\n stopTelemetryUpload();\n logInfo("Background processes stopped");\n if (exitCode === 0) {\n logInfo("\\u2713 Sandbox finished successfully");\n } else {\n logInfo(`\\u2717 Sandbox failed (exit code ${exitCode})`);\n 
}\n}\nasync function run() {\n validateConfig();\n logInfo(`\\u25B6 VM0 Sandbox ${RUN_ID}`);\n logInfo("\\u25B7 Initialization");\n const initStartTime = Date.now();\n logInfo(`Working directory: ${WORKING_DIR}`);\n const heartbeatStart = Date.now();\n heartbeatLoop();\n logInfo("Heartbeat started");\n recordSandboxOp("heartbeat_start", Date.now() - heartbeatStart, true);\n const metricsStart = Date.now();\n startMetricsCollector();\n logInfo("Metrics collector started");\n recordSandboxOp("metrics_collector_start", Date.now() - metricsStart, true);\n const telemetryStart = Date.now();\n startTelemetryUpload();\n logInfo("Telemetry upload started");\n recordSandboxOp("telemetry_upload_start", Date.now() - telemetryStart, true);\n const workingDirStart = Date.now();\n try {\n fs7.mkdirSync(WORKING_DIR, { recursive: true });\n process.chdir(WORKING_DIR);\n } catch (error) {\n recordSandboxOp(\n "working_dir_setup",\n Date.now() - workingDirStart,\n false,\n String(error)\n );\n throw new Error(\n `Failed to create/change to working directory: ${WORKING_DIR} - ${error}`\n );\n }\n recordSandboxOp("working_dir_setup", Date.now() - workingDirStart, true);\n if (CLI_AGENT_TYPE === "codex") {\n const homeDir = process.env.HOME ?? "/home/user";\n const codexHome = `${homeDir}/.codex`;\n fs7.mkdirSync(codexHome, { recursive: true });\n process.env.CODEX_HOME = codexHome;\n logInfo(`Codex home directory: ${codexHome}`);\n const codexLoginStart = Date.now();\n let codexLoginSuccess = false;\n const apiKey = process.env.OPENAI_API_KEY ?? "";\n if (apiKey) {\n try {\n execSync4("codex login --with-api-key", {\n input: apiKey,\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n logInfo("Codex authenticated with API key");\n codexLoginSuccess = true;\n } catch (error) {\n logError(`Codex login failed: ${error}`);\n }\n } else {\n logError("OPENAI_API_KEY not set");\n }\n recordSandboxOp(\n "codex_login",\n Date.now() - codexLoginStart,\n codexLoginSuccess\n );\n }\n const initDurationMs = Date.now() - initStartTime;\n recordSandboxOp("init_total", initDurationMs, true);\n logInfo(`\\u2713 Initialization complete (${Math.floor(initDurationMs / 1e3)}s)`);\n logInfo("\\u25B7 Execution");\n const execStartTime = Date.now();\n logInfo(`Starting ${CLI_AGENT_TYPE} execution...`);\n logInfo(`Prompt: ${PROMPT}`);\n const useMock = process.env.USE_MOCK_CLAUDE === "true";\n let cmd;\n if (CLI_AGENT_TYPE === "codex") {\n if (useMock) {\n throw new Error("Mock mode not supported for Codex");\n }\n const codexArgs = [\n "exec",\n "--json",\n "--dangerously-bypass-approvals-and-sandbox",\n "--skip-git-repo-check",\n "-C",\n WORKING_DIR\n ];\n if (OPENAI_MODEL) {\n codexArgs.push("-m", OPENAI_MODEL);\n }\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n codexArgs.push("resume", RESUME_SESSION_ID, PROMPT);\n } else {\n logInfo("Starting new session");\n codexArgs.push(PROMPT);\n }\n cmd = ["codex", ...codexArgs];\n } else {\n const claudeArgs = [\n "--print",\n "--verbose",\n "--output-format",\n "stream-json",\n "--dangerously-skip-permissions"\n ];\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n claudeArgs.push("--resume", RESUME_SESSION_ID);\n } else {\n logInfo("Starting new session");\n }\n const claudeBin = useMock ? 
"/usr/local/bin/vm0-agent/mock-claude.mjs" : "claude";\n if (useMock) {\n logInfo("Using mock-claude for testing");\n }\n cmd = [claudeBin, ...claudeArgs, PROMPT];\n }\n let agentExitCode = 0;\n const stderrLines = [];\n let logFile = null;\n try {\n logFile = fs7.createWriteStream(AGENT_LOG_FILE);\n const cmdExe = cmd[0];\n if (!cmdExe) {\n throw new Error("Empty command");\n }\n const proc = spawn(cmdExe, cmd.slice(1), {\n stdio: ["ignore", "pipe", "pipe"]\n });\n const exitPromise = new Promise((resolve) => {\n let resolved = false;\n proc.on("error", (err) => {\n if (!resolved) {\n resolved = true;\n logError(`Failed to spawn ${CLI_AGENT_TYPE}: ${err.message}`);\n stderrLines.push(`Spawn error: ${err.message}`);\n resolve(1);\n }\n });\n proc.on("close", (code) => {\n if (!resolved) {\n resolved = true;\n resolve(code ?? 1);\n }\n });\n });\n if (proc.stderr) {\n const stderrRl = readline.createInterface({ input: proc.stderr });\n stderrRl.on("line", (line) => {\n stderrLines.push(line);\n if (logFile && !logFile.destroyed) {\n logFile.write(`[STDERR] ${line}\n`);\n }\n });\n }\n if (proc.stdout) {\n const stdoutRl = readline.createInterface({ input: proc.stdout });\n let eventSequence = 0;\n for await (const line of stdoutRl) {\n if (logFile && !logFile.destroyed) {\n logFile.write(line + "\\n");\n }\n const stripped = line.trim();\n if (!stripped) {\n continue;\n }\n try {\n const event = JSON.parse(stripped);\n eventSequence++;\n await sendEvent(event, eventSequence);\n if (event.type === "result") {\n const resultContent = event.result;\n if (resultContent) {\n console.log(resultContent);\n }\n }\n } catch {\n logDebug(`Non-JSON line from agent: ${stripped.slice(0, 100)}`);\n }\n }\n }\n agentExitCode = await exitPromise;\n } catch (error) {\n logError(`Failed to execute ${CLI_AGENT_TYPE}: ${error}`);\n agentExitCode = 1;\n } finally {\n if (logFile && !logFile.destroyed) {\n logFile.end();\n }\n }\n console.log();\n let finalExitCode = agentExitCode;\n let errorMessage = "";\n if (fs7.existsSync(EVENT_ERROR_FLAG)) {\n logError("Some events failed to send, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Some events failed to send";\n }\n const execDurationMs = Date.now() - execStartTime;\n recordSandboxOp("cli_execution", execDurationMs, agentExitCode === 0);\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`\\u2713 Execution complete (${Math.floor(execDurationMs / 1e3)}s)`);\n } else {\n logInfo(`\\u2717 Execution failed (${Math.floor(execDurationMs / 1e3)}s)`);\n }\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`${CLI_AGENT_TYPE} completed successfully`);\n logInfo("\\u25B7 Checkpoint");\n const checkpointStartTime = Date.now();\n const checkpointSuccess = await createCheckpoint();\n const checkpointDuration = Math.floor(\n (Date.now() - checkpointStartTime) / 1e3\n );\n if (checkpointSuccess) {\n logInfo(`\\u2713 Checkpoint complete (${checkpointDuration}s)`);\n } else {\n logInfo(`\\u2717 Checkpoint failed (${checkpointDuration}s)`);\n }\n if (!checkpointSuccess) {\n logError("Checkpoint creation failed, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Checkpoint creation failed";\n }\n } else {\n if (agentExitCode !== 0) {\n logInfo(`${CLI_AGENT_TYPE} failed with exit code ${agentExitCode}`);\n if (stderrLines.length > 0) {\n errorMessage = stderrLines.map((line) => line.trim()).join(" ");\n logInfo(`Captured stderr: ${errorMessage}`);\n } else {\n errorMessage = `Agent exited with code ${agentExitCode}`;\n }\n }\n }\n 
return [finalExitCode, errorMessage];\n}\nasync function main() {\n let exitCode = 1;\n let errorMessage = "Unexpected termination";\n try {\n [exitCode, errorMessage] = await run();\n } catch (error) {\n if (error instanceof Error) {\n exitCode = 1;\n errorMessage = error.message;\n logError(`Error: ${errorMessage}`);\n } else {\n exitCode = 1;\n errorMessage = `Unexpected error: ${error}`;\n logError(errorMessage);\n }\n } finally {\n await cleanup(exitCode, errorMessage);\n }\n return exitCode;\n}\nmain().then((code) => process.exit(code)).catch((error) => {\n console.error("Fatal error:", error);\n process.exit(1);\n});\n';
+
var RUN_AGENT_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/run-agent.ts\nimport * as fs7 from "fs";\nimport { spawn, execSync as execSync4 } from "child_process";\nimport * as readline from "readline";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HEARTBEAT_INTERVAL = 60;\nvar TELEMETRY_INTERVAL = 30;\nvar HTTP_CONNECT_TIMEOUT = 10;\nvar HTTP_MAX_TIME = 30;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nvar METRICS_INTERVAL = 5;\nfunction validateConfig() {\n if (!WORKING_DIR) {\n throw new Error("VM0_WORKING_DIR is required but not set");\n }\n return true;\n}\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nimport * as fs2 from "fs";\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpPostJson(url, data, maxRetries = HTTP_MAX_RETRIES) {\n const headers = {\n "Content-Type": "application/json",\n Authorization: `Bearer ${API_TOKEN}`\n };\n if (VERCEL_BYPASS) {\n headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;\n }\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP POST attempt ${attempt}/${maxRetries} to ${url}`);\n try {\n const controller = new AbortController();\n const timeoutId = setTimeout(\n () => controller.abort(),\n HTTP_MAX_TIME * 1e3\n );\n const response = await fetch(url, {\n method: "POST",\n headers,\n body: JSON.stringify(data),\n signal: controller.signal\n });\n clearTimeout(timeoutId);\n if (response.ok) {\n const text = await response.text();\n if (text) {\n return JSON.parse(text);\n }\n return {};\n }\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): HTTP ${response.status}`\n );\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("abort")) {\n logWarn(`HTTP POST failed (attempt ${attempt}/${maxRetries}): Timeout`);\n } else {\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP POST failed after ${maxRetries} attempts to ${url}`);\n return null;\n}\nasync function httpPutPresigned(presignedUrl, filePath, contentType = "application/octet-stream", maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP PUT presigned attempt ${attempt}/${maxRetries}`);\n try {\n const curlCmd = [\n "curl",\n "-f",\n "-X",\n "PUT",\n "-H",\n `Content-Type: ${contentType}`,\n "--data-binary",\n `@${filePath}`,\n "--connect-timeout",\n String(HTTP_CONNECT_TIMEOUT),\n "--max-time",\n String(HTTP_MAX_TIME_UPLOAD),\n "--silent",\n `"${presignedUrl}"`\n ].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP PUT presigned failed after ${maxRetries} attempts`);\n return false;\n}\n\n// src/sandbox/scripts/src/lib/secret-masker.ts\nvar MASK_PLACEHOLDER = "***";\nvar MIN_SECRET_LENGTH = 5;\nvar _masker = null;\nvar SecretMasker = class {\n patterns;\n /**\n * Initialize masker with secret values.\n *\n * @param secretValues - List of secret values to mask\n */\n constructor(secretValues) {\n this.patterns = /* @__PURE__ */ new Set();\n for (const secret of secretValues) {\n if (!secret || secret.length < MIN_SECRET_LENGTH) {\n continue;\n }\n this.patterns.add(secret);\n try {\n const b64 = Buffer.from(secret).toString("base64");\n if (b64.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(b64);\n }\n } catch {\n }\n try {\n const urlEnc = encodeURIComponent(secret);\n if (urlEnc !== secret && urlEnc.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(urlEnc);\n }\n } catch {\n }\n }\n }\n /**\n * Recursively mask all occurrences of secrets in the data.\n *\n * @param data - Data to mask (string, list, dict, or primitive)\n * @returns Masked data with the same structure\n */\n mask(data) {\n return this.deepMask(data);\n }\n deepMask(data) {\n if (typeof data === "string") {\n let result = data;\n for (const pattern of this.patterns) {\n result = result.split(pattern).join(MASK_PLACEHOLDER);\n }\n return result;\n }\n if (Array.isArray(data)) {\n return data.map((item) => this.deepMask(item));\n }\n if (data !== null && typeof data === "object") {\n const result = {};\n for (const [key, value] of Object.entries(\n data\n )) {\n result[key] = this.deepMask(value);\n }\n return result;\n }\n return data;\n }\n};\nfunction createMasker() {\n const secretValuesStr = process.env.VM0_SECRET_VALUES ?? "";\n if (!secretValuesStr) {\n return new SecretMasker([]);\n }\n const secretValues = [];\n for (const encodedValue of secretValuesStr.split(",")) {\n const trimmed = encodedValue.trim();\n if (trimmed) {\n try {\n const decoded = Buffer.from(trimmed, "base64").toString("utf-8");\n if (decoded) {\n secretValues.push(decoded);\n }\n } catch {\n }\n }\n }\n return new SecretMasker(secretValues);\n}\nfunction getMasker() {\n if (_masker === null) {\n _masker = createMasker();\n }\n return _masker;\n}\nfunction maskData(data) {\n return getMasker().mask(data);\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nasync function sendEvent(event, sequenceNumber) {\n const eventType = event.type ?? "";\n const eventSubtype = event.subtype ?? "";\n let sessionId = null;\n if (CLI_AGENT_TYPE === "codex") {\n if (eventType === "thread.started") {\n sessionId = event.thread_id ?? "";\n }\n } else {\n if (eventType === "system" && eventSubtype === "init") {\n sessionId = event.session_id ?? "";\n }\n }\n if (sessionId && !fs2.existsSync(SESSION_ID_FILE)) {\n logInfo(`Captured session ID: ${sessionId}`);\n fs2.writeFileSync(SESSION_ID_FILE, sessionId);\n const homeDir = process.env.HOME ?? "/home/user";\n let sessionHistoryPath;\n if (CLI_AGENT_TYPE === "codex") {\n const codexHome = process.env.CODEX_HOME ?? 
`${homeDir}/.codex`;\n sessionHistoryPath = `CODEX_SEARCH:${codexHome}/sessions:${sessionId}`;\n } else {\n const projectName = WORKING_DIR.replace(/^\\//, "").replace(/\\//g, "-");\n sessionHistoryPath = `${homeDir}/.claude/projects/-${projectName}/${sessionId}.jsonl`;\n }\n fs2.writeFileSync(SESSION_HISTORY_PATH_FILE, sessionHistoryPath);\n logInfo(`Session history will be at: ${sessionHistoryPath}`);\n }\n const eventWithSequence = {\n ...event,\n sequenceNumber\n };\n const maskedEvent = maskData(eventWithSequence);\n const payload = {\n runId: RUN_ID,\n events: [maskedEvent]\n };\n const result = await httpPostJson(WEBHOOK_URL, payload);\n if (result === null) {\n logError("Failed to send event after retries");\n fs2.writeFileSync(EVENT_ERROR_FLAG, "1");\n return false;\n }\n return true;\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nimport * as fs4 from "fs";\nimport * as path2 from "path";\n\n// src/sandbox/scripts/src/lib/direct-upload.ts\nimport * as fs3 from "fs";\nimport * as path from "path";\nimport * as crypto from "crypto";\nimport { execSync as execSync2 } from "child_process";\nfunction computeFileHash(filePath) {\n const hash = crypto.createHash("sha256");\n const buffer = fs3.readFileSync(filePath);\n hash.update(buffer);\n return hash.digest("hex");\n}\nfunction collectFileMetadata(dirPath) {\n const files = [];\n function walkDir(currentPath, relativePath) {\n const items = fs3.readdirSync(currentPath);\n for (const item of items) {\n if (item === ".git" || item === ".vm0") {\n continue;\n }\n const fullPath = path.join(currentPath, item);\n const relPath = relativePath ? path.join(relativePath, item) : item;\n const stat = fs3.statSync(fullPath);\n if (stat.isDirectory()) {\n walkDir(fullPath, relPath);\n } else if (stat.isFile()) {\n try {\n const fileHash = computeFileHash(fullPath);\n files.push({\n path: relPath,\n hash: fileHash,\n size: stat.size\n });\n } catch (error) {\n logWarn(`Could not process file ${relPath}: ${error}`);\n }\n }\n }\n }\n walkDir(dirPath, "");\n return files;\n}\nfunction createArchive(dirPath, tarPath) {\n try {\n execSync2(\n `tar -czf "${tarPath}" --exclude=\'.git\' --exclude=\'.vm0\' -C "${dirPath}" .`,\n { stdio: ["pipe", "pipe", "pipe"] }\n );\n return true;\n } catch (error) {\n logError(`Failed to create archive: ${error}`);\n return false;\n }\n}\nfunction createManifest(files, manifestPath) {\n try {\n const manifest = {\n version: 1,\n files,\n createdAt: (/* @__PURE__ */ new Date()).toISOString()\n };\n fs3.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));\n return true;\n } catch (error) {\n logError(`Failed to create manifest: ${error}`);\n return false;\n }\n}\nasync function createDirectUploadSnapshot(mountPath, storageName, storageType = "artifact", runId, message) {\n logInfo(\n `Creating direct upload snapshot for \'${storageName}\' (type: ${storageType})`\n );\n logInfo("Computing file hashes...");\n const hashStart = Date.now();\n const files = collectFileMetadata(mountPath);\n recordSandboxOp("artifact_hash_compute", Date.now() - hashStart, true);\n logInfo(`Found ${files.length} files`);\n if (files.length === 0) {\n logInfo("No files to upload, creating empty version");\n }\n logInfo("Calling prepare endpoint...");\n const prepareStart = Date.now();\n const preparePayload = {\n storageName,\n storageType,\n files\n };\n if (runId) {\n preparePayload.runId = runId;\n }\n const prepareResponse = await httpPostJson(\n STORAGE_PREPARE_URL,\n preparePayload\n );\n if (!prepareResponse) {\n 
logError("Failed to call prepare endpoint");\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n const versionId = prepareResponse.versionId;\n if (!versionId) {\n logError(`Invalid prepare response: ${JSON.stringify(prepareResponse)}`);\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, true);\n if (prepareResponse.existing) {\n logInfo(`Version already exists (deduplicated): ${versionId.slice(0, 8)}`);\n logInfo("Updating HEAD pointer...");\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse || !commitResponse.success) {\n logError(`Failed to update HEAD: ${JSON.stringify(commitResponse)}`);\n return null;\n }\n return { versionId, deduplicated: true };\n }\n const uploads = prepareResponse.uploads;\n if (!uploads) {\n logError("No upload URLs in prepare response");\n return null;\n }\n const archiveInfo = uploads.archive;\n const manifestInfo = uploads.manifest;\n if (!archiveInfo || !manifestInfo) {\n logError("Missing archive or manifest upload info");\n return null;\n }\n const tempDir = fs3.mkdtempSync(`/tmp/direct-upload-${storageName}-`);\n try {\n logInfo("Creating archive...");\n const archiveStart = Date.now();\n const archivePath = path.join(tempDir, "archive.tar.gz");\n if (!createArchive(mountPath, archivePath)) {\n logError("Failed to create archive");\n recordSandboxOp(\n "artifact_archive_create",\n Date.now() - archiveStart,\n false\n );\n return null;\n }\n recordSandboxOp("artifact_archive_create", Date.now() - archiveStart, true);\n logInfo("Creating manifest...");\n const manifestPath = path.join(tempDir, "manifest.json");\n if (!createManifest(files, manifestPath)) {\n logError("Failed to create manifest");\n return null;\n }\n logInfo("Uploading archive to S3...");\n const s3UploadStart = Date.now();\n if (!await httpPutPresigned(\n archiveInfo.presignedUrl,\n archivePath,\n "application/gzip"\n )) {\n logError("Failed to upload archive to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n logInfo("Uploading manifest to S3...");\n if (!await httpPutPresigned(\n manifestInfo.presignedUrl,\n manifestPath,\n "application/json"\n )) {\n logError("Failed to upload manifest to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, true);\n logInfo("Calling commit endpoint...");\n const commitStart = Date.now();\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n if (message) {\n commitPayload.message = message;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse) {\n logError("Failed to call commit endpoint");\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n if (!commitResponse.success) {\n logError(`Commit failed: ${JSON.stringify(commitResponse)}`);\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, true);\n logInfo(`Direct upload snapshot created: 
${versionId.slice(0, 8)}`);\n return { versionId };\n } finally {\n try {\n fs3.rmSync(tempDir, { recursive: true, force: true });\n } catch {\n }\n }\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nfunction findJsonlFiles(dir) {\n const files = [];\n function walk(currentDir) {\n try {\n const items = fs4.readdirSync(currentDir);\n for (const item of items) {\n const fullPath = path2.join(currentDir, item);\n const stat = fs4.statSync(fullPath);\n if (stat.isDirectory()) {\n walk(fullPath);\n } else if (item.endsWith(".jsonl")) {\n files.push(fullPath);\n }\n }\n } catch {\n }\n }\n walk(dir);\n return files;\n}\nfunction findCodexSessionFile(sessionsDir, sessionId) {\n const files = findJsonlFiles(sessionsDir);\n logInfo(`Searching for Codex session ${sessionId} in ${files.length} files`);\n for (const filepath of files) {\n const filename = path2.basename(filepath);\n if (filename.includes(sessionId) || filename.replace(/-/g, "").includes(sessionId.replace(/-/g, ""))) {\n logInfo(`Found Codex session file: ${filepath}`);\n return filepath;\n }\n }\n if (files.length > 0) {\n files.sort((a, b) => {\n const statA = fs4.statSync(a);\n const statB = fs4.statSync(b);\n return statB.mtimeMs - statA.mtimeMs;\n });\n const mostRecent = files[0] ?? null;\n if (mostRecent) {\n logInfo(\n `Session ID not found in filenames, using most recent: ${mostRecent}`\n );\n }\n return mostRecent;\n }\n return null;\n}\nasync function createCheckpoint() {\n const checkpointStart = Date.now();\n logInfo("Creating checkpoint...");\n const sessionIdStart = Date.now();\n if (!fs4.existsSync(SESSION_ID_FILE)) {\n logError("No session ID found, checkpoint creation failed");\n recordSandboxOp(\n "session_id_read",\n Date.now() - sessionIdStart,\n false,\n "Session ID file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const cliAgentSessionId = fs4.readFileSync(SESSION_ID_FILE, "utf-8").trim();\n recordSandboxOp("session_id_read", Date.now() - sessionIdStart, true);\n const sessionHistoryStart = Date.now();\n if (!fs4.existsSync(SESSION_HISTORY_PATH_FILE)) {\n logError("No session history path found, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history path file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionHistoryPathRaw = fs4.readFileSync(SESSION_HISTORY_PATH_FILE, "utf-8").trim();\n let sessionHistoryPath;\n if (sessionHistoryPathRaw.startsWith("CODEX_SEARCH:")) {\n const parts = sessionHistoryPathRaw.split(":");\n if (parts.length !== 3) {\n logError(`Invalid Codex search marker format: ${sessionHistoryPathRaw}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Invalid Codex search marker"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionsDir = parts[1] ?? "";\n const codexSessionId = parts[2] ?? 
"";\n logInfo(`Searching for Codex session in ${sessionsDir}`);\n const foundPath = findCodexSessionFile(sessionsDir, codexSessionId);\n if (!foundPath) {\n logError(\n `Could not find Codex session file for ${codexSessionId} in ${sessionsDir}`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Codex session file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n sessionHistoryPath = foundPath;\n } else {\n sessionHistoryPath = sessionHistoryPathRaw;\n }\n if (!fs4.existsSync(sessionHistoryPath)) {\n logError(\n `Session history file not found at ${sessionHistoryPath}, checkpoint creation failed`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n let cliAgentSessionHistory;\n try {\n cliAgentSessionHistory = fs4.readFileSync(sessionHistoryPath, "utf-8");\n } catch (error) {\n logError(`Failed to read session history: ${error}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n String(error)\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n if (!cliAgentSessionHistory.trim()) {\n logError("Session history is empty, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history empty"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const lineCount = cliAgentSessionHistory.trim().split("\\n").length;\n logInfo(`Session history loaded (${lineCount} lines)`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n true\n );\n let artifactSnapshot = null;\n if (ARTIFACT_DRIVER && ARTIFACT_VOLUME_NAME) {\n logInfo(`Processing artifact with driver: ${ARTIFACT_DRIVER}`);\n if (ARTIFACT_DRIVER !== "vas") {\n logError(\n `Unknown artifact driver: ${ARTIFACT_DRIVER} (only \'vas\' is supported)`\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n logInfo(\n `Creating VAS snapshot for artifact \'${ARTIFACT_VOLUME_NAME}\' at ${ARTIFACT_MOUNT_PATH}`\n );\n logInfo("Using direct S3 upload...");\n const snapshot = await createDirectUploadSnapshot(\n ARTIFACT_MOUNT_PATH,\n ARTIFACT_VOLUME_NAME,\n "artifact",\n RUN_ID,\n `Checkpoint from run ${RUN_ID}`\n );\n if (!snapshot) {\n logError("Failed to create VAS snapshot for artifact");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const artifactVersion = snapshot.versionId;\n if (!artifactVersion) {\n logError("Failed to extract versionId from snapshot");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n artifactSnapshot = {\n artifactName: ARTIFACT_VOLUME_NAME,\n artifactVersion\n };\n logInfo(\n `VAS artifact snapshot created: ${ARTIFACT_VOLUME_NAME}@${artifactVersion}`\n );\n } else {\n logInfo(\n "No artifact configured, creating checkpoint without artifact snapshot"\n );\n }\n logInfo("Calling checkpoint API...");\n const checkpointPayload = {\n runId: RUN_ID,\n cliAgentType: CLI_AGENT_TYPE,\n cliAgentSessionId,\n cliAgentSessionHistory\n };\n if (artifactSnapshot) {\n checkpointPayload.artifactSnapshot = artifactSnapshot;\n }\n const apiCallStart = Date.now();\n const 
result = await httpPostJson(\n CHECKPOINT_URL,\n checkpointPayload\n );\n if (result && result.checkpointId) {\n const checkpointId = result.checkpointId;\n logInfo(`Checkpoint created successfully: ${checkpointId}`);\n recordSandboxOp("checkpoint_api_call", Date.now() - apiCallStart, true);\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, true);\n return true;\n } else {\n logError(\n `Checkpoint API returned invalid response: ${JSON.stringify(result)}`\n );\n recordSandboxOp(\n "checkpoint_api_call",\n Date.now() - apiCallStart,\n false,\n "Invalid API response"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n}\n\n// src/sandbox/scripts/src/lib/metrics.ts\nimport * as fs5 from "fs";\nimport { execSync as execSync3 } from "child_process";\nvar shutdownRequested = false;\nfunction getCpuPercent() {\n try {\n const content = fs5.readFileSync("/proc/stat", "utf-8");\n const line = content.split("\\n")[0];\n if (!line) {\n return 0;\n }\n const parts = line.split(/\\s+/);\n if (parts[0] !== "cpu") {\n return 0;\n }\n const values = parts.slice(1).map((x) => parseInt(x, 10));\n const idleVal = values[3];\n const iowaitVal = values[4];\n if (idleVal === void 0 || iowaitVal === void 0) {\n return 0;\n }\n const idle = idleVal + iowaitVal;\n const total = values.reduce((a, b) => a + b, 0);\n if (total === 0) {\n return 0;\n }\n const cpuPercent = 100 * (1 - idle / total);\n return Math.round(cpuPercent * 100) / 100;\n } catch (error) {\n logDebug(`Failed to get CPU percent: ${error}`);\n return 0;\n }\n}\nfunction getMemoryInfo() {\n try {\n const result = execSync3("free -b", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n for (const line of lines) {\n if (line.startsWith("Mem:")) {\n const parts = line.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n }\n }\n return [0, 0];\n } catch (error) {\n logDebug(`Failed to get memory info: ${error}`);\n return [0, 0];\n }\n}\nfunction getDiskInfo() {\n try {\n const result = execSync3("df -B1 /", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n if (lines.length < 2) {\n return [0, 0];\n }\n const dataLine = lines[1];\n if (!dataLine) {\n return [0, 0];\n }\n const parts = dataLine.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n } catch (error) {\n logDebug(`Failed to get disk info: ${error}`);\n return [0, 0];\n }\n}\nfunction collectMetrics() {\n const cpu = getCpuPercent();\n const [memUsed, memTotal] = getMemoryInfo();\n const [diskUsed, diskTotal] = getDiskInfo();\n return {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n cpu,\n mem_used: memUsed,\n mem_total: memTotal,\n disk_used: diskUsed,\n disk_total: diskTotal\n };\n}\nfunction metricsCollectorLoop() {\n logInfo(`Metrics collector started, writing to ${METRICS_LOG_FILE}`);\n const writeMetrics = () => {\n if (shutdownRequested) {\n logInfo("Metrics collector stopped");\n return;\n }\n try {\n const metrics = collectMetrics();\n fs5.appendFileSync(METRICS_LOG_FILE, JSON.stringify(metrics) + "\\n");\n logDebug(\n 
`Metrics collected: cpu=${metrics.cpu}%, mem=${metrics.mem_used}/${metrics.mem_total}`\n );\n } catch (error) {\n logError(`Failed to collect/write metrics: ${error}`);\n }\n setTimeout(writeMetrics, METRICS_INTERVAL * 1e3);\n };\n writeMetrics();\n}\nfunction startMetricsCollector() {\n shutdownRequested = false;\n setTimeout(metricsCollectorLoop, 0);\n}\nfunction stopMetricsCollector() {\n shutdownRequested = true;\n}\n\n// src/sandbox/scripts/src/lib/upload-telemetry.ts\nimport * as fs6 from "fs";\nvar shutdownRequested2 = false;\nfunction readFileFromPosition(filePath, posFile) {\n let lastPos = 0;\n if (fs6.existsSync(posFile)) {\n try {\n const content = fs6.readFileSync(posFile, "utf-8").trim();\n lastPos = parseInt(content, 10) || 0;\n } catch {\n lastPos = 0;\n }\n }\n let newContent = "";\n let newPos = lastPos;\n if (fs6.existsSync(filePath)) {\n try {\n const fd = fs6.openSync(filePath, "r");\n const stats = fs6.fstatSync(fd);\n const bufferSize = stats.size - lastPos;\n if (bufferSize > 0) {\n const buffer = Buffer.alloc(bufferSize);\n fs6.readSync(fd, buffer, 0, bufferSize, lastPos);\n newContent = buffer.toString("utf-8");\n newPos = stats.size;\n }\n fs6.closeSync(fd);\n } catch (error) {\n logDebug(`Failed to read ${filePath}: ${error}`);\n }\n }\n return [newContent, newPos];\n}\nfunction savePosition(posFile, position) {\n try {\n fs6.writeFileSync(posFile, String(position));\n } catch (error) {\n logDebug(`Failed to save position to ${posFile}: ${error}`);\n }\n}\nfunction readJsonlFromPosition(filePath, posFile) {\n const [content, newPos] = readFileFromPosition(filePath, posFile);\n const entries = [];\n if (content) {\n for (const line of content.trim().split("\\n")) {\n if (line) {\n try {\n entries.push(JSON.parse(line));\n } catch {\n }\n }\n }\n }\n return [entries, newPos];\n}\nfunction readMetricsFromPosition(posFile) {\n return readJsonlFromPosition(METRICS_LOG_FILE, posFile);\n}\nfunction readNetworkLogsFromPosition(posFile) {\n return readJsonlFromPosition(NETWORK_LOG_FILE, posFile);\n}\nfunction readSandboxOpsFromPosition(posFile) {\n return readJsonlFromPosition(SANDBOX_OPS_LOG_FILE, posFile);\n}\nasync function uploadTelemetry() {\n const [systemLog, logPos] = readFileFromPosition(\n SYSTEM_LOG_FILE,\n TELEMETRY_LOG_POS_FILE\n );\n const [metrics, metricsPos] = readMetricsFromPosition(\n TELEMETRY_METRICS_POS_FILE\n );\n const [networkLogs, networkPos] = readNetworkLogsFromPosition(\n TELEMETRY_NETWORK_POS_FILE\n );\n const [sandboxOps, sandboxOpsPos] = readSandboxOpsFromPosition(\n TELEMETRY_SANDBOX_OPS_POS_FILE\n );\n if (!systemLog && metrics.length === 0 && networkLogs.length === 0 && sandboxOps.length === 0) {\n logDebug("No new telemetry data to upload");\n return true;\n }\n const maskedSystemLog = systemLog ? maskData(systemLog) : "";\n const maskedNetworkLogs = networkLogs.length > 0 ? 
maskData(networkLogs) : [];\n const payload = {\n runId: RUN_ID,\n systemLog: maskedSystemLog,\n metrics,\n // Metrics don\'t contain secrets (just numbers)\n networkLogs: maskedNetworkLogs,\n sandboxOperations: sandboxOps\n // Sandbox ops don\'t contain secrets (just timing data)\n };\n logDebug(\n `Uploading telemetry: ${systemLog.length} bytes log, ${metrics.length} metrics, ${networkLogs.length} network logs, ${sandboxOps.length} sandbox ops`\n );\n const result = await httpPostJson(TELEMETRY_URL, payload, 1);\n if (result) {\n savePosition(TELEMETRY_LOG_POS_FILE, logPos);\n savePosition(TELEMETRY_METRICS_POS_FILE, metricsPos);\n savePosition(TELEMETRY_NETWORK_POS_FILE, networkPos);\n savePosition(TELEMETRY_SANDBOX_OPS_POS_FILE, sandboxOpsPos);\n logDebug(\n `Telemetry uploaded successfully: ${result.id ?? "unknown"}`\n );\n return true;\n } else {\n logWarn("Failed to upload telemetry (will retry next interval)");\n return false;\n }\n}\nasync function telemetryUploadLoop() {\n logInfo(`Telemetry upload started (interval: ${TELEMETRY_INTERVAL}s)`);\n const runUpload = async () => {\n if (shutdownRequested2) {\n logInfo("Telemetry upload stopped");\n return;\n }\n try {\n await uploadTelemetry();\n } catch (error) {\n logError(`Telemetry upload error: ${error}`);\n }\n setTimeout(() => void runUpload(), TELEMETRY_INTERVAL * 1e3);\n };\n await runUpload();\n}\nfunction startTelemetryUpload() {\n shutdownRequested2 = false;\n setTimeout(() => void telemetryUploadLoop(), 0);\n}\nfunction stopTelemetryUpload() {\n shutdownRequested2 = true;\n}\nasync function finalTelemetryUpload() {\n logInfo("Performing final telemetry upload...");\n return uploadTelemetry();\n}\n\n// src/sandbox/scripts/src/run-agent.ts\nvar shutdownRequested3 = false;\nfunction heartbeatLoop() {\n const sendHeartbeat = async () => {\n if (shutdownRequested3) {\n return;\n }\n try {\n if (await httpPostJson(HEARTBEAT_URL, { runId: RUN_ID })) {\n logInfo("Heartbeat sent");\n } else {\n logWarn("Heartbeat failed");\n }\n } catch (error) {\n logWarn(`Heartbeat error: ${error}`);\n }\n setTimeout(() => {\n sendHeartbeat().catch(() => {\n });\n }, HEARTBEAT_INTERVAL * 1e3);\n };\n sendHeartbeat().catch(() => {\n });\n}\nasync function cleanup(exitCode, errorMessage) {\n logInfo("\\u25B7 Cleanup");\n const telemetryStart = Date.now();\n let telemetrySuccess = true;\n try {\n await finalTelemetryUpload();\n } catch (error) {\n telemetrySuccess = false;\n logError(`Final telemetry upload failed: ${error}`);\n }\n recordSandboxOp(\n "final_telemetry_upload",\n Date.now() - telemetryStart,\n telemetrySuccess\n );\n logInfo(`Calling complete API with exitCode=${exitCode}`);\n const completePayload = {\n runId: RUN_ID,\n exitCode\n };\n if (errorMessage) {\n completePayload.error = errorMessage;\n }\n const completeStart = Date.now();\n let completeSuccess = false;\n try {\n if (await httpPostJson(COMPLETE_URL, completePayload)) {\n logInfo("Complete API called successfully");\n completeSuccess = true;\n } else {\n logError("Failed to call complete API (sandbox may not be cleaned up)");\n }\n } catch (error) {\n logError(`Complete API call failed: ${error}`);\n }\n recordSandboxOp(\n "complete_api_call",\n Date.now() - completeStart,\n completeSuccess\n );\n shutdownRequested3 = true;\n stopMetricsCollector();\n stopTelemetryUpload();\n logInfo("Background processes stopped");\n if (exitCode === 0) {\n logInfo("\\u2713 Sandbox finished successfully");\n } else {\n logInfo(`\\u2717 Sandbox failed (exit code ${exitCode})`);\n 
}\n}\nasync function run() {\n validateConfig();\n logInfo(`\\u25B6 VM0 Sandbox ${RUN_ID}`);\n logInfo("\\u25B7 Initialization");\n const initStartTime = Date.now();\n logInfo(`Working directory: ${WORKING_DIR}`);\n const heartbeatStart = Date.now();\n heartbeatLoop();\n logInfo("Heartbeat started");\n recordSandboxOp("heartbeat_start", Date.now() - heartbeatStart, true);\n const metricsStart = Date.now();\n startMetricsCollector();\n logInfo("Metrics collector started");\n recordSandboxOp("metrics_collector_start", Date.now() - metricsStart, true);\n const telemetryStart = Date.now();\n startTelemetryUpload();\n logInfo("Telemetry upload started");\n recordSandboxOp("telemetry_upload_start", Date.now() - telemetryStart, true);\n const workingDirStart = Date.now();\n try {\n fs7.mkdirSync(WORKING_DIR, { recursive: true });\n process.chdir(WORKING_DIR);\n } catch (error) {\n recordSandboxOp(\n "working_dir_setup",\n Date.now() - workingDirStart,\n false,\n String(error)\n );\n throw new Error(\n `Failed to create/change to working directory: ${WORKING_DIR} - ${error}`\n );\n }\n recordSandboxOp("working_dir_setup", Date.now() - workingDirStart, true);\n if (CLI_AGENT_TYPE === "codex") {\n const homeDir = process.env.HOME ?? "/home/user";\n const codexHome = `${homeDir}/.codex`;\n fs7.mkdirSync(codexHome, { recursive: true });\n process.env.CODEX_HOME = codexHome;\n logInfo(`Codex home directory: ${codexHome}`);\n const codexLoginStart = Date.now();\n let codexLoginSuccess = false;\n const apiKey = process.env.OPENAI_API_KEY ?? "";\n if (apiKey) {\n try {\n execSync4("codex login --with-api-key", {\n input: apiKey,\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n logInfo("Codex authenticated with API key");\n codexLoginSuccess = true;\n } catch (error) {\n logError(`Codex login failed: ${error}`);\n }\n } else {\n logError("OPENAI_API_KEY not set");\n }\n recordSandboxOp(\n "codex_login",\n Date.now() - codexLoginStart,\n codexLoginSuccess\n );\n }\n const initDurationMs = Date.now() - initStartTime;\n recordSandboxOp("init_total", initDurationMs, true);\n logInfo(`\\u2713 Initialization complete (${Math.floor(initDurationMs / 1e3)}s)`);\n logInfo("\\u25B7 Execution");\n const execStartTime = Date.now();\n logInfo(`Starting ${CLI_AGENT_TYPE} execution...`);\n logInfo(`Prompt: ${PROMPT}`);\n const useMock = process.env.USE_MOCK_CLAUDE === "true";\n let cmd;\n if (CLI_AGENT_TYPE === "codex") {\n if (useMock) {\n throw new Error("Mock mode not supported for Codex");\n }\n const codexArgs = [\n "exec",\n "--json",\n "--dangerously-bypass-approvals-and-sandbox",\n "--skip-git-repo-check",\n "-C",\n WORKING_DIR\n ];\n if (OPENAI_MODEL) {\n codexArgs.push("-m", OPENAI_MODEL);\n }\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n codexArgs.push("resume", RESUME_SESSION_ID, PROMPT);\n } else {\n logInfo("Starting new session");\n codexArgs.push(PROMPT);\n }\n cmd = ["codex", ...codexArgs];\n } else {\n const claudeArgs = [\n "--print",\n "--verbose",\n "--output-format",\n "stream-json",\n "--dangerously-skip-permissions"\n ];\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n claudeArgs.push("--resume", RESUME_SESSION_ID);\n } else {\n logInfo("Starting new session");\n }\n const claudeBin = useMock ? 
"/usr/local/bin/vm0-agent/mock-claude.mjs" : "claude";\n if (useMock) {\n logInfo("Using mock-claude for testing");\n }\n cmd = [claudeBin, ...claudeArgs, PROMPT];\n }\n let agentExitCode = 0;\n const stderrLines = [];\n let logFile = null;\n try {\n logFile = fs7.createWriteStream(AGENT_LOG_FILE);\n const cmdExe = cmd[0];\n if (!cmdExe) {\n throw new Error("Empty command");\n }\n const proc = spawn(cmdExe, cmd.slice(1), {\n stdio: ["ignore", "pipe", "pipe"]\n });\n const exitPromise = new Promise((resolve) => {\n let resolved = false;\n proc.on("error", (err) => {\n if (!resolved) {\n resolved = true;\n logError(`Failed to spawn ${CLI_AGENT_TYPE}: ${err.message}`);\n stderrLines.push(`Spawn error: ${err.message}`);\n resolve(1);\n }\n });\n proc.on("close", (code) => {\n if (!resolved) {\n resolved = true;\n resolve(code ?? 1);\n }\n });\n });\n if (proc.stderr) {\n const stderrRl = readline.createInterface({ input: proc.stderr });\n stderrRl.on("line", (line) => {\n stderrLines.push(line);\n if (logFile && !logFile.destroyed) {\n logFile.write(`[STDERR] ${line}\n`);\n }\n });\n }\n if (proc.stdout) {\n const stdoutRl = readline.createInterface({ input: proc.stdout });\n let eventSequence = 0;\n for await (const line of stdoutRl) {\n if (logFile && !logFile.destroyed) {\n logFile.write(line + "\\n");\n }\n const stripped = line.trim();\n if (!stripped) {\n continue;\n }\n try {\n const event = JSON.parse(stripped);\n await sendEvent(event, eventSequence);\n eventSequence++;\n if (event.type === "result") {\n const resultContent = event.result;\n if (resultContent) {\n console.log(resultContent);\n }\n }\n } catch {\n logDebug(`Non-JSON line from agent: ${stripped.slice(0, 100)}`);\n }\n }\n }\n agentExitCode = await exitPromise;\n } catch (error) {\n logError(`Failed to execute ${CLI_AGENT_TYPE}: ${error}`);\n agentExitCode = 1;\n } finally {\n if (logFile && !logFile.destroyed) {\n logFile.end();\n }\n }\n console.log();\n let finalExitCode = agentExitCode;\n let errorMessage = "";\n if (fs7.existsSync(EVENT_ERROR_FLAG)) {\n logError("Some events failed to send, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Some events failed to send";\n }\n const execDurationMs = Date.now() - execStartTime;\n recordSandboxOp("cli_execution", execDurationMs, agentExitCode === 0);\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`\\u2713 Execution complete (${Math.floor(execDurationMs / 1e3)}s)`);\n } else {\n logInfo(`\\u2717 Execution failed (${Math.floor(execDurationMs / 1e3)}s)`);\n }\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`${CLI_AGENT_TYPE} completed successfully`);\n logInfo("\\u25B7 Checkpoint");\n const checkpointStartTime = Date.now();\n const checkpointSuccess = await createCheckpoint();\n const checkpointDuration = Math.floor(\n (Date.now() - checkpointStartTime) / 1e3\n );\n if (checkpointSuccess) {\n logInfo(`\\u2713 Checkpoint complete (${checkpointDuration}s)`);\n } else {\n logInfo(`\\u2717 Checkpoint failed (${checkpointDuration}s)`);\n }\n if (!checkpointSuccess) {\n logError("Checkpoint creation failed, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Checkpoint creation failed";\n }\n } else {\n if (agentExitCode !== 0) {\n logInfo(`${CLI_AGENT_TYPE} failed with exit code ${agentExitCode}`);\n if (stderrLines.length > 0) {\n errorMessage = stderrLines.map((line) => line.trim()).join(" ");\n logInfo(`Captured stderr: ${errorMessage}`);\n } else {\n errorMessage = `Agent exited with code ${agentExitCode}`;\n }\n }\n }\n 
return [finalExitCode, errorMessage];\n}\nasync function main() {\n let exitCode = 1;\n let errorMessage = "Unexpected termination";\n try {\n [exitCode, errorMessage] = await run();\n } catch (error) {\n if (error instanceof Error) {\n exitCode = 1;\n errorMessage = error.message;\n logError(`Error: ${errorMessage}`);\n } else {\n exitCode = 1;\n errorMessage = `Unexpected error: ${error}`;\n logError(errorMessage);\n }\n } finally {\n await cleanup(exitCode, errorMessage);\n }\n return exitCode;\n}\nmain().then((code) => process.exit(code)).catch((error) => {\n console.error("Fatal error:", error);\n process.exit(1);\n});\n';
var DOWNLOAD_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/download.ts\nimport * as fs2 from "fs";\nimport * as path from "path";\nimport * as os from "os";\nimport { execSync as execSync2 } from "child_process";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpDownload(url, destPath, maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP download attempt ${attempt}/${maxRetries} from ${url}`);\n try {\n const curlCmd = ["curl", "-fsSL", "-o", destPath, `"${url}"`].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP download failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP download failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP download failed after ${maxRetries} attempts from ${url}`);\n return false;\n}\n\n// src/sandbox/scripts/src/download.ts\nasync function downloadStorage(mountPath, archiveUrl) {\n logInfo(`Downloading storage to ${mountPath}`);\n const tempTar = path.join(\n os.tmpdir(),\n `storage-${Date.now()}-${Math.random().toString(36).slice(2)}.tar.gz`\n );\n try {\n if (!await httpDownload(archiveUrl, tempTar)) {\n logError(`Failed to download archive for ${mountPath}`);\n return false;\n }\n fs2.mkdirSync(mountPath, { recursive: true });\n try {\n execSync2(`tar -xzf "${tempTar}" -C "${mountPath}"`, {\n stdio: ["pipe", "pipe", "pipe"]\n });\n } catch {\n logInfo(`Archive appears empty for ${mountPath}`);\n }\n logInfo(`Successfully extracted to ${mountPath}`);\n return true;\n } finally {\n try {\n fs2.unlinkSync(tempTar);\n } catch {\n }\n }\n}\nasync function main() {\n const args = process.argv.slice(2);\n if (args.length < 1) {\n logError("Usage: node download.mjs <manifest_path>");\n process.exit(1);\n }\n const manifestPath = args[0] ?? "";\n if (!manifestPath || !fs2.existsSync(manifestPath)) {\n logError(`Manifest file not found: ${manifestPath}`);\n process.exit(1);\n }\n logInfo(`Starting storage download from manifest: ${manifestPath}`);\n let manifest;\n try {\n const content = fs2.readFileSync(manifestPath, "utf-8");\n manifest = JSON.parse(content);\n } catch (error) {\n logError(`Failed to load manifest: ${error}`);\n process.exit(1);\n }\n const storages = manifest.storages ?? 
[];\n const artifact = manifest.artifact;\n const storageCount = storages.length;\n const hasArtifact = artifact !== void 0;\n logInfo(`Found ${storageCount} storages, artifact: ${hasArtifact}`);\n const downloadTotalStart = Date.now();\n let downloadSuccess = true;\n for (const storage of storages) {\n const mountPath = storage.mountPath;\n const archiveUrl = storage.archiveUrl;\n if (archiveUrl && archiveUrl !== "null") {\n const storageStart = Date.now();\n const success = await downloadStorage(mountPath, archiveUrl);\n recordSandboxOp("storage_download", Date.now() - storageStart, success);\n if (!success) {\n downloadSuccess = false;\n }\n }\n }\n if (artifact) {\n const artifactMount = artifact.mountPath;\n const artifactUrl = artifact.archiveUrl;\n if (artifactUrl && artifactUrl !== "null") {\n const artifactStart = Date.now();\n const success = await downloadStorage(artifactMount, artifactUrl);\n recordSandboxOp("artifact_download", Date.now() - artifactStart, success);\n if (!success) {\n downloadSuccess = false;\n }\n }\n }\n recordSandboxOp(\n "download_total",\n Date.now() - downloadTotalStart,\n downloadSuccess\n );\n logInfo("All storages downloaded successfully");\n}\nmain().catch((error) => {\n logError(`Fatal error: ${error}`);\n process.exit(1);\n});\n';
var MOCK_CLAUDE_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/mock-claude.ts\nimport * as fs from "fs";\nimport * as path from "path";\nimport { execSync } from "child_process";\nfunction parseArgs(args) {\n const result = {\n outputFormat: "text",\n print: false,\n verbose: false,\n dangerouslySkipPermissions: false,\n resume: null,\n prompt: ""\n };\n const remaining = [];\n let i = 0;\n while (i < args.length) {\n const arg = args[i];\n if (arg === "--output-format" && i + 1 < args.length) {\n result.outputFormat = args[i + 1] ?? "text";\n i += 2;\n } else if (arg === "--print") {\n result.print = true;\n i++;\n } else if (arg === "--verbose") {\n result.verbose = true;\n i++;\n } else if (arg === "--dangerously-skip-permissions") {\n result.dangerouslySkipPermissions = true;\n i++;\n } else if (arg === "--resume" && i + 1 < args.length) {\n result.resume = args[i + 1] ?? null;\n i += 2;\n } else if (arg) {\n remaining.push(arg);\n i++;\n } else {\n i++;\n }\n }\n if (remaining.length > 0) {\n result.prompt = remaining[0] ?? "";\n }\n return result;\n}\nfunction createSessionHistory(sessionId, cwd) {\n const projectName = cwd.replace(/^\\//, "").replace(/\\//g, "-");\n const homeDir = process.env.HOME ?? "/home/user";\n const sessionDir = `${homeDir}/.claude/projects/-${projectName}`;\n fs.mkdirSync(sessionDir, { recursive: true });\n return path.join(sessionDir, `${sessionId}.jsonl`);\n}\nfunction main() {\n const sessionId = `mock-${Date.now() * 1e3 + Math.floor(Math.random() * 1e3)}`;\n const args = parseArgs(process.argv.slice(2));\n const prompt = args.prompt;\n const outputFormat = args.outputFormat;\n if (prompt.startsWith("@fail:")) {\n const errorMsg = prompt.slice(6);\n console.error(errorMsg);\n process.exit(1);\n }\n const cwd = process.cwd();\n if (outputFormat === "stream-json") {\n const sessionHistoryFile = createSessionHistory(sessionId, cwd);\n const events = [];\n const initEvent = {\n type: "system",\n subtype: "init",\n cwd,\n session_id: sessionId,\n tools: ["Bash"],\n model: "mock-claude"\n };\n console.log(JSON.stringify(initEvent));\n events.push(initEvent);\n const textEvent = {\n type: "assistant",\n message: {\n role: "assistant",\n content: [{ type: "text", text: "Executing command..." }]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(textEvent));\n events.push(textEvent);\n const toolUseEvent = {\n type: "assistant",\n message: {\n role: "assistant",\n content: [\n {\n type: "tool_use",\n id: "toolu_mock_001",\n name: "Bash",\n input: { command: prompt }\n }\n ]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(toolUseEvent));\n events.push(toolUseEvent);\n let output;\n let exitCode;\n try {\n output = execSync(`bash -c ${JSON.stringify(prompt)}`, {\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n exitCode = 0;\n } catch (error) {\n const execError = error;\n output = (execError.stdout ?? "") + (execError.stderr ?? "");\n exitCode = execError.status ?? 1;\n }\n const isError = exitCode !== 0;\n const toolResultEvent = {\n type: "user",\n message: {\n role: "user",\n content: [\n {\n type: "tool_result",\n tool_use_id: "toolu_mock_001",\n content: output,\n is_error: isError\n }\n ]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(toolResultEvent));\n events.push(toolResultEvent);\n const resultEvent = {\n type: "result",\n subtype: exitCode === 0 ? 
"success" : "error",\n is_error: exitCode !== 0,\n duration_ms: 100,\n num_turns: 1,\n result: output,\n session_id: sessionId,\n total_cost_usd: 0,\n usage: { input_tokens: 0, output_tokens: 0 }\n };\n console.log(JSON.stringify(resultEvent));\n events.push(resultEvent);\n const historyContent = events.map((e) => JSON.stringify(e)).join("\\n") + "\\n";\n fs.writeFileSync(sessionHistoryFile, historyContent);\n process.exit(exitCode);\n } else {\n try {\n execSync(`bash -c ${JSON.stringify(prompt)}`, {\n stdio: "inherit"\n });\n process.exit(0);\n } catch (error) {\n const execError = error;\n process.exit(execError.status ?? 1);\n }\n }\n}\nvar isMainModule = process.argv[1]?.endsWith("mock-claude.mjs") || process.argv[1]?.endsWith("mock-claude.ts");\nif (isMainModule) {\n main();\n}\nexport {\n createSessionHistory,\n parseArgs\n};\n';
var ENV_LOADER_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/env-loader.ts\nimport * as fs from "fs";\nimport { spawn } from "child_process";\nvar ENV_JSON_PATH = "/tmp/vm0-env.json";\nconsole.log("[env-loader] Starting...");\nif (fs.existsSync(ENV_JSON_PATH)) {\n console.log(`[env-loader] Loading environment from ${ENV_JSON_PATH}`);\n try {\n const content = fs.readFileSync(ENV_JSON_PATH, "utf-8");\n const envData = JSON.parse(content);\n for (const [key, value] of Object.entries(envData)) {\n process.env[key] = value;\n }\n console.log(\n `[env-loader] Loaded ${Object.keys(envData).length} environment variables`\n );\n } catch (error) {\n console.error(`[env-loader] ERROR loading JSON: ${error}`);\n process.exit(1);\n }\n} else {\n console.error(\n `[env-loader] ERROR: Environment file not found: ${ENV_JSON_PATH}`\n );\n process.exit(1);\n}\nvar criticalVars = [\n "VM0_RUN_ID",\n "VM0_API_URL",\n "VM0_WORKING_DIR",\n "VM0_PROMPT"\n];\nfor (const varName of criticalVars) {\n const val = process.env[varName] ?? "";\n if (val) {\n const display = val.length > 50 ? val.substring(0, 50) + "..." : val;\n console.log(`[env-loader] ${varName}=${display}`);\n } else {\n console.log(`[env-loader] WARNING: ${varName} is empty`);\n }\n}\nvar runAgentPath = "/usr/local/bin/vm0-agent/run-agent.mjs";\nconsole.log(`[env-loader] Executing ${runAgentPath}`);\nvar child = spawn("node", [runAgentPath], {\n stdio: "inherit",\n env: process.env\n});\nchild.on("close", (code) => {\n process.exit(code ?? 1);\n});\n';
@@ -7782,13 +7903,39 @@ var SCRIPT_PATHS = {
 };
 
 // ../../packages/core/src/feature-switch.ts
-var PricingSwitch = {
-  key: "pricing" /* Pricing */,
-  maintainer: "ethan@vm0.ai",
-  enabled: false
-};
 var FEATURE_SWITCHES = {
-  ["pricing" /* Pricing */]:
+  ["pricing" /* Pricing */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  },
+  ["dummy" /* Dummy */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: true
+  },
+  ["platformOnboarding" /* PlatformOnboarding */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  },
+  ["platformAgents" /* PlatformAgents */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  },
+  ["platformSecrets" /* PlatformSecrets */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  },
+  ["platformArtifacts" /* PlatformArtifacts */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  },
+  ["platformApiKeys" /* PlatformApiKeys */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  },
+  ["platformLogs" /* PlatformLogs */]: {
+    maintainer: "ethan@vm0.ai",
+    enabled: false
+  }
 };
 
 // src/lib/scripts/index.ts
@@ -7901,7 +8048,7 @@ function initVMRegistry(registryPath) {
 // src/lib/proxy/proxy-manager.ts
 import { spawn as spawn2 } from "child_process";
 import fs6 from "fs";
-import
+import path3 from "path";
 
 // src/lib/proxy/mitm-addon-script.ts
 var RUNNER_MITM_ADDON_SCRIPT = `#!/usr/bin/env python3
@@ -8394,7 +8541,7 @@ var ProxyManager = class {
   process = null;
   isRunning = false;
   constructor(config) {
-    const addonPath =
+    const addonPath = path3.join(config.caDir, "mitm_addon.py");
     this.config = {
       ...DEFAULT_PROXY_OPTIONS,
       ...config,
@@ -8421,7 +8568,7 @@ var ProxyManager = class {
|
|
|
8421
8568
|
* Ensure the addon script exists at the configured path
|
|
8422
8569
|
*/
|
|
8423
8570
|
ensureAddonScript() {
|
|
8424
|
-
const addonDir =
|
|
8571
|
+
const addonDir = path3.dirname(this.config.addonPath);
|
|
8425
8572
|
if (!fs6.existsSync(addonDir)) {
|
|
8426
8573
|
fs6.mkdirSync(addonDir, { recursive: true });
|
|
8427
8574
|
}
|
|
@@ -8439,7 +8586,7 @@ var ProxyManager = class {
|
|
|
8439
8586
|
if (!fs6.existsSync(this.config.caDir)) {
|
|
8440
8587
|
throw new Error(`Proxy CA directory not found: ${this.config.caDir}`);
|
|
8441
8588
|
}
|
|
8442
|
-
const caCertPath =
|
|
8589
|
+
const caCertPath = path3.join(this.config.caDir, "mitmproxy-ca.pem");
|
|
8443
8590
|
if (!fs6.existsSync(caCertPath)) {
|
|
8444
8591
|
throw new Error(`Proxy CA certificate not found: ${caCertPath}`);
|
|
8445
8592
|
}
|
|
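Two distinct CA files are referenced around here: ProxyManager validates mitmproxy-ca.pem (mitmproxy's combined key-plus-certificate bundle, which the proxy itself loads), while installProxyCA below pushes mitmproxy-ca-cert.pem (the certificate-only file) into the guest trust store. A hypothetical helper making the split explicit (the function is illustrative; the filenames and caDir come from these hunks):

import path from "path";

// mitmproxy-ca.pem      - key + cert, loaded by mitmproxy (checked above)
// mitmproxy-ca-cert.pem - cert only, safe to copy into the guest trust store
function caPaths(caDir: string) {
  return {
    proxyBundle: path.join(caDir, "mitmproxy-ca.pem"),
    guestCert: path.join(caDir, "mitmproxy-ca-cert.pem"),
  };
}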
@@ -8911,17 +9058,17 @@ function getAllScripts() {
|
|
|
8911
9058
|
}
|
|
8912
9059
|
|
|
8913
9060
|
// src/lib/vm-setup/vm-setup.ts
|
|
8914
|
-
async function uploadScripts(
|
|
9061
|
+
async function uploadScripts(guest) {
|
|
8915
9062
|
const scripts = getAllScripts();
|
|
8916
|
-
await
|
|
9063
|
+
await guest.execOrThrow(`sudo mkdir -p ${SCRIPT_PATHS.baseDir}`);
|
|
8917
9064
|
for (const script of scripts) {
|
|
8918
|
-
await
|
|
9065
|
+
await guest.writeFileWithSudo(script.path, script.content);
|
|
8919
9066
|
}
|
|
8920
|
-
await
|
|
9067
|
+
await guest.execOrThrow(
|
|
8921
9068
|
`sudo chmod +x ${SCRIPT_PATHS.baseDir}/*.mjs 2>/dev/null || true`
|
|
8922
9069
|
);
|
|
8923
9070
|
}
|
|
8924
|
-
async function downloadStorages(
|
|
9071
|
+
async function downloadStorages(guest, manifest) {
|
|
8925
9072
|
const totalArchives = manifest.storages.filter((s) => s.archiveUrl).length + (manifest.artifact?.archiveUrl ? 1 : 0);
|
|
8926
9073
|
if (totalArchives === 0) {
|
|
8927
9074
|
console.log(`[Executor] No archives to download`);
|
|
@@ -8929,8 +9076,8 @@ async function downloadStorages(ssh, manifest) {
|
|
|
8929
9076
|
}
|
|
8930
9077
|
console.log(`[Executor] Downloading ${totalArchives} archive(s)...`);
|
|
8931
9078
|
const manifestJson = JSON.stringify(manifest);
|
|
8932
|
-
await
|
|
8933
|
-
const result = await
|
|
9079
|
+
await guest.writeFile("/tmp/storage-manifest.json", manifestJson);
|
|
9080
|
+
const result = await guest.exec(
|
|
8934
9081
|
`node ${SCRIPT_PATHS.download} /tmp/storage-manifest.json`
|
|
8935
9082
|
);
|
|
8936
9083
|
if (result.exitCode !== 0) {
|
|
@@ -8938,7 +9085,7 @@ async function downloadStorages(ssh, manifest) {
|
|
|
8938
9085
|
}
|
|
8939
9086
|
console.log(`[Executor] Storage download completed`);
|
|
8940
9087
|
}
|
|
8941
|
-
async function restoreSessionHistory(
|
|
9088
|
+
async function restoreSessionHistory(guest, resumeSession, workingDir, cliAgentType) {
|
|
8942
9089
|
const { sessionId, sessionHistory } = resumeSession;
|
|
8943
9090
|
let sessionPath;
|
|
8944
9091
|
if (cliAgentType === "codex") {
|
|
@@ -8952,13 +9099,13 @@ async function restoreSessionHistory(ssh, resumeSession, workingDir, cliAgentTyp
|
|
|
8952
9099
|
}
|
|
8953
9100
|
console.log(`[Executor] Restoring session history to ${sessionPath}`);
|
|
8954
9101
|
const dirPath = sessionPath.substring(0, sessionPath.lastIndexOf("/"));
|
|
8955
|
-
await
|
|
8956
|
-
await
|
|
9102
|
+
await guest.execOrThrow(`mkdir -p "${dirPath}"`);
|
|
9103
|
+
await guest.writeFile(sessionPath, sessionHistory);
|
|
8957
9104
|
console.log(
|
|
8958
9105
|
`[Executor] Session history restored (${sessionHistory.split("\n").length} lines)`
|
|
8959
9106
|
);
|
|
8960
9107
|
}
|
|
8961
|
-
async function installProxyCA(
|
|
9108
|
+
async function installProxyCA(guest, caCertPath) {
|
|
8962
9109
|
if (!fs8.existsSync(caCertPath)) {
|
|
8963
9110
|
throw new Error(
|
|
8964
9111
|
`Proxy CA certificate not found at ${caCertPath}. Run generate-proxy-ca.sh first.`
|
|
@@ -8968,18 +9115,18 @@ async function installProxyCA(ssh, caCertPath) {
|
|
|
8968
9115
|
console.log(
|
|
8969
9116
|
`[Executor] Installing proxy CA certificate (${caCert.length} bytes)`
|
|
8970
9117
|
);
|
|
8971
|
-
await
|
|
9118
|
+
await guest.writeFileWithSudo(
|
|
8972
9119
|
"/usr/local/share/ca-certificates/vm0-proxy-ca.crt",
|
|
8973
9120
|
caCert
|
|
8974
9121
|
);
|
|
8975
|
-
await
|
|
9122
|
+
await guest.execOrThrow("sudo update-ca-certificates");
|
|
8976
9123
|
console.log(`[Executor] Proxy CA certificate installed successfully`);
|
|
8977
9124
|
}
|
|
8978
|
-
async function configureDNS(
|
|
9125
|
+
async function configureDNS(guest) {
|
|
8979
9126
|
const dnsConfig = `nameserver 8.8.8.8
|
|
8980
9127
|
nameserver 8.8.4.4
|
|
8981
9128
|
nameserver 1.1.1.1`;
|
|
8982
|
-
await
|
|
9129
|
+
await guest.execOrThrow(
|
|
8983
9130
|
`sudo sh -c 'rm -f /etc/resolv.conf && echo "${dnsConfig}" > /etc/resolv.conf'`
|
|
8984
9131
|
);
|
|
8985
9132
|
}
|
|
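Across the vm-setup hunks above, every helper's first parameter is renamed from ssh to guest, and the call sites pin down the surface the new client must offer. A sketch of that interface as inferred purely from those calls (the name and exact signatures are assumptions, not the package's declared types):

// Inferred from uploadScripts, downloadStorages, restoreSessionHistory,
// installProxyCA, and configureDNS above; not the package's declared API.
interface GuestClient {
  // Used as guest.exec(cmd) and guest.exec(cmd, 2e4); callers read
  // result.exitCode and result.stdout.
  exec(cmd: string, timeoutMs?: number): Promise<{ exitCode: number; stdout: string }>;
  // Like exec, but assumed to reject on a non-zero exit code.
  execOrThrow(cmd: string): Promise<unknown>;
  writeFile(path: string, content: string): Promise<void>;
  writeFileWithSudo(path: string, content: string): Promise<void>;
  // Called later as waitUntilReachable(3e4, 1e3): total budget, poll interval.
  waitUntilReachable(timeoutMs: number, intervalMs: number): Promise<void>;
}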
@@ -8995,11 +9142,11 @@ var CURL_ERROR_MESSAGES = {
|
|
|
8995
9142
|
60: "TLS certificate error (proxy CA not trusted)",
|
|
8996
9143
|
22: "HTTP error from server"
|
|
8997
9144
|
};
|
|
8998
|
-
async function runPreflightCheck(
|
|
9145
|
+
async function runPreflightCheck(guest, apiUrl, runId, sandboxToken, bypassSecret) {
|
|
8999
9146
|
const heartbeatUrl = `${apiUrl}/api/webhooks/agent/heartbeat`;
|
|
9000
9147
|
const bypassHeader = bypassSecret ? ` -H "x-vercel-protection-bypass: ${bypassSecret}"` : "";
|
|
9001
9148
|
const curlCmd = `curl -sf --connect-timeout 5 --max-time 10 "${heartbeatUrl}" -X POST -H "Content-Type: application/json" -H "Authorization: Bearer ${sandboxToken}"${bypassHeader} -d '{"runId":"${runId}"}'`;
|
|
9002
|
-
const result = await
|
|
9149
|
+
const result = await guest.exec(curlCmd, 2e4);
|
|
9003
9150
|
if (result.exitCode === 0) {
|
|
9004
9151
|
return { success: true };
|
|
9005
9152
|
}
|
|
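runPreflightCheck shells a curl heartbeat out of the guest and, on failure, can translate curl's exit code via CURL_ERROR_MESSAGES (60 = proxy CA not trusted, 22 = HTTP error). Only the { success: true } branch is visible in this diff; a sketch of the failure branch under that assumption (the result shape and helper name are illustrative):

// Illustrative mapping of a non-zero curl exit code to a readable error.
const CURL_ERROR_MESSAGES: Record<number, string> = {
  60: "TLS certificate error (proxy CA not trusted)",
  22: "HTTP error from server",
};

function describePreflightFailure(exitCode: number): { success: false; error: string } {
  const reason = CURL_ERROR_MESSAGES[exitCode] ?? `curl exited with code ${exitCode}`;
  return { success: false, error: reason };
}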
@@ -9045,7 +9192,7 @@ async function executeJob(context, config, options = {}) {
|
|
|
9045
9192
|
const log = options.logger ?? ((msg) => console.log(msg));
|
|
9046
9193
|
log(`[Executor] Starting job ${context.runId} in VM ${vmId}`);
|
|
9047
9194
|
try {
|
|
9048
|
-
const workspacesDir =
|
|
9195
|
+
const workspacesDir = path4.join(process.cwd(), "workspaces");
|
|
9049
9196
|
const vmConfig = {
|
|
9050
9197
|
vmId,
|
|
9051
9198
|
vcpus: config.sandbox.vcpu,
|
|
@@ -9053,7 +9200,7 @@ async function executeJob(context, config, options = {}) {
|
|
|
9053
9200
|
kernelPath: config.firecracker.kernel,
|
|
9054
9201
|
rootfsPath: config.firecracker.rootfs,
|
|
9055
9202
|
firecrackerBinary: config.firecracker.binary,
|
|
9056
|
-
workDir:
|
|
9203
|
+
workDir: path4.join(workspacesDir, `vm0-${vmId}`)
|
|
9057
9204
|
};
|
|
9058
9205
|
log(`[Executor] Creating VM ${vmId}...`);
|
|
9059
9206
|
vm = new FirecrackerVM(vmConfig);
|
|
@@ -9063,14 +9210,15 @@ async function executeJob(context, config, options = {}) {
|
|
|
9063
9210
|
throw new Error("VM started but no IP address available");
|
|
9064
9211
|
}
|
|
9065
9212
|
log(`[Executor] VM ${vmId} started, guest IP: ${guestIp}`);
|
|
9066
|
-
const
|
|
9067
|
-
const
|
|
9068
|
-
log(`[Executor]
|
|
9213
|
+
const vsockPath = vm.getVsockPath();
|
|
9214
|
+
const guest = new VsockClient(vsockPath);
|
|
9215
|
+
log(`[Executor] Using vsock for guest communication: ${vsockPath}`);
|
|
9216
|
+
log(`[Executor] Verifying vsock connectivity...`);
|
|
9069
9217
|
await withSandboxTiming(
|
|
9070
|
-
"
|
|
9071
|
-
() =>
|
|
9218
|
+
"guest_wait",
|
|
9219
|
+
() => guest.waitUntilReachable(3e4, 1e3)
|
|
9072
9220
|
);
|
|
9073
|
-
log(`[Executor]
|
|
9221
|
+
log(`[Executor] Guest client ready`);
|
|
9074
9222
|
const firewallConfig = context.experimentalFirewall;
|
|
9075
9223
|
if (firewallConfig?.enabled) {
|
|
9076
9224
|
const mitmEnabled = firewallConfig.experimental_mitm ?? false;
|
|
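The executor now talks to the guest over the VM's vsock UDS instead of SSH: it builds a VsockClient from vm.getVsockPath() and gates startup on waitUntilReachable(3e4, 1e3), i.e. a 30-second budget polled roughly every second. With Firecracker's vsock device, a host-side connection is a plain Unix-socket connect followed by a CONNECT <port> handshake, so a readiness probe can look like the sketch below (the guest port and exact protocol handling are assumptions; the real client is not shown in this diff):

import net from "net";

// Sketch: poll a Firecracker vsock UDS until the guest agent answers.
// Firecracker's host side expects "CONNECT <port>\n" and replies
// "OK <assigned-port>\n" once the guest is listening (port 1234 assumed).
async function waitUntilReachable(udsPath: string, timeoutMs: number, intervalMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const ok = await new Promise<boolean>((resolve) => {
      const sock = net.connect(udsPath, () => sock.write("CONNECT 1234\n"));
      sock.once("data", (buf) => {
        resolve(buf.toString().startsWith("OK"));
        sock.destroy();
      });
      sock.once("error", () => resolve(false));
      sock.setTimeout(intervalMs, () => {
        sock.destroy();
        resolve(false);
      });
    });
    if (ok) return;
    await new Promise((r) => setTimeout(r, intervalMs));
  }
  throw new Error(`vsock at ${udsPath} not reachable within ${timeoutMs}ms`);
}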
@@ -9085,29 +9233,29 @@ async function executeJob(context, config, options = {}) {
|
|
|
9085
9233
|
sealSecretsEnabled
|
|
9086
9234
|
});
|
|
9087
9235
|
if (mitmEnabled) {
|
|
9088
|
-
const caCertPath =
|
|
9236
|
+
const caCertPath = path4.join(
|
|
9089
9237
|
config.proxy.ca_dir,
|
|
9090
9238
|
"mitmproxy-ca-cert.pem"
|
|
9091
9239
|
);
|
|
9092
|
-
await installProxyCA(
|
|
9240
|
+
await installProxyCA(guest, caCertPath);
|
|
9093
9241
|
}
|
|
9094
9242
|
}
|
|
9095
9243
|
log(`[Executor] Configuring DNS...`);
|
|
9096
|
-
await configureDNS(
|
|
9244
|
+
await configureDNS(guest);
|
|
9097
9245
|
log(`[Executor] Uploading scripts...`);
|
|
9098
|
-
await withSandboxTiming("script_upload", () => uploadScripts(
|
|
9246
|
+
await withSandboxTiming("script_upload", () => uploadScripts(guest));
|
|
9099
9247
|
log(`[Executor] Scripts uploaded to ${SCRIPT_PATHS.baseDir}`);
|
|
9100
9248
|
if (context.storageManifest) {
|
|
9101
9249
|
await withSandboxTiming(
|
|
9102
9250
|
"storage_download",
|
|
9103
|
-
() => downloadStorages(
|
|
9251
|
+
() => downloadStorages(guest, context.storageManifest)
|
|
9104
9252
|
);
|
|
9105
9253
|
}
|
|
9106
9254
|
if (context.resumeSession) {
|
|
9107
9255
|
await withSandboxTiming(
|
|
9108
9256
|
"session_restore",
|
|
9109
9257
|
() => restoreSessionHistory(
|
|
9110
|
-
|
|
9258
|
+
guest,
|
|
9111
9259
|
context.resumeSession,
|
|
9112
9260
|
context.workingDir,
|
|
9113
9261
|
context.cliAgentType || "claude-code"
|
|
@@ -9119,12 +9267,12 @@ async function executeJob(context, config, options = {}) {
|
|
|
9119
9267
|
log(
|
|
9120
9268
|
`[Executor] Writing env JSON (${envJson.length} bytes) to ${ENV_JSON_PATH}`
|
|
9121
9269
|
);
|
|
9122
|
-
await
|
|
9270
|
+
await guest.writeFile(ENV_JSON_PATH, envJson);
|
|
9123
9271
|
if (!options.benchmarkMode) {
|
|
9124
9272
|
log(`[Executor] Running preflight connectivity check...`);
|
|
9125
9273
|
const bypassSecret = process.env.VERCEL_AUTOMATION_BYPASS_SECRET;
|
|
9126
9274
|
const preflight = await runPreflightCheck(
|
|
9127
|
-
|
|
9275
|
+
guest,
|
|
9128
9276
|
config.server.url,
|
|
9129
9277
|
context.runId,
|
|
9130
9278
|
context.sandboxToken,
|
|
@@ -9151,24 +9299,24 @@ async function executeJob(context, config, options = {}) {
|
|
|
9151
9299
|
const startTime = Date.now();
|
|
9152
9300
|
if (options.benchmarkMode) {
|
|
9153
9301
|
log(`[Executor] Running command directly (benchmark mode)...`);
|
|
9154
|
-
await
|
|
9302
|
+
await guest.exec(
|
|
9155
9303
|
`nohup sh -c '${context.prompt}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
|
|
9156
9304
|
);
|
|
9157
9305
|
log(`[Executor] Command started in background`);
|
|
9158
9306
|
} else {
|
|
9159
9307
|
log(`[Executor] Running agent via env-loader (background)...`);
|
|
9160
|
-
await
|
|
9308
|
+
await guest.exec(
|
|
9161
9309
|
`nohup sh -c 'node ${ENV_LOADER_PATH}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
|
|
9162
9310
|
);
|
|
9163
9311
|
log(`[Executor] Agent started in background`);
|
|
9164
9312
|
}
|
|
9165
9313
|
const pollIntervalMs = 2e3;
|
|
9166
|
-
const maxWaitMs =
|
|
9314
|
+
const maxWaitMs = 2 * 60 * 60 * 1e3;
|
|
9167
9315
|
let exitCode = 1;
|
|
9168
9316
|
let completed = false;
|
|
9169
9317
|
while (Date.now() - startTime < maxWaitMs) {
|
|
9170
9318
|
await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
|
|
9171
|
-
const checkResult = await
|
|
9319
|
+
const checkResult = await guest.exec(`cat ${exitCodeFile} 2>/dev/null`);
|
|
9172
9320
|
if (checkResult.exitCode === 0 && checkResult.stdout.trim()) {
|
|
9173
9321
|
const parsed = parseInt(checkResult.stdout.trim(), 10);
|
|
9174
9322
|
exitCode = Number.isNaN(parsed) ? 1 : parsed;
|
|
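The completion watch is a poll loop: the agent runs detached under nohup, writes its status to exitCodeFile, and the executor re-reads that file until it appears or the budget runs out. Spelled out, the constants above give:

// The timing constants from the hunk above, spelled out.
const pollIntervalMs = 2e3;                  // 2,000 ms = 2 s between checks
const maxWaitMs = 2 * 60 * 60 * 1e3;         // 7,200,000 ms = 2 hours total
const maxPolls = maxWaitMs / pollIntervalMs; // up to 3,600 checks per run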
@@ -9176,17 +9324,17 @@ async function executeJob(context, config, options = {}) {
|
|
|
9176
9324
|
break;
|
|
9177
9325
|
}
|
|
9178
9326
|
if (!options.benchmarkMode) {
|
|
9179
|
-
const processCheck = await
|
|
9327
|
+
const processCheck = await guest.exec(
|
|
9180
9328
|
`pgrep -f "env-loader.mjs" > /dev/null 2>&1 && echo "RUNNING" || echo "DEAD"`
|
|
9181
9329
|
);
|
|
9182
9330
|
if (processCheck.stdout.trim() === "DEAD") {
|
|
9183
9331
|
log(
|
|
9184
9332
|
`[Executor] Agent process died unexpectedly without writing exit code`
|
|
9185
9333
|
);
|
|
9186
|
-
const logContent = await
|
|
9334
|
+
const logContent = await guest.exec(
|
|
9187
9335
|
`tail -50 ${systemLogFile} 2>/dev/null`
|
|
9188
9336
|
);
|
|
9189
|
-
const dmesgCheck = await
|
|
9337
|
+
const dmesgCheck = await guest.exec(
|
|
9190
9338
|
`dmesg | tail -20 | grep -iE "killed|oom" 2>/dev/null`
|
|
9191
9339
|
);
|
|
9192
9340
|
let errorMsg = "Agent process terminated unexpectedly";
|
|
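When the exit-code file never appears and pgrep reports the env-loader gone, the executor gathers tail -50 of the system log plus a dmesg grep for OOM kills before reporting. The message assembly itself falls outside this diff; a hypothetical classifier along those lines (entirely illustrative, not the package's code):

// Illustrative: turn the collected diagnostics into an error message.
function classifyDeath(dmesgTail: string, logTail: string): string {
  let errorMsg = "Agent process terminated unexpectedly";
  if (/killed|oom/i.test(dmesgTail)) {
    errorMsg += " (likely OOM-killed; see dmesg)";
  }
  if (logTail.trim()) {
    errorMsg += `\nLast log lines:\n${logTail}`;
  }
  return errorMsg;
}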
@@ -9232,7 +9380,9 @@ async function executeJob(context, config, options = {}) {
|
|
|
9232
9380
|
success: exitCode === 0
|
|
9233
9381
|
});
|
|
9234
9382
|
log(`[Executor] Agent finished in ${duration}s with exit code ${exitCode}`);
|
|
9235
|
-
const logResult = await
|
|
9383
|
+
const logResult = await guest.exec(
|
|
9384
|
+
`tail -100 ${systemLogFile} 2>/dev/null`
|
|
9385
|
+
);
|
|
9236
9386
|
if (logResult.stdout) {
|
|
9237
9387
|
log(
|
|
9238
9388
|
`[Executor] Log output (${logResult.stdout.length} chars): ${logResult.stdout.substring(0, 500)}`
|
|
@@ -9508,12 +9658,12 @@ var startCommand = new Command("start").description("Start the runner").option("
|
|
|
9508
9658
|
|
|
9509
9659
|
// src/commands/doctor.ts
|
|
9510
9660
|
import { Command as Command2 } from "commander";
|
|
9511
|
-
import { existsSync as
|
|
9661
|
+
import { existsSync as existsSync4, readFileSync as readFileSync3, readdirSync as readdirSync2 } from "fs";
|
|
9512
9662
|
import { dirname as dirname2, join as join3 } from "path";
|
|
9513
9663
|
|
|
9514
9664
|
// src/lib/firecracker/process.ts
|
|
9515
|
-
import { readdirSync, readFileSync as readFileSync2, existsSync as
|
|
9516
|
-
import
|
|
9665
|
+
import { readdirSync, readFileSync as readFileSync2, existsSync as existsSync3 } from "fs";
|
|
9666
|
+
import path5 from "path";
|
|
9517
9667
|
function parseFirecrackerCmdline(cmdline) {
|
|
9518
9668
|
const args = cmdline.split("\0");
|
|
9519
9669
|
if (!args[0]?.includes("firecracker")) return null;
|
|
@@ -9546,8 +9696,8 @@ function findFirecrackerProcesses() {
|
|
|
9546
9696
|
for (const entry of entries) {
|
|
9547
9697
|
if (!/^\d+$/.test(entry)) continue;
|
|
9548
9698
|
const pid = parseInt(entry, 10);
|
|
9549
|
-
const cmdlinePath =
|
|
9550
|
-
if (!
|
|
9699
|
+
const cmdlinePath = path5.join(procDir, entry, "cmdline");
|
|
9700
|
+
if (!existsSync3(cmdlinePath)) continue;
|
|
9551
9701
|
try {
|
|
9552
9702
|
const cmdline = readFileSync2(cmdlinePath, "utf-8");
|
|
9553
9703
|
const parsed = parseFirecrackerCmdline(cmdline);
|
|
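findFirecrackerProcesses scans /proc, keeps the numeric entries, and reads each cmdline, which the kernel stores NUL-separated; parseFirecrackerCmdline splits on "\0" and checks args[0] for "firecracker". The same pattern, generalized into a self-contained sketch (names are illustrative):

import { existsSync, readdirSync, readFileSync } from "fs";
import path from "path";

// Sketch: find PIDs whose /proc/<pid>/cmdline matches a predicate.
// /proc/<pid>/cmdline is NUL-separated, hence the split on "\0".
function findPids(match: (argv: string[]) => boolean, procDir = "/proc"): number[] {
  const pids: number[] = [];
  for (const entry of readdirSync(procDir)) {
    if (!/^\d+$/.test(entry)) continue;
    const cmdlinePath = path.join(procDir, entry, "cmdline");
    if (!existsSync(cmdlinePath)) continue;
    try {
      const argv = readFileSync(cmdlinePath, "utf-8").split("\0");
      if (match(argv)) pids.push(parseInt(entry, 10));
    } catch {
      // Process exited between readdir and read; skip it.
    }
  }
  return pids;
}

// e.g. findPids((argv) => argv[0]?.includes("firecracker") ?? false)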
@@ -9603,8 +9753,8 @@ function findMitmproxyProcess() {
|
|
|
9603
9753
|
for (const entry of entries) {
|
|
9604
9754
|
if (!/^\d+$/.test(entry)) continue;
|
|
9605
9755
|
const pid = parseInt(entry, 10);
|
|
9606
|
-
const cmdlinePath =
|
|
9607
|
-
if (!
|
|
9756
|
+
const cmdlinePath = path5.join(procDir, entry, "cmdline");
|
|
9757
|
+
if (!existsSync3(cmdlinePath)) continue;
|
|
9608
9758
|
try {
|
|
9609
9759
|
const cmdline = readFileSync2(cmdlinePath, "utf-8");
|
|
9610
9760
|
const parsed = parseMitmproxyCmdline(cmdline);
|
|
@@ -9629,7 +9779,7 @@ var doctorCommand = new Command2("doctor").description("Diagnose runner health,
|
|
|
9629
9779
|
const workspacesDir = join3(configDir, "workspaces");
|
|
9630
9780
|
console.log(`Runner: ${config.name}`);
|
|
9631
9781
|
let status = null;
|
|
9632
|
-
if (
|
|
9782
|
+
if (existsSync4(statusFilePath)) {
|
|
9633
9783
|
try {
|
|
9634
9784
|
status = JSON.parse(
|
|
9635
9785
|
readFileSync3(statusFilePath, "utf-8")
|
|
@@ -9693,7 +9843,7 @@ var doctorCommand = new Command2("doctor").description("Diagnose runner health,
|
|
|
9693
9843
|
console.log("");
|
|
9694
9844
|
const processes = findFirecrackerProcesses();
|
|
9695
9845
|
const tapDevices = await listTapDevices();
|
|
9696
|
-
const workspaces =
|
|
9846
|
+
const workspaces = existsSync4(workspacesDir) ? readdirSync2(workspacesDir).filter((d) => d.startsWith("vm0-")) : [];
|
|
9697
9847
|
const jobs = [];
|
|
9698
9848
|
const statusVmIds = /* @__PURE__ */ new Set();
|
|
9699
9849
|
const allocations = getAllocations();
|
|
@@ -9823,7 +9973,7 @@ function formatUptime(ms) {
|
|
|
9823
9973
|
|
|
9824
9974
|
// src/commands/kill.ts
|
|
9825
9975
|
import { Command as Command3 } from "commander";
|
|
9826
|
-
import { existsSync as
|
|
9976
|
+
import { existsSync as existsSync5, readFileSync as readFileSync4, writeFileSync as writeFileSync3, rmSync } from "fs";
|
|
9827
9977
|
import { dirname as dirname3, join as join4 } from "path";
|
|
9828
9978
|
import * as readline2 from "readline";
|
|
9829
9979
|
var killCommand = new Command3("kill").description("Force terminate a run and clean up all resources").argument("<run-id>", "Run ID (full UUID or short 8-char vmId)").option("--config <path>", "Config file path", "./runner.yaml").option("--force", "Skip confirmation prompt").action(
|
|
@@ -9888,7 +10038,7 @@ var killCommand = new Command3("kill").description("Force terminate a run and cl
|
|
|
9888
10038
|
message: error instanceof Error ? error.message : "Unknown error"
|
|
9889
10039
|
});
|
|
9890
10040
|
}
|
|
9891
|
-
if (
|
|
10041
|
+
if (existsSync5(workspaceDir)) {
|
|
9892
10042
|
try {
|
|
9893
10043
|
rmSync(workspaceDir, { recursive: true, force: true });
|
|
9894
10044
|
results.push({
|
|
@@ -9910,7 +10060,7 @@ var killCommand = new Command3("kill").description("Force terminate a run and cl
|
|
|
9910
10060
|
message: "Not found (already cleaned)"
|
|
9911
10061
|
});
|
|
9912
10062
|
}
|
|
9913
|
-
if (runId &&
|
|
10063
|
+
if (runId && existsSync5(statusFilePath)) {
|
|
9914
10064
|
try {
|
|
9915
10065
|
const status = JSON.parse(
|
|
9916
10066
|
readFileSync4(statusFilePath, "utf-8")
|
|
@@ -9969,7 +10119,7 @@ function resolveRunId(input, statusFilePath) {
|
|
|
9969
10119
|
const vmId = input.split("-")[0];
|
|
9970
10120
|
return { vmId: vmId ?? input, runId: input };
|
|
9971
10121
|
}
|
|
9972
|
-
if (
|
|
10122
|
+
if (existsSync5(statusFilePath)) {
|
|
9973
10123
|
try {
|
|
9974
10124
|
const status = JSON.parse(
|
|
9975
10125
|
readFileSync4(statusFilePath, "utf-8")
|
|
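resolveRunId accepts either a full run UUID or the short vmId, deriving the latter as input.split("-")[0]; a UUID's first dash-separated segment is exactly 8 hex characters, which matches the "short 8-char vmId" the kill command advertises. For illustration:

// Illustrative only; the UUID below is made up.
const runId = "3f2b9c1a-5d4e-4f6a-8b7c-0d1e2f3a4b5c";
const vmId = runId.split("-")[0]; // "3f2b9c1a"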
@@ -10000,7 +10150,7 @@ async function confirm(message) {
|
|
|
10000
10150
|
|
|
10001
10151
|
// src/commands/benchmark.ts
|
|
10002
10152
|
import { Command as Command4 } from "commander";
|
|
10003
|
-
import
|
|
10153
|
+
import crypto2 from "crypto";
|
|
10004
10154
|
|
|
10005
10155
|
// src/lib/timing.ts
|
|
10006
10156
|
var Timer = class {
|
|
@@ -10035,7 +10185,7 @@ var Timer = class {
|
|
|
10035
10185
|
// src/commands/benchmark.ts
|
|
10036
10186
|
function createBenchmarkContext(prompt, options) {
|
|
10037
10187
|
return {
|
|
10038
|
-
runId:
|
|
10188
|
+
runId: crypto2.randomUUID(),
|
|
10039
10189
|
prompt,
|
|
10040
10190
|
agentComposeVersionId: "benchmark-local",
|
|
10041
10191
|
vars: null,
|
|
@@ -10090,7 +10240,7 @@ var benchmarkCommand = new Command4("benchmark").description(
|
|
|
10090
10240
|
});
|
|
10091
10241
|
|
|
10092
10242
|
// src/index.ts
|
|
10093
|
-
var version = true ? "
|
|
10243
|
+
var version = true ? "3.0.0" : "0.1.0";
|
|
10094
10244
|
program.name("vm0-runner").version(version).description("Self-hosted runner for VM0 agents");
|
|
10095
10245
|
program.addCommand(startCommand);
|
|
10096
10246
|
program.addCommand(doctorCommand);
|