@vm0/runner 2.13.5 → 2.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +774 -411
- package/package.json +1 -1
package/index.js
CHANGED
@@ -16,7 +16,8 @@ var SANDBOX_DEFAULTS = {
   max_concurrent: 1,
   vcpu: 2,
   memory_mb: 2048,
-  poll_interval_ms: 5e3
+  poll_interval_ms: 5e3,
+  guest_protocol: "vsock"
 };
 var PROXY_DEFAULTS = {
   port: 8080
@@ -35,7 +36,8 @@ var runnerConfigSchema = z.object({
     max_concurrent: z.number().int().min(1).default(SANDBOX_DEFAULTS.max_concurrent),
     vcpu: z.number().int().min(1).default(SANDBOX_DEFAULTS.vcpu),
     memory_mb: z.number().int().min(128).default(SANDBOX_DEFAULTS.memory_mb),
-    poll_interval_ms: z.number().int().min(1e3).default(SANDBOX_DEFAULTS.poll_interval_ms)
+    poll_interval_ms: z.number().int().min(1e3).default(SANDBOX_DEFAULTS.poll_interval_ms),
+    guest_protocol: z.enum(["vsock", "ssh"]).default(SANDBOX_DEFAULTS.guest_protocol)
   }).default(SANDBOX_DEFAULTS),
   firecracker: z.object({
     binary: z.string().min(1, "Firecracker binary path is required"),
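
The sandbox block gains a guest_protocol switch: "vsock" (the new default) or "ssh" (the existing SSH transport, which stays available below). A minimal standalone zod sketch of the resulting parse behavior (schema abbreviated to the new field, not the runner's full config):

    import { z } from "zod";

    const sandboxSchema = z.object({
      guest_protocol: z.enum(["vsock", "ssh"]).default("vsock")
    });

    console.log(sandboxSchema.parse({}));                        // { guest_protocol: 'vsock' }
    console.log(sandboxSchema.parse({ guest_protocol: "ssh" })); // { guest_protocol: 'ssh' }
    sandboxSchema.parse({ guest_protocol: "tcp" });              // throws ZodError (invalid enum value)
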
@@ -62,7 +64,8 @@ var debugConfigSchema = z.object({
     max_concurrent: z.number().int().min(1).default(SANDBOX_DEFAULTS.max_concurrent),
     vcpu: z.number().int().min(1).default(SANDBOX_DEFAULTS.vcpu),
     memory_mb: z.number().int().min(128).default(SANDBOX_DEFAULTS.memory_mb),
-    poll_interval_ms: z.number().int().min(1e3).default(SANDBOX_DEFAULTS.poll_interval_ms)
+    poll_interval_ms: z.number().int().min(1e3).default(SANDBOX_DEFAULTS.poll_interval_ms),
+    guest_protocol: z.enum(["vsock", "ssh"]).default(SANDBOX_DEFAULTS.guest_protocol)
   }).default(SANDBOX_DEFAULTS),
   firecracker: z.object({
     binary: z.string().min(1, "Firecracker binary path is required"),
@@ -969,11 +972,14 @@ var FirecrackerVM = class {
   socketPath;
   vmOverlayPath;
   // Per-VM sparse overlay for writes
+  vsockPath;
+  // Vsock UDS path for host-guest communication
   constructor(config) {
     this.config = config;
     this.workDir = config.workDir || `/tmp/vm0-vm-${config.vmId}`;
     this.socketPath = path2.join(this.workDir, "firecracker.sock");
     this.vmOverlayPath = path2.join(this.workDir, "overlay.ext4");
+    this.vsockPath = path2.join(this.workDir, "vsock.sock");
   }
   /**
    * Get current VM state
@@ -999,6 +1005,12 @@ var FirecrackerVM = class {
   getSocketPath() {
     return this.socketPath;
   }
+  /**
+   * Get the vsock UDS path for host-guest communication
+   */
+  getVsockPath() {
+    return this.vsockPath;
+  }
   /**
    * Start the VM
    * This spawns Firecracker, configures it via API, and boots the VM
@@ -1124,6 +1136,12 @@ var FirecrackerVM = class {
       guest_mac: this.networkConfig.guestMac,
       host_dev_name: this.networkConfig.tapDevice
     });
+    console.log(`[VM ${this.config.vmId}] Vsock: ${this.vsockPath}`);
+    await this.client.setVsock({
+      vsock_id: "vsock0",
+      guest_cid: 3,
+      uds_path: this.vsockPath
+    });
   }
   /**
    * Stop the VM gracefully
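
Each VM now gets a virtio-vsock device backed by a unix socket in its work directory. The setVsock client wrapper is not part of this diff, but Firecracker configures vsock through a PUT /vsock call on its API socket; a hypothetical sketch of the equivalent raw request (paths are illustrative):

    import * as http from "http";

    // Guest CID 3 is the first usable guest address: CIDs 0 and 1 are
    // reserved, and CID 2 is the host (VMADDR_CID_HOST).
    const body = JSON.stringify({
      vsock_id: "vsock0",
      guest_cid: 3,
      uds_path: "/tmp/vm0-vm-demo/vsock.sock"
    });
    const req = http.request(
      {
        socketPath: "/tmp/vm0-vm-demo/firecracker.sock", // the VM's API socket
        path: "/vsock",
        method: "PUT",
        headers: {
          "Content-Type": "application/json",
          "Content-Length": Buffer.byteLength(body)
        }
      },
      (res) => console.log("vsock configured:", res.statusCode) // 204 No Content on success
    );
    req.end(body);
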
@@ -1401,13 +1419,6 @@ var SSHClient = class {
     return this.config.host;
   }
 };
-function createVMSSHClient(guestIp, user = "root", privateKeyPath) {
-  return new SSHClient({
-    host: guestIp,
-    user,
-    privateKeyPath
-  });
-}
 function getRunnerSSHKeyPath() {
   const runnerKeyPath = "/opt/vm0-runner/ssh/id_rsa";
   if (fs4.existsSync(runnerKeyPath)) {
@@ -1420,6 +1431,326 @@ function getRunnerSSHKeyPath() {
   return "";
 }
 
+// src/lib/firecracker/vsock.ts
+import * as net from "net";
+import * as fs5 from "fs";
+import * as crypto from "crypto";
+var VSOCK_PORT = 1e3;
+var CONNECT_TIMEOUT_MS = 5e3;
+var HEADER_SIZE = 4;
+var MAX_MESSAGE_SIZE = 1024 * 1024;
+var DEFAULT_EXEC_TIMEOUT_MS = 3e5;
+function encode(msg) {
+  const json = Buffer.from(JSON.stringify(msg), "utf-8");
+  const header = Buffer.alloc(HEADER_SIZE);
+  header.writeUInt32BE(json.length, 0);
+  return Buffer.concat([header, json]);
+}
+var Decoder = class {
+  buf = Buffer.alloc(0);
+  decode(data) {
+    this.buf = Buffer.concat([this.buf, data]);
+    const messages = [];
+    while (this.buf.length >= HEADER_SIZE) {
+      const len = this.buf.readUInt32BE(0);
+      if (len > MAX_MESSAGE_SIZE) throw new Error(`Message too large: ${len}`);
+      const total = HEADER_SIZE + len;
+      if (this.buf.length < total) break;
+      const json = this.buf.subarray(HEADER_SIZE, total);
+      messages.push(JSON.parse(json.toString("utf-8")));
+      this.buf = this.buf.subarray(total);
+    }
+    return messages;
+  }
+};
+var VsockClient = class {
+  vsockPath;
+  socket = null;
+  connected = false;
+  pendingRequests = /* @__PURE__ */ new Map();
+  constructor(vsockPath) {
+    this.vsockPath = vsockPath;
+  }
+  /**
+   * Connect to the guest agent via vsock
+   */
+  async connect() {
+    if (this.connected && this.socket) {
+      return;
+    }
+    return new Promise((resolve, reject) => {
+      if (!fs5.existsSync(this.vsockPath)) {
+        reject(new Error(`Vsock socket not found: ${this.vsockPath}`));
+        return;
+      }
+      const socket = net.createConnection(this.vsockPath);
+      const decoder = new Decoder();
+      let fcConnected = false;
+      let gotReady = false;
+      let pingId = null;
+      let connectionEstablished = false;
+      const timeout = setTimeout(() => {
+        socket.destroy();
+        reject(new Error("Vsock connection timeout"));
+      }, CONNECT_TIMEOUT_MS);
+      socket.on("connect", () => {
+        socket.write(`CONNECT ${VSOCK_PORT}
+`);
+      });
+      socket.on("data", (data) => {
+        if (!fcConnected) {
+          const str = data.toString();
+          if (str.startsWith("OK ")) {
+            fcConnected = true;
+          } else {
+            clearTimeout(timeout);
+            socket.destroy();
+            reject(new Error(`Firecracker connect failed: ${str.trim()}`));
+          }
+          return;
+        }
+        try {
+          for (const msg of decoder.decode(data)) {
+            if (!connectionEstablished) {
+              if (!gotReady && msg.type === "ready") {
+                gotReady = true;
+                pingId = crypto.randomUUID();
+                const ping = { type: "ping", id: pingId, payload: {} };
+                socket.write(encode(ping));
+              } else if (msg.type === "pong" && msg.id === pingId) {
+                clearTimeout(timeout);
+                this.socket = socket;
+                this.connected = true;
+                connectionEstablished = true;
+                resolve();
+              }
+            } else {
+              this.handleMessage(msg);
+            }
+          }
+        } catch (e) {
+          clearTimeout(timeout);
+          socket.destroy();
+          reject(new Error(`Failed to parse message: ${e}`));
+        }
+      });
+      socket.on("error", (err) => {
+        clearTimeout(timeout);
+        this.connected = false;
+        this.socket = null;
+        reject(new Error(`Vsock error: ${err.message}`));
+      });
+      socket.on("close", () => {
+        clearTimeout(timeout);
+        this.connected = false;
+        this.socket = null;
+        if (!gotReady) {
+          reject(new Error("Vsock closed before ready"));
+        }
+        for (const [id, req] of this.pendingRequests) {
+          clearTimeout(req.timeout);
+          req.reject(new Error("Connection closed"));
+          this.pendingRequests.delete(id);
+        }
+      });
+    });
+  }
+  /**
+   * Handle incoming message and route to pending request
+   */
+  handleMessage(msg) {
+    const pending = this.pendingRequests.get(msg.id);
+    if (pending) {
+      clearTimeout(pending.timeout);
+      this.pendingRequests.delete(msg.id);
+      pending.resolve(msg);
+    }
+  }
+  /**
+   * Send a request and wait for response
+   */
+  async request(type, payload, timeoutMs) {
+    await this.connect();
+    if (!this.socket) {
+      throw new Error("Not connected");
+    }
+    const id = crypto.randomUUID();
+    const msg = { type, id, payload };
+    return new Promise((resolve, reject) => {
+      const timeout = setTimeout(() => {
+        this.pendingRequests.delete(id);
+        reject(new Error(`Request timeout: ${type}`));
+      }, timeoutMs);
+      this.pendingRequests.set(id, {
+        resolve,
+        reject,
+        timeout
+      });
+      this.socket.write(encode(msg));
+    });
+  }
+  /**
+   * Execute a command on the remote VM
+   */
+  async exec(command, timeoutMs) {
+    const actualTimeout = timeoutMs ?? DEFAULT_EXEC_TIMEOUT_MS;
+    try {
+      const response = await this.request(
+        "exec",
+        { command, timeoutMs: actualTimeout },
+        actualTimeout + 5e3
+        // Add buffer for network latency
+      );
+      if (response.type === "error") {
+        const errorPayload = response.payload;
+        return {
+          exitCode: 1,
+          stdout: "",
+          stderr: errorPayload.message
+        };
+      }
+      return {
+        exitCode: response.payload.exitCode,
+        stdout: response.payload.stdout,
+        stderr: response.payload.stderr
+      };
+    } catch (e) {
+      return {
+        exitCode: 1,
+        stdout: "",
+        stderr: e instanceof Error ? e.message : String(e)
+      };
+    }
+  }
+  /**
+   * Execute a command and throw on non-zero exit
+   */
+  async execOrThrow(command) {
+    const result = await this.exec(command);
+    if (result.exitCode !== 0) {
+      throw new Error(
+        `Command failed (exit ${result.exitCode}): ${result.stderr || result.stdout}`
+      );
+    }
+    return result.stdout;
+  }
+  /**
+   * Write content to a file on the remote VM
+   */
+  async writeFile(remotePath, content) {
+    const encoded = Buffer.from(content).toString("base64");
+    const maxChunkSize = 65e3;
+    if (encoded.length <= maxChunkSize) {
+      await this.execOrThrow(`echo '${encoded}' | base64 -d > '${remotePath}'`);
+    } else {
+      await this.execOrThrow(`rm -f '${remotePath}'`);
+      for (let i = 0; i < encoded.length; i += maxChunkSize) {
+        const chunk = encoded.slice(i, i + maxChunkSize);
+        const operator = i === 0 ? ">" : ">>";
+        await this.execOrThrow(
+          `echo '${chunk}' | base64 -d ${operator} '${remotePath}'`
+        );
+      }
+    }
+  }
+  /**
+   * Write content to a file on the remote VM using sudo
+   */
+  async writeFileWithSudo(remotePath, content) {
+    const encoded = Buffer.from(content).toString("base64");
+    const maxChunkSize = 65e3;
+    if (encoded.length <= maxChunkSize) {
+      await this.execOrThrow(
+        `echo '${encoded}' | base64 -d | sudo tee '${remotePath}' > /dev/null`
+      );
+    } else {
+      await this.execOrThrow(`sudo rm -f '${remotePath}'`);
+      for (let i = 0; i < encoded.length; i += maxChunkSize) {
+        const chunk = encoded.slice(i, i + maxChunkSize);
+        const teeFlag = i === 0 ? "" : "-a";
+        await this.execOrThrow(
+          `echo '${chunk}' | base64 -d | sudo tee ${teeFlag} '${remotePath}' > /dev/null`
+        );
+      }
+    }
+  }
+  /**
+   * Read a file from the remote VM
+   */
+  async readFile(remotePath) {
+    const result = await this.exec(`cat '${remotePath}'`);
+    if (result.exitCode !== 0) {
+      throw new Error(`Failed to read file: ${result.stderr}`);
+    }
+    return result.stdout;
+  }
+  /**
+   * Check if vsock connection is available
+   */
+  async isReachable() {
+    try {
+      const result = await this.exec("echo ok", 15e3);
+      return result.exitCode === 0 && result.stdout.trim() === "ok";
+    } catch {
+      return false;
+    }
+  }
+  /**
+   * Wait for vsock to become available
+   */
+  async waitUntilReachable(timeoutMs = 12e4, intervalMs = 2e3) {
+    const start = Date.now();
+    while (Date.now() - start < timeoutMs) {
+      if (await this.isReachable()) {
+        return;
+      }
+      await new Promise((resolve) => {
+        const remaining = timeoutMs - (Date.now() - start);
+        if (remaining > 0) {
+          setTimeout(resolve, Math.min(intervalMs, remaining));
+        } else {
+          resolve();
+        }
+      });
+    }
+    throw new Error(`Vsock not reachable after ${timeoutMs}ms`);
+  }
+  /**
+   * Create a directory on the remote VM
+   */
+  async mkdir(remotePath) {
+    await this.execOrThrow(`mkdir -p '${remotePath}'`);
+  }
+  /**
+   * Check if a file/directory exists on the remote VM
+   */
+  async exists(remotePath) {
+    const result = await this.exec(`test -e '${remotePath}'`);
+    return result.exitCode === 0;
+  }
+  /**
+   * Get the vsock path (for logging/debugging)
+   */
+  getVsockPath() {
+    return this.vsockPath;
+  }
+  /**
+   * Close the connection
+   */
+  close() {
+    if (this.socket) {
+      this.socket.end();
+      this.socket = null;
+    }
+    this.connected = false;
+    for (const [id, req] of this.pendingRequests) {
+      clearTimeout(req.timeout);
+      req.reject(new Error("Connection closed"));
+      this.pendingRequests.delete(id);
+    }
+  }
+};
+
 // ../../node_modules/.pnpm/@ts-rest+core@3.53.0-rc.1_@types+node@24.3.0/node_modules/@ts-rest/core/index.esm.mjs
 var util;
 (function(util2) {
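
The VsockClient above replaces SSH as the control channel. connect() first speaks Firecracker's host-initiated vsock handshake on the UDS (it writes "CONNECT 1000\n" and expects an "OK ..." reply), then waits for the guest agent's ready message and validates the channel with a ping/pong before resolving. After that, every message is a 4-byte big-endian length header followed by UTF-8 JSON, correlated to its pending request by a random UUID id. A hypothetical host-side usage sketch (the runner's actual call sites are outside this diff):

    // Sketch only: assumes a FirecrackerVM booted as shown earlier in this diff.
    const vm = new FirecrackerVM({ vmId: "demo" });
    await vm.start(); // spawns Firecracker, configures network + vsock, boots

    const client = new VsockClient(vm.getVsockPath()); // /tmp/vm0-vm-demo/vsock.sock
    await client.waitUntilReachable(); // polls `echo ok` until the guest agent answers

    await client.mkdir("/home/user/work");
    await client.writeFile("/home/user/work/hello.txt", "hi from the host\n");
    const result = await client.exec("cat /home/user/work/hello.txt");
    console.log(result.exitCode, result.stdout); // 0 "hi from the host\n"

    client.close();

Note that writeFile has no separate transfer channel: content travels base64-encoded through exec in chunks of at most 65,000 characters, switching from > to >> after the first chunk (tee vs. tee -a in the sudo variant), so arbitrary content survives shell quoting.
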
@@ -5744,7 +6075,7 @@ var runEventsContract = c3.router({
       id: z6.string().min(1, "Run ID is required")
     }),
     query: z6.object({
-      since: z6.coerce.number().default(
+      since: z6.coerce.number().default(-1),
       limit: z6.coerce.number().default(100)
     }),
     responses: {
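
The run-events query's since parameter now has an explicit default of -1. A minimal standalone sketch of the coercion and default behavior:

    import { z } from "zod";

    const query = z.object({
      since: z.coerce.number().default(-1),
      limit: z.coerce.number().default(100)
    });

    console.log(query.parse({}));              // { since: -1, limit: 100 }
    console.log(query.parse({ since: "42" })); // { since: 42, limit: 100 } (query strings coerce)
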
@@ -6127,7 +6458,7 @@ import { z as z8 } from "zod";
 var c5 = initContract();
 var agentEventSchema = z8.object({
   type: z8.string(),
-  sequenceNumber: z8.number().int().
+  sequenceNumber: z8.number().int().nonnegative()
 }).passthrough();
 var artifactSnapshotSchema = z8.object({
   artifactName: z8.string(),
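
agentEventSchema now pins sequenceNumber to a non-negative integer, consistent with the since: -1 default above (every valid sequence number sorts after -1). Behavior sketch:

    import { z } from "zod";

    const seq = z.number().int().nonnegative();
    seq.parse(0);  // ok: sequences may start at 0
    seq.parse(7);  // ok
    seq.parse(-1); // throws ZodError: must be greater than or equal to 0
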
@@ -7762,7 +8093,7 @@ var publicVolumeDownloadContract = c18.router({
 });
 
 // ../../packages/core/src/sandbox/scripts/dist/bundled.ts
-
var RUN_AGENT_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/run-agent.ts\nimport * as fs7 from "fs";\nimport { spawn, execSync as execSync4 } from "child_process";\nimport * as readline from "readline";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HEARTBEAT_INTERVAL = 60;\nvar TELEMETRY_INTERVAL = 30;\nvar HTTP_CONNECT_TIMEOUT = 10;\nvar HTTP_MAX_TIME = 30;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nvar METRICS_INTERVAL = 5;\nfunction validateConfig() {\n if (!WORKING_DIR) {\n throw new Error("VM0_WORKING_DIR is required but not set");\n }\n return true;\n}\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nimport * as fs2 from "fs";\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpPostJson(url, data, maxRetries = HTTP_MAX_RETRIES) {\n const headers = {\n "Content-Type": "application/json",\n Authorization: `Bearer ${API_TOKEN}`\n };\n if (VERCEL_BYPASS) {\n headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;\n }\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP POST attempt ${attempt}/${maxRetries} to ${url}`);\n try {\n const controller = new AbortController();\n const timeoutId = setTimeout(\n () => controller.abort(),\n HTTP_MAX_TIME * 1e3\n );\n const response = await fetch(url, {\n method: "POST",\n headers,\n body: JSON.stringify(data),\n signal: controller.signal\n });\n clearTimeout(timeoutId);\n if (response.ok) {\n const text = await response.text();\n if (text) {\n return JSON.parse(text);\n }\n return {};\n }\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): HTTP ${response.status}`\n );\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("abort")) {\n logWarn(`HTTP POST failed (attempt ${attempt}/${maxRetries}): Timeout`);\n } else {\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP POST failed after ${maxRetries} attempts to ${url}`);\n return null;\n}\nasync function httpPutPresigned(presignedUrl, filePath, contentType = "application/octet-stream", maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP PUT presigned attempt ${attempt}/${maxRetries}`);\n try {\n const curlCmd = [\n "curl",\n "-f",\n "-X",\n "PUT",\n "-H",\n `Content-Type: ${contentType}`,\n "--data-binary",\n `@${filePath}`,\n "--connect-timeout",\n String(HTTP_CONNECT_TIMEOUT),\n "--max-time",\n String(HTTP_MAX_TIME_UPLOAD),\n "--silent",\n `"${presignedUrl}"`\n ].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP PUT presigned failed after ${maxRetries} attempts`);\n return false;\n}\n\n// src/sandbox/scripts/src/lib/secret-masker.ts\nvar MASK_PLACEHOLDER = "***";\nvar MIN_SECRET_LENGTH = 5;\nvar _masker = null;\nvar SecretMasker = class {\n patterns;\n /**\n * Initialize masker with secret values.\n *\n * @param secretValues - List of secret values to mask\n */\n constructor(secretValues) {\n this.patterns = /* @__PURE__ */ new Set();\n for (const secret of secretValues) {\n if (!secret || secret.length < MIN_SECRET_LENGTH) {\n continue;\n }\n this.patterns.add(secret);\n try {\n const b64 = Buffer.from(secret).toString("base64");\n if (b64.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(b64);\n }\n } catch {\n }\n try {\n const urlEnc = encodeURIComponent(secret);\n if (urlEnc !== secret && urlEnc.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(urlEnc);\n }\n } catch {\n }\n }\n }\n /**\n * Recursively mask all occurrences of secrets in the data.\n *\n * @param data - Data to mask (string, list, dict, or primitive)\n * @returns Masked data with the same structure\n */\n mask(data) {\n return this.deepMask(data);\n }\n deepMask(data) {\n if (typeof data === "string") {\n let result = data;\n for (const pattern of this.patterns) {\n result = result.split(pattern).join(MASK_PLACEHOLDER);\n }\n return result;\n }\n if (Array.isArray(data)) {\n return data.map((item) => this.deepMask(item));\n }\n if (data !== null && typeof data === "object") {\n const result = {};\n for (const [key, value] of Object.entries(\n data\n )) {\n result[key] = this.deepMask(value);\n }\n return result;\n }\n return data;\n }\n};\nfunction createMasker() {\n const secretValuesStr = process.env.VM0_SECRET_VALUES ?? "";\n if (!secretValuesStr) {\n return new SecretMasker([]);\n }\n const secretValues = [];\n for (const encodedValue of secretValuesStr.split(",")) {\n const trimmed = encodedValue.trim();\n if (trimmed) {\n try {\n const decoded = Buffer.from(trimmed, "base64").toString("utf-8");\n if (decoded) {\n secretValues.push(decoded);\n }\n } catch {\n }\n }\n }\n return new SecretMasker(secretValues);\n}\nfunction getMasker() {\n if (_masker === null) {\n _masker = createMasker();\n }\n return _masker;\n}\nfunction maskData(data) {\n return getMasker().mask(data);\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nasync function sendEvent(event, sequenceNumber) {\n const eventType = event.type ?? "";\n const eventSubtype = event.subtype ?? "";\n let sessionId = null;\n if (CLI_AGENT_TYPE === "codex") {\n if (eventType === "thread.started") {\n sessionId = event.thread_id ?? "";\n }\n } else {\n if (eventType === "system" && eventSubtype === "init") {\n sessionId = event.session_id ?? "";\n }\n }\n if (sessionId && !fs2.existsSync(SESSION_ID_FILE)) {\n logInfo(`Captured session ID: ${sessionId}`);\n fs2.writeFileSync(SESSION_ID_FILE, sessionId);\n const homeDir = process.env.HOME ?? "/home/user";\n let sessionHistoryPath;\n if (CLI_AGENT_TYPE === "codex") {\n const codexHome = process.env.CODEX_HOME ?? 
`${homeDir}/.codex`;\n sessionHistoryPath = `CODEX_SEARCH:${codexHome}/sessions:${sessionId}`;\n } else {\n const projectName = WORKING_DIR.replace(/^\\//, "").replace(/\\//g, "-");\n sessionHistoryPath = `${homeDir}/.claude/projects/-${projectName}/${sessionId}.jsonl`;\n }\n fs2.writeFileSync(SESSION_HISTORY_PATH_FILE, sessionHistoryPath);\n logInfo(`Session history will be at: ${sessionHistoryPath}`);\n }\n const eventWithSequence = {\n ...event,\n sequenceNumber\n };\n const maskedEvent = maskData(eventWithSequence);\n const payload = {\n runId: RUN_ID,\n events: [maskedEvent]\n };\n const result = await httpPostJson(WEBHOOK_URL, payload);\n if (result === null) {\n logError("Failed to send event after retries");\n fs2.writeFileSync(EVENT_ERROR_FLAG, "1");\n return false;\n }\n return true;\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nimport * as fs4 from "fs";\nimport * as path2 from "path";\n\n// src/sandbox/scripts/src/lib/direct-upload.ts\nimport * as fs3 from "fs";\nimport * as path from "path";\nimport * as crypto from "crypto";\nimport { execSync as execSync2 } from "child_process";\nfunction computeFileHash(filePath) {\n const hash = crypto.createHash("sha256");\n const buffer = fs3.readFileSync(filePath);\n hash.update(buffer);\n return hash.digest("hex");\n}\nfunction collectFileMetadata(dirPath) {\n const files = [];\n function walkDir(currentPath, relativePath) {\n const items = fs3.readdirSync(currentPath);\n for (const item of items) {\n if (item === ".git" || item === ".vm0") {\n continue;\n }\n const fullPath = path.join(currentPath, item);\n const relPath = relativePath ? path.join(relativePath, item) : item;\n const stat = fs3.statSync(fullPath);\n if (stat.isDirectory()) {\n walkDir(fullPath, relPath);\n } else if (stat.isFile()) {\n try {\n const fileHash = computeFileHash(fullPath);\n files.push({\n path: relPath,\n hash: fileHash,\n size: stat.size\n });\n } catch (error) {\n logWarn(`Could not process file ${relPath}: ${error}`);\n }\n }\n }\n }\n walkDir(dirPath, "");\n return files;\n}\nfunction createArchive(dirPath, tarPath) {\n try {\n execSync2(\n `tar -czf "${tarPath}" --exclude=\'.git\' --exclude=\'.vm0\' -C "${dirPath}" .`,\n { stdio: ["pipe", "pipe", "pipe"] }\n );\n return true;\n } catch (error) {\n logError(`Failed to create archive: ${error}`);\n return false;\n }\n}\nfunction createManifest(files, manifestPath) {\n try {\n const manifest = {\n version: 1,\n files,\n createdAt: (/* @__PURE__ */ new Date()).toISOString()\n };\n fs3.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));\n return true;\n } catch (error) {\n logError(`Failed to create manifest: ${error}`);\n return false;\n }\n}\nasync function createDirectUploadSnapshot(mountPath, storageName, storageType = "artifact", runId, message) {\n logInfo(\n `Creating direct upload snapshot for \'${storageName}\' (type: ${storageType})`\n );\n logInfo("Computing file hashes...");\n const hashStart = Date.now();\n const files = collectFileMetadata(mountPath);\n recordSandboxOp("artifact_hash_compute", Date.now() - hashStart, true);\n logInfo(`Found ${files.length} files`);\n if (files.length === 0) {\n logInfo("No files to upload, creating empty version");\n }\n logInfo("Calling prepare endpoint...");\n const prepareStart = Date.now();\n const preparePayload = {\n storageName,\n storageType,\n files\n };\n if (runId) {\n preparePayload.runId = runId;\n }\n const prepareResponse = await httpPostJson(\n STORAGE_PREPARE_URL,\n preparePayload\n );\n if (!prepareResponse) {\n 
logError("Failed to call prepare endpoint");\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n const versionId = prepareResponse.versionId;\n if (!versionId) {\n logError(`Invalid prepare response: ${JSON.stringify(prepareResponse)}`);\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, true);\n if (prepareResponse.existing) {\n logInfo(`Version already exists (deduplicated): ${versionId.slice(0, 8)}`);\n logInfo("Updating HEAD pointer...");\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse || !commitResponse.success) {\n logError(`Failed to update HEAD: ${JSON.stringify(commitResponse)}`);\n return null;\n }\n return { versionId, deduplicated: true };\n }\n const uploads = prepareResponse.uploads;\n if (!uploads) {\n logError("No upload URLs in prepare response");\n return null;\n }\n const archiveInfo = uploads.archive;\n const manifestInfo = uploads.manifest;\n if (!archiveInfo || !manifestInfo) {\n logError("Missing archive or manifest upload info");\n return null;\n }\n const tempDir = fs3.mkdtempSync(`/tmp/direct-upload-${storageName}-`);\n try {\n logInfo("Creating archive...");\n const archiveStart = Date.now();\n const archivePath = path.join(tempDir, "archive.tar.gz");\n if (!createArchive(mountPath, archivePath)) {\n logError("Failed to create archive");\n recordSandboxOp(\n "artifact_archive_create",\n Date.now() - archiveStart,\n false\n );\n return null;\n }\n recordSandboxOp("artifact_archive_create", Date.now() - archiveStart, true);\n logInfo("Creating manifest...");\n const manifestPath = path.join(tempDir, "manifest.json");\n if (!createManifest(files, manifestPath)) {\n logError("Failed to create manifest");\n return null;\n }\n logInfo("Uploading archive to S3...");\n const s3UploadStart = Date.now();\n if (!await httpPutPresigned(\n archiveInfo.presignedUrl,\n archivePath,\n "application/gzip"\n )) {\n logError("Failed to upload archive to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n logInfo("Uploading manifest to S3...");\n if (!await httpPutPresigned(\n manifestInfo.presignedUrl,\n manifestPath,\n "application/json"\n )) {\n logError("Failed to upload manifest to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, true);\n logInfo("Calling commit endpoint...");\n const commitStart = Date.now();\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n if (message) {\n commitPayload.message = message;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse) {\n logError("Failed to call commit endpoint");\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n if (!commitResponse.success) {\n logError(`Commit failed: ${JSON.stringify(commitResponse)}`);\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, true);\n logInfo(`Direct upload snapshot created: 
${versionId.slice(0, 8)}`);\n return { versionId };\n } finally {\n try {\n fs3.rmSync(tempDir, { recursive: true, force: true });\n } catch {\n }\n }\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nfunction findJsonlFiles(dir) {\n const files = [];\n function walk(currentDir) {\n try {\n const items = fs4.readdirSync(currentDir);\n for (const item of items) {\n const fullPath = path2.join(currentDir, item);\n const stat = fs4.statSync(fullPath);\n if (stat.isDirectory()) {\n walk(fullPath);\n } else if (item.endsWith(".jsonl")) {\n files.push(fullPath);\n }\n }\n } catch {\n }\n }\n walk(dir);\n return files;\n}\nfunction findCodexSessionFile(sessionsDir, sessionId) {\n const files = findJsonlFiles(sessionsDir);\n logInfo(`Searching for Codex session ${sessionId} in ${files.length} files`);\n for (const filepath of files) {\n const filename = path2.basename(filepath);\n if (filename.includes(sessionId) || filename.replace(/-/g, "").includes(sessionId.replace(/-/g, ""))) {\n logInfo(`Found Codex session file: ${filepath}`);\n return filepath;\n }\n }\n if (files.length > 0) {\n files.sort((a, b) => {\n const statA = fs4.statSync(a);\n const statB = fs4.statSync(b);\n return statB.mtimeMs - statA.mtimeMs;\n });\n const mostRecent = files[0] ?? null;\n if (mostRecent) {\n logInfo(\n `Session ID not found in filenames, using most recent: ${mostRecent}`\n );\n }\n return mostRecent;\n }\n return null;\n}\nasync function createCheckpoint() {\n const checkpointStart = Date.now();\n logInfo("Creating checkpoint...");\n const sessionIdStart = Date.now();\n if (!fs4.existsSync(SESSION_ID_FILE)) {\n logError("No session ID found, checkpoint creation failed");\n recordSandboxOp(\n "session_id_read",\n Date.now() - sessionIdStart,\n false,\n "Session ID file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const cliAgentSessionId = fs4.readFileSync(SESSION_ID_FILE, "utf-8").trim();\n recordSandboxOp("session_id_read", Date.now() - sessionIdStart, true);\n const sessionHistoryStart = Date.now();\n if (!fs4.existsSync(SESSION_HISTORY_PATH_FILE)) {\n logError("No session history path found, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history path file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionHistoryPathRaw = fs4.readFileSync(SESSION_HISTORY_PATH_FILE, "utf-8").trim();\n let sessionHistoryPath;\n if (sessionHistoryPathRaw.startsWith("CODEX_SEARCH:")) {\n const parts = sessionHistoryPathRaw.split(":");\n if (parts.length !== 3) {\n logError(`Invalid Codex search marker format: ${sessionHistoryPathRaw}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Invalid Codex search marker"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionsDir = parts[1] ?? "";\n const codexSessionId = parts[2] ?? 
"";\n logInfo(`Searching for Codex session in ${sessionsDir}`);\n const foundPath = findCodexSessionFile(sessionsDir, codexSessionId);\n if (!foundPath) {\n logError(\n `Could not find Codex session file for ${codexSessionId} in ${sessionsDir}`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Codex session file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n sessionHistoryPath = foundPath;\n } else {\n sessionHistoryPath = sessionHistoryPathRaw;\n }\n if (!fs4.existsSync(sessionHistoryPath)) {\n logError(\n `Session history file not found at ${sessionHistoryPath}, checkpoint creation failed`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n let cliAgentSessionHistory;\n try {\n cliAgentSessionHistory = fs4.readFileSync(sessionHistoryPath, "utf-8");\n } catch (error) {\n logError(`Failed to read session history: ${error}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n String(error)\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n if (!cliAgentSessionHistory.trim()) {\n logError("Session history is empty, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history empty"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const lineCount = cliAgentSessionHistory.trim().split("\\n").length;\n logInfo(`Session history loaded (${lineCount} lines)`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n true\n );\n let artifactSnapshot = null;\n if (ARTIFACT_DRIVER && ARTIFACT_VOLUME_NAME) {\n logInfo(`Processing artifact with driver: ${ARTIFACT_DRIVER}`);\n if (ARTIFACT_DRIVER !== "vas") {\n logError(\n `Unknown artifact driver: ${ARTIFACT_DRIVER} (only \'vas\' is supported)`\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n logInfo(\n `Creating VAS snapshot for artifact \'${ARTIFACT_VOLUME_NAME}\' at ${ARTIFACT_MOUNT_PATH}`\n );\n logInfo("Using direct S3 upload...");\n const snapshot = await createDirectUploadSnapshot(\n ARTIFACT_MOUNT_PATH,\n ARTIFACT_VOLUME_NAME,\n "artifact",\n RUN_ID,\n `Checkpoint from run ${RUN_ID}`\n );\n if (!snapshot) {\n logError("Failed to create VAS snapshot for artifact");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const artifactVersion = snapshot.versionId;\n if (!artifactVersion) {\n logError("Failed to extract versionId from snapshot");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n artifactSnapshot = {\n artifactName: ARTIFACT_VOLUME_NAME,\n artifactVersion\n };\n logInfo(\n `VAS artifact snapshot created: ${ARTIFACT_VOLUME_NAME}@${artifactVersion}`\n );\n } else {\n logInfo(\n "No artifact configured, creating checkpoint without artifact snapshot"\n );\n }\n logInfo("Calling checkpoint API...");\n const checkpointPayload = {\n runId: RUN_ID,\n cliAgentType: CLI_AGENT_TYPE,\n cliAgentSessionId,\n cliAgentSessionHistory\n };\n if (artifactSnapshot) {\n checkpointPayload.artifactSnapshot = artifactSnapshot;\n }\n const apiCallStart = Date.now();\n const 
result = await httpPostJson(\n CHECKPOINT_URL,\n checkpointPayload\n );\n if (result && result.checkpointId) {\n const checkpointId = result.checkpointId;\n logInfo(`Checkpoint created successfully: ${checkpointId}`);\n recordSandboxOp("checkpoint_api_call", Date.now() - apiCallStart, true);\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, true);\n return true;\n } else {\n logError(\n `Checkpoint API returned invalid response: ${JSON.stringify(result)}`\n );\n recordSandboxOp(\n "checkpoint_api_call",\n Date.now() - apiCallStart,\n false,\n "Invalid API response"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n}\n\n// src/sandbox/scripts/src/lib/metrics.ts\nimport * as fs5 from "fs";\nimport { execSync as execSync3 } from "child_process";\nvar shutdownRequested = false;\nfunction getCpuPercent() {\n try {\n const content = fs5.readFileSync("/proc/stat", "utf-8");\n const line = content.split("\\n")[0];\n if (!line) {\n return 0;\n }\n const parts = line.split(/\\s+/);\n if (parts[0] !== "cpu") {\n return 0;\n }\n const values = parts.slice(1).map((x) => parseInt(x, 10));\n const idleVal = values[3];\n const iowaitVal = values[4];\n if (idleVal === void 0 || iowaitVal === void 0) {\n return 0;\n }\n const idle = idleVal + iowaitVal;\n const total = values.reduce((a, b) => a + b, 0);\n if (total === 0) {\n return 0;\n }\n const cpuPercent = 100 * (1 - idle / total);\n return Math.round(cpuPercent * 100) / 100;\n } catch (error) {\n logDebug(`Failed to get CPU percent: ${error}`);\n return 0;\n }\n}\nfunction getMemoryInfo() {\n try {\n const result = execSync3("free -b", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n for (const line of lines) {\n if (line.startsWith("Mem:")) {\n const parts = line.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n }\n }\n return [0, 0];\n } catch (error) {\n logDebug(`Failed to get memory info: ${error}`);\n return [0, 0];\n }\n}\nfunction getDiskInfo() {\n try {\n const result = execSync3("df -B1 /", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n if (lines.length < 2) {\n return [0, 0];\n }\n const dataLine = lines[1];\n if (!dataLine) {\n return [0, 0];\n }\n const parts = dataLine.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n } catch (error) {\n logDebug(`Failed to get disk info: ${error}`);\n return [0, 0];\n }\n}\nfunction collectMetrics() {\n const cpu = getCpuPercent();\n const [memUsed, memTotal] = getMemoryInfo();\n const [diskUsed, diskTotal] = getDiskInfo();\n return {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n cpu,\n mem_used: memUsed,\n mem_total: memTotal,\n disk_used: diskUsed,\n disk_total: diskTotal\n };\n}\nfunction metricsCollectorLoop() {\n logInfo(`Metrics collector started, writing to ${METRICS_LOG_FILE}`);\n const writeMetrics = () => {\n if (shutdownRequested) {\n logInfo("Metrics collector stopped");\n return;\n }\n try {\n const metrics = collectMetrics();\n fs5.appendFileSync(METRICS_LOG_FILE, JSON.stringify(metrics) + "\\n");\n logDebug(\n 
`Metrics collected: cpu=${metrics.cpu}%, mem=${metrics.mem_used}/${metrics.mem_total}`\n );\n } catch (error) {\n logError(`Failed to collect/write metrics: ${error}`);\n }\n setTimeout(writeMetrics, METRICS_INTERVAL * 1e3);\n };\n writeMetrics();\n}\nfunction startMetricsCollector() {\n shutdownRequested = false;\n setTimeout(metricsCollectorLoop, 0);\n}\nfunction stopMetricsCollector() {\n shutdownRequested = true;\n}\n\n// src/sandbox/scripts/src/lib/upload-telemetry.ts\nimport * as fs6 from "fs";\nvar shutdownRequested2 = false;\nfunction readFileFromPosition(filePath, posFile) {\n let lastPos = 0;\n if (fs6.existsSync(posFile)) {\n try {\n const content = fs6.readFileSync(posFile, "utf-8").trim();\n lastPos = parseInt(content, 10) || 0;\n } catch {\n lastPos = 0;\n }\n }\n let newContent = "";\n let newPos = lastPos;\n if (fs6.existsSync(filePath)) {\n try {\n const fd = fs6.openSync(filePath, "r");\n const stats = fs6.fstatSync(fd);\n const bufferSize = stats.size - lastPos;\n if (bufferSize > 0) {\n const buffer = Buffer.alloc(bufferSize);\n fs6.readSync(fd, buffer, 0, bufferSize, lastPos);\n newContent = buffer.toString("utf-8");\n newPos = stats.size;\n }\n fs6.closeSync(fd);\n } catch (error) {\n logDebug(`Failed to read ${filePath}: ${error}`);\n }\n }\n return [newContent, newPos];\n}\nfunction savePosition(posFile, position) {\n try {\n fs6.writeFileSync(posFile, String(position));\n } catch (error) {\n logDebug(`Failed to save position to ${posFile}: ${error}`);\n }\n}\nfunction readJsonlFromPosition(filePath, posFile) {\n const [content, newPos] = readFileFromPosition(filePath, posFile);\n const entries = [];\n if (content) {\n for (const line of content.trim().split("\\n")) {\n if (line) {\n try {\n entries.push(JSON.parse(line));\n } catch {\n }\n }\n }\n }\n return [entries, newPos];\n}\nfunction readMetricsFromPosition(posFile) {\n return readJsonlFromPosition(METRICS_LOG_FILE, posFile);\n}\nfunction readNetworkLogsFromPosition(posFile) {\n return readJsonlFromPosition(NETWORK_LOG_FILE, posFile);\n}\nfunction readSandboxOpsFromPosition(posFile) {\n return readJsonlFromPosition(SANDBOX_OPS_LOG_FILE, posFile);\n}\nasync function uploadTelemetry() {\n const [systemLog, logPos] = readFileFromPosition(\n SYSTEM_LOG_FILE,\n TELEMETRY_LOG_POS_FILE\n );\n const [metrics, metricsPos] = readMetricsFromPosition(\n TELEMETRY_METRICS_POS_FILE\n );\n const [networkLogs, networkPos] = readNetworkLogsFromPosition(\n TELEMETRY_NETWORK_POS_FILE\n );\n const [sandboxOps, sandboxOpsPos] = readSandboxOpsFromPosition(\n TELEMETRY_SANDBOX_OPS_POS_FILE\n );\n if (!systemLog && metrics.length === 0 && networkLogs.length === 0 && sandboxOps.length === 0) {\n logDebug("No new telemetry data to upload");\n return true;\n }\n const maskedSystemLog = systemLog ? maskData(systemLog) : "";\n const maskedNetworkLogs = networkLogs.length > 0 ? 
maskData(networkLogs) : [];\n const payload = {\n runId: RUN_ID,\n systemLog: maskedSystemLog,\n metrics,\n // Metrics don\'t contain secrets (just numbers)\n networkLogs: maskedNetworkLogs,\n sandboxOperations: sandboxOps\n // Sandbox ops don\'t contain secrets (just timing data)\n };\n logDebug(\n `Uploading telemetry: ${systemLog.length} bytes log, ${metrics.length} metrics, ${networkLogs.length} network logs, ${sandboxOps.length} sandbox ops`\n );\n const result = await httpPostJson(TELEMETRY_URL, payload, 1);\n if (result) {\n savePosition(TELEMETRY_LOG_POS_FILE, logPos);\n savePosition(TELEMETRY_METRICS_POS_FILE, metricsPos);\n savePosition(TELEMETRY_NETWORK_POS_FILE, networkPos);\n savePosition(TELEMETRY_SANDBOX_OPS_POS_FILE, sandboxOpsPos);\n logDebug(\n `Telemetry uploaded successfully: ${result.id ?? "unknown"}`\n );\n return true;\n } else {\n logWarn("Failed to upload telemetry (will retry next interval)");\n return false;\n }\n}\nasync function telemetryUploadLoop() {\n logInfo(`Telemetry upload started (interval: ${TELEMETRY_INTERVAL}s)`);\n const runUpload = async () => {\n if (shutdownRequested2) {\n logInfo("Telemetry upload stopped");\n return;\n }\n try {\n await uploadTelemetry();\n } catch (error) {\n logError(`Telemetry upload error: ${error}`);\n }\n setTimeout(() => void runUpload(), TELEMETRY_INTERVAL * 1e3);\n };\n await runUpload();\n}\nfunction startTelemetryUpload() {\n shutdownRequested2 = false;\n setTimeout(() => void telemetryUploadLoop(), 0);\n}\nfunction stopTelemetryUpload() {\n shutdownRequested2 = true;\n}\nasync function finalTelemetryUpload() {\n logInfo("Performing final telemetry upload...");\n return uploadTelemetry();\n}\n\n// src/sandbox/scripts/src/run-agent.ts\nvar shutdownRequested3 = false;\nfunction heartbeatLoop() {\n const sendHeartbeat = async () => {\n if (shutdownRequested3) {\n return;\n }\n try {\n if (await httpPostJson(HEARTBEAT_URL, { runId: RUN_ID })) {\n logInfo("Heartbeat sent");\n } else {\n logWarn("Heartbeat failed");\n }\n } catch (error) {\n logWarn(`Heartbeat error: ${error}`);\n }\n setTimeout(() => {\n sendHeartbeat().catch(() => {\n });\n }, HEARTBEAT_INTERVAL * 1e3);\n };\n sendHeartbeat().catch(() => {\n });\n}\nasync function cleanup(exitCode, errorMessage) {\n logInfo("\\u25B7 Cleanup");\n const telemetryStart = Date.now();\n let telemetrySuccess = true;\n try {\n await finalTelemetryUpload();\n } catch (error) {\n telemetrySuccess = false;\n logError(`Final telemetry upload failed: ${error}`);\n }\n recordSandboxOp(\n "final_telemetry_upload",\n Date.now() - telemetryStart,\n telemetrySuccess\n );\n logInfo(`Calling complete API with exitCode=${exitCode}`);\n const completePayload = {\n runId: RUN_ID,\n exitCode\n };\n if (errorMessage) {\n completePayload.error = errorMessage;\n }\n const completeStart = Date.now();\n let completeSuccess = false;\n try {\n if (await httpPostJson(COMPLETE_URL, completePayload)) {\n logInfo("Complete API called successfully");\n completeSuccess = true;\n } else {\n logError("Failed to call complete API (sandbox may not be cleaned up)");\n }\n } catch (error) {\n logError(`Complete API call failed: ${error}`);\n }\n recordSandboxOp(\n "complete_api_call",\n Date.now() - completeStart,\n completeSuccess\n );\n shutdownRequested3 = true;\n stopMetricsCollector();\n stopTelemetryUpload();\n logInfo("Background processes stopped");\n if (exitCode === 0) {\n logInfo("\\u2713 Sandbox finished successfully");\n } else {\n logInfo(`\\u2717 Sandbox failed (exit code ${exitCode})`);\n 
}\n}\nasync function run() {\n validateConfig();\n logInfo(`\\u25B6 VM0 Sandbox ${RUN_ID}`);\n logInfo("\\u25B7 Initialization");\n const initStartTime = Date.now();\n logInfo(`Working directory: ${WORKING_DIR}`);\n const heartbeatStart = Date.now();\n heartbeatLoop();\n logInfo("Heartbeat started");\n recordSandboxOp("heartbeat_start", Date.now() - heartbeatStart, true);\n const metricsStart = Date.now();\n startMetricsCollector();\n logInfo("Metrics collector started");\n recordSandboxOp("metrics_collector_start", Date.now() - metricsStart, true);\n const telemetryStart = Date.now();\n startTelemetryUpload();\n logInfo("Telemetry upload started");\n recordSandboxOp("telemetry_upload_start", Date.now() - telemetryStart, true);\n const workingDirStart = Date.now();\n try {\n fs7.mkdirSync(WORKING_DIR, { recursive: true });\n process.chdir(WORKING_DIR);\n } catch (error) {\n recordSandboxOp(\n "working_dir_setup",\n Date.now() - workingDirStart,\n false,\n String(error)\n );\n throw new Error(\n `Failed to create/change to working directory: ${WORKING_DIR} - ${error}`\n );\n }\n recordSandboxOp("working_dir_setup", Date.now() - workingDirStart, true);\n if (CLI_AGENT_TYPE === "codex") {\n const homeDir = process.env.HOME ?? "/home/user";\n const codexHome = `${homeDir}/.codex`;\n fs7.mkdirSync(codexHome, { recursive: true });\n process.env.CODEX_HOME = codexHome;\n logInfo(`Codex home directory: ${codexHome}`);\n const codexLoginStart = Date.now();\n let codexLoginSuccess = false;\n const apiKey = process.env.OPENAI_API_KEY ?? "";\n if (apiKey) {\n try {\n execSync4("codex login --with-api-key", {\n input: apiKey,\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n logInfo("Codex authenticated with API key");\n codexLoginSuccess = true;\n } catch (error) {\n logError(`Codex login failed: ${error}`);\n }\n } else {\n logError("OPENAI_API_KEY not set");\n }\n recordSandboxOp(\n "codex_login",\n Date.now() - codexLoginStart,\n codexLoginSuccess\n );\n }\n const initDurationMs = Date.now() - initStartTime;\n recordSandboxOp("init_total", initDurationMs, true);\n logInfo(`\\u2713 Initialization complete (${Math.floor(initDurationMs / 1e3)}s)`);\n logInfo("\\u25B7 Execution");\n const execStartTime = Date.now();\n logInfo(`Starting ${CLI_AGENT_TYPE} execution...`);\n logInfo(`Prompt: ${PROMPT}`);\n const useMock = process.env.USE_MOCK_CLAUDE === "true";\n let cmd;\n if (CLI_AGENT_TYPE === "codex") {\n if (useMock) {\n throw new Error("Mock mode not supported for Codex");\n }\n const codexArgs = [\n "exec",\n "--json",\n "--dangerously-bypass-approvals-and-sandbox",\n "--skip-git-repo-check",\n "-C",\n WORKING_DIR\n ];\n if (OPENAI_MODEL) {\n codexArgs.push("-m", OPENAI_MODEL);\n }\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n codexArgs.push("resume", RESUME_SESSION_ID, PROMPT);\n } else {\n logInfo("Starting new session");\n codexArgs.push(PROMPT);\n }\n cmd = ["codex", ...codexArgs];\n } else {\n const claudeArgs = [\n "--print",\n "--verbose",\n "--output-format",\n "stream-json",\n "--dangerously-skip-permissions"\n ];\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n claudeArgs.push("--resume", RESUME_SESSION_ID);\n } else {\n logInfo("Starting new session");\n }\n const claudeBin = useMock ? 
"/usr/local/bin/vm0-agent/mock-claude.mjs" : "claude";\n if (useMock) {\n logInfo("Using mock-claude for testing");\n }\n cmd = [claudeBin, ...claudeArgs, PROMPT];\n }\n let agentExitCode = 0;\n const stderrLines = [];\n let logFile = null;\n try {\n logFile = fs7.createWriteStream(AGENT_LOG_FILE);\n const cmdExe = cmd[0];\n if (!cmdExe) {\n throw new Error("Empty command");\n }\n const proc = spawn(cmdExe, cmd.slice(1), {\n stdio: ["ignore", "pipe", "pipe"]\n });\n const exitPromise = new Promise((resolve) => {\n let resolved = false;\n proc.on("error", (err) => {\n if (!resolved) {\n resolved = true;\n logError(`Failed to spawn ${CLI_AGENT_TYPE}: ${err.message}`);\n stderrLines.push(`Spawn error: ${err.message}`);\n resolve(1);\n }\n });\n proc.on("close", (code) => {\n if (!resolved) {\n resolved = true;\n resolve(code ?? 1);\n }\n });\n });\n if (proc.stderr) {\n const stderrRl = readline.createInterface({ input: proc.stderr });\n stderrRl.on("line", (line) => {\n stderrLines.push(line);\n if (logFile && !logFile.destroyed) {\n logFile.write(`[STDERR] ${line}\n`);\n }\n });\n }\n if (proc.stdout) {\n const stdoutRl = readline.createInterface({ input: proc.stdout });\n let eventSequence = 0;\n for await (const line of stdoutRl) {\n if (logFile && !logFile.destroyed) {\n logFile.write(line + "\\n");\n }\n const stripped = line.trim();\n if (!stripped) {\n continue;\n }\n try {\n const event = JSON.parse(stripped);\n eventSequence++;\n await sendEvent(event, eventSequence);\n if (event.type === "result") {\n const resultContent = event.result;\n if (resultContent) {\n console.log(resultContent);\n }\n }\n } catch {\n logDebug(`Non-JSON line from agent: ${stripped.slice(0, 100)}`);\n }\n }\n }\n agentExitCode = await exitPromise;\n } catch (error) {\n logError(`Failed to execute ${CLI_AGENT_TYPE}: ${error}`);\n agentExitCode = 1;\n } finally {\n if (logFile && !logFile.destroyed) {\n logFile.end();\n }\n }\n console.log();\n let finalExitCode = agentExitCode;\n let errorMessage = "";\n if (fs7.existsSync(EVENT_ERROR_FLAG)) {\n logError("Some events failed to send, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Some events failed to send";\n }\n const execDurationMs = Date.now() - execStartTime;\n recordSandboxOp("cli_execution", execDurationMs, agentExitCode === 0);\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`\\u2713 Execution complete (${Math.floor(execDurationMs / 1e3)}s)`);\n } else {\n logInfo(`\\u2717 Execution failed (${Math.floor(execDurationMs / 1e3)}s)`);\n }\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`${CLI_AGENT_TYPE} completed successfully`);\n logInfo("\\u25B7 Checkpoint");\n const checkpointStartTime = Date.now();\n const checkpointSuccess = await createCheckpoint();\n const checkpointDuration = Math.floor(\n (Date.now() - checkpointStartTime) / 1e3\n );\n if (checkpointSuccess) {\n logInfo(`\\u2713 Checkpoint complete (${checkpointDuration}s)`);\n } else {\n logInfo(`\\u2717 Checkpoint failed (${checkpointDuration}s)`);\n }\n if (!checkpointSuccess) {\n logError("Checkpoint creation failed, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Checkpoint creation failed";\n }\n } else {\n if (agentExitCode !== 0) {\n logInfo(`${CLI_AGENT_TYPE} failed with exit code ${agentExitCode}`);\n if (stderrLines.length > 0) {\n errorMessage = stderrLines.map((line) => line.trim()).join(" ");\n logInfo(`Captured stderr: ${errorMessage}`);\n } else {\n errorMessage = `Agent exited with code ${agentExitCode}`;\n }\n }\n }\n 
return [finalExitCode, errorMessage];\n}\nasync function main() {\n let exitCode = 1;\n let errorMessage = "Unexpected termination";\n try {\n [exitCode, errorMessage] = await run();\n } catch (error) {\n if (error instanceof Error) {\n exitCode = 1;\n errorMessage = error.message;\n logError(`Error: ${errorMessage}`);\n } else {\n exitCode = 1;\n errorMessage = `Unexpected error: ${error}`;\n logError(errorMessage);\n }\n } finally {\n await cleanup(exitCode, errorMessage);\n }\n return exitCode;\n}\nmain().then((code) => process.exit(code)).catch((error) => {\n console.error("Fatal error:", error);\n process.exit(1);\n});\n';
|
|
8096
|
+
var RUN_AGENT_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/run-agent.ts\nimport * as fs7 from "fs";\nimport { spawn, execSync as execSync4 } from "child_process";\nimport * as readline from "readline";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HEARTBEAT_INTERVAL = 60;\nvar TELEMETRY_INTERVAL = 30;\nvar HTTP_CONNECT_TIMEOUT = 10;\nvar HTTP_MAX_TIME = 30;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nvar METRICS_INTERVAL = 5;\nfunction validateConfig() {\n if (!WORKING_DIR) {\n throw new Error("VM0_WORKING_DIR is required but not set");\n }\n return true;\n}\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nimport * as fs2 from "fs";\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpPostJson(url, data, maxRetries = HTTP_MAX_RETRIES) {\n const headers = {\n "Content-Type": "application/json",\n Authorization: `Bearer ${API_TOKEN}`\n };\n if (VERCEL_BYPASS) {\n headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;\n }\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP POST attempt ${attempt}/${maxRetries} to ${url}`);\n try {\n const controller = new AbortController();\n const timeoutId = setTimeout(\n () => controller.abort(),\n HTTP_MAX_TIME * 1e3\n );\n const response = await fetch(url, {\n method: "POST",\n headers,\n body: JSON.stringify(data),\n signal: controller.signal\n });\n clearTimeout(timeoutId);\n if (response.ok) {\n const text = await response.text();\n if (text) {\n return JSON.parse(text);\n }\n return {};\n }\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): HTTP ${response.status}`\n );\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("abort")) {\n logWarn(`HTTP POST failed (attempt ${attempt}/${maxRetries}): Timeout`);\n } else {\n logWarn(\n `HTTP POST failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP POST failed after ${maxRetries} attempts to ${url}`);\n return null;\n}\nasync function httpPutPresigned(presignedUrl, filePath, contentType = "application/octet-stream", maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP PUT presigned attempt ${attempt}/${maxRetries}`);\n try {\n const curlCmd = [\n "curl",\n "-f",\n "-X",\n "PUT",\n "-H",\n `Content-Type: ${contentType}`,\n "--data-binary",\n `@${filePath}`,\n "--connect-timeout",\n String(HTTP_CONNECT_TIMEOUT),\n "--max-time",\n String(HTTP_MAX_TIME_UPLOAD),\n "--silent",\n `"${presignedUrl}"`\n ].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP PUT presigned failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP PUT presigned failed after ${maxRetries} attempts`);\n return false;\n}\n\n// src/sandbox/scripts/src/lib/secret-masker.ts\nvar MASK_PLACEHOLDER = "***";\nvar MIN_SECRET_LENGTH = 5;\nvar _masker = null;\nvar SecretMasker = class {\n patterns;\n /**\n * Initialize masker with secret values.\n *\n * @param secretValues - List of secret values to mask\n */\n constructor(secretValues) {\n this.patterns = /* @__PURE__ */ new Set();\n for (const secret of secretValues) {\n if (!secret || secret.length < MIN_SECRET_LENGTH) {\n continue;\n }\n this.patterns.add(secret);\n try {\n const b64 = Buffer.from(secret).toString("base64");\n if (b64.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(b64);\n }\n } catch {\n }\n try {\n const urlEnc = encodeURIComponent(secret);\n if (urlEnc !== secret && urlEnc.length >= MIN_SECRET_LENGTH) {\n this.patterns.add(urlEnc);\n }\n } catch {\n }\n }\n }\n /**\n * Recursively mask all occurrences of secrets in the data.\n *\n * @param data - Data to mask (string, list, dict, or primitive)\n * @returns Masked data with the same structure\n */\n mask(data) {\n return this.deepMask(data);\n }\n deepMask(data) {\n if (typeof data === "string") {\n let result = data;\n for (const pattern of this.patterns) {\n result = result.split(pattern).join(MASK_PLACEHOLDER);\n }\n return result;\n }\n if (Array.isArray(data)) {\n return data.map((item) => this.deepMask(item));\n }\n if (data !== null && typeof data === "object") {\n const result = {};\n for (const [key, value] of Object.entries(\n data\n )) {\n result[key] = this.deepMask(value);\n }\n return result;\n }\n return data;\n }\n};\nfunction createMasker() {\n const secretValuesStr = process.env.VM0_SECRET_VALUES ?? "";\n if (!secretValuesStr) {\n return new SecretMasker([]);\n }\n const secretValues = [];\n for (const encodedValue of secretValuesStr.split(",")) {\n const trimmed = encodedValue.trim();\n if (trimmed) {\n try {\n const decoded = Buffer.from(trimmed, "base64").toString("utf-8");\n if (decoded) {\n secretValues.push(decoded);\n }\n } catch {\n }\n }\n }\n return new SecretMasker(secretValues);\n}\nfunction getMasker() {\n if (_masker === null) {\n _masker = createMasker();\n }\n return _masker;\n}\nfunction maskData(data) {\n return getMasker().mask(data);\n}\n\n// src/sandbox/scripts/src/lib/events.ts\nasync function sendEvent(event, sequenceNumber) {\n const eventType = event.type ?? "";\n const eventSubtype = event.subtype ?? "";\n let sessionId = null;\n if (CLI_AGENT_TYPE === "codex") {\n if (eventType === "thread.started") {\n sessionId = event.thread_id ?? "";\n }\n } else {\n if (eventType === "system" && eventSubtype === "init") {\n sessionId = event.session_id ?? "";\n }\n }\n if (sessionId && !fs2.existsSync(SESSION_ID_FILE)) {\n logInfo(`Captured session ID: ${sessionId}`);\n fs2.writeFileSync(SESSION_ID_FILE, sessionId);\n const homeDir = process.env.HOME ?? "/home/user";\n let sessionHistoryPath;\n if (CLI_AGENT_TYPE === "codex") {\n const codexHome = process.env.CODEX_HOME ?? 
`${homeDir}/.codex`;\n sessionHistoryPath = `CODEX_SEARCH:${codexHome}/sessions:${sessionId}`;\n } else {\n const projectName = WORKING_DIR.replace(/^\\//, "").replace(/\\//g, "-");\n sessionHistoryPath = `${homeDir}/.claude/projects/-${projectName}/${sessionId}.jsonl`;\n }\n fs2.writeFileSync(SESSION_HISTORY_PATH_FILE, sessionHistoryPath);\n logInfo(`Session history will be at: ${sessionHistoryPath}`);\n }\n const eventWithSequence = {\n ...event,\n sequenceNumber\n };\n const maskedEvent = maskData(eventWithSequence);\n const payload = {\n runId: RUN_ID,\n events: [maskedEvent]\n };\n const result = await httpPostJson(WEBHOOK_URL, payload);\n if (result === null) {\n logError("Failed to send event after retries");\n fs2.writeFileSync(EVENT_ERROR_FLAG, "1");\n return false;\n }\n return true;\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nimport * as fs4 from "fs";\nimport * as path2 from "path";\n\n// src/sandbox/scripts/src/lib/direct-upload.ts\nimport * as fs3 from "fs";\nimport * as path from "path";\nimport * as crypto from "crypto";\nimport { execSync as execSync2 } from "child_process";\nfunction computeFileHash(filePath) {\n const hash = crypto.createHash("sha256");\n const buffer = fs3.readFileSync(filePath);\n hash.update(buffer);\n return hash.digest("hex");\n}\nfunction collectFileMetadata(dirPath) {\n const files = [];\n function walkDir(currentPath, relativePath) {\n const items = fs3.readdirSync(currentPath);\n for (const item of items) {\n if (item === ".git" || item === ".vm0") {\n continue;\n }\n const fullPath = path.join(currentPath, item);\n const relPath = relativePath ? path.join(relativePath, item) : item;\n const stat = fs3.statSync(fullPath);\n if (stat.isDirectory()) {\n walkDir(fullPath, relPath);\n } else if (stat.isFile()) {\n try {\n const fileHash = computeFileHash(fullPath);\n files.push({\n path: relPath,\n hash: fileHash,\n size: stat.size\n });\n } catch (error) {\n logWarn(`Could not process file ${relPath}: ${error}`);\n }\n }\n }\n }\n walkDir(dirPath, "");\n return files;\n}\nfunction createArchive(dirPath, tarPath) {\n try {\n execSync2(\n `tar -czf "${tarPath}" --exclude=\'.git\' --exclude=\'.vm0\' -C "${dirPath}" .`,\n { stdio: ["pipe", "pipe", "pipe"] }\n );\n return true;\n } catch (error) {\n logError(`Failed to create archive: ${error}`);\n return false;\n }\n}\nfunction createManifest(files, manifestPath) {\n try {\n const manifest = {\n version: 1,\n files,\n createdAt: (/* @__PURE__ */ new Date()).toISOString()\n };\n fs3.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));\n return true;\n } catch (error) {\n logError(`Failed to create manifest: ${error}`);\n return false;\n }\n}\nasync function createDirectUploadSnapshot(mountPath, storageName, storageType = "artifact", runId, message) {\n logInfo(\n `Creating direct upload snapshot for \'${storageName}\' (type: ${storageType})`\n );\n logInfo("Computing file hashes...");\n const hashStart = Date.now();\n const files = collectFileMetadata(mountPath);\n recordSandboxOp("artifact_hash_compute", Date.now() - hashStart, true);\n logInfo(`Found ${files.length} files`);\n if (files.length === 0) {\n logInfo("No files to upload, creating empty version");\n }\n logInfo("Calling prepare endpoint...");\n const prepareStart = Date.now();\n const preparePayload = {\n storageName,\n storageType,\n files\n };\n if (runId) {\n preparePayload.runId = runId;\n }\n const prepareResponse = await httpPostJson(\n STORAGE_PREPARE_URL,\n preparePayload\n );\n if (!prepareResponse) {\n 
logError("Failed to call prepare endpoint");\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n const versionId = prepareResponse.versionId;\n if (!versionId) {\n logError(`Invalid prepare response: ${JSON.stringify(prepareResponse)}`);\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, false);\n return null;\n }\n recordSandboxOp("artifact_prepare_api", Date.now() - prepareStart, true);\n if (prepareResponse.existing) {\n logInfo(`Version already exists (deduplicated): ${versionId.slice(0, 8)}`);\n logInfo("Updating HEAD pointer...");\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse || !commitResponse.success) {\n logError(`Failed to update HEAD: ${JSON.stringify(commitResponse)}`);\n return null;\n }\n return { versionId, deduplicated: true };\n }\n const uploads = prepareResponse.uploads;\n if (!uploads) {\n logError("No upload URLs in prepare response");\n return null;\n }\n const archiveInfo = uploads.archive;\n const manifestInfo = uploads.manifest;\n if (!archiveInfo || !manifestInfo) {\n logError("Missing archive or manifest upload info");\n return null;\n }\n const tempDir = fs3.mkdtempSync(`/tmp/direct-upload-${storageName}-`);\n try {\n logInfo("Creating archive...");\n const archiveStart = Date.now();\n const archivePath = path.join(tempDir, "archive.tar.gz");\n if (!createArchive(mountPath, archivePath)) {\n logError("Failed to create archive");\n recordSandboxOp(\n "artifact_archive_create",\n Date.now() - archiveStart,\n false\n );\n return null;\n }\n recordSandboxOp("artifact_archive_create", Date.now() - archiveStart, true);\n logInfo("Creating manifest...");\n const manifestPath = path.join(tempDir, "manifest.json");\n if (!createManifest(files, manifestPath)) {\n logError("Failed to create manifest");\n return null;\n }\n logInfo("Uploading archive to S3...");\n const s3UploadStart = Date.now();\n if (!await httpPutPresigned(\n archiveInfo.presignedUrl,\n archivePath,\n "application/gzip"\n )) {\n logError("Failed to upload archive to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n logInfo("Uploading manifest to S3...");\n if (!await httpPutPresigned(\n manifestInfo.presignedUrl,\n manifestPath,\n "application/json"\n )) {\n logError("Failed to upload manifest to S3");\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, false);\n return null;\n }\n recordSandboxOp("artifact_s3_upload", Date.now() - s3UploadStart, true);\n logInfo("Calling commit endpoint...");\n const commitStart = Date.now();\n const commitPayload = {\n storageName,\n storageType,\n versionId,\n files\n };\n if (runId) {\n commitPayload.runId = runId;\n }\n if (message) {\n commitPayload.message = message;\n }\n const commitResponse = await httpPostJson(\n STORAGE_COMMIT_URL,\n commitPayload\n );\n if (!commitResponse) {\n logError("Failed to call commit endpoint");\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n if (!commitResponse.success) {\n logError(`Commit failed: ${JSON.stringify(commitResponse)}`);\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, false);\n return null;\n }\n recordSandboxOp("artifact_commit_api", Date.now() - commitStart, true);\n logInfo(`Direct upload snapshot created: 
${versionId.slice(0, 8)}`);\n return { versionId };\n } finally {\n try {\n fs3.rmSync(tempDir, { recursive: true, force: true });\n } catch {\n }\n }\n}\n\n// src/sandbox/scripts/src/lib/checkpoint.ts\nfunction findJsonlFiles(dir) {\n const files = [];\n function walk(currentDir) {\n try {\n const items = fs4.readdirSync(currentDir);\n for (const item of items) {\n const fullPath = path2.join(currentDir, item);\n const stat = fs4.statSync(fullPath);\n if (stat.isDirectory()) {\n walk(fullPath);\n } else if (item.endsWith(".jsonl")) {\n files.push(fullPath);\n }\n }\n } catch {\n }\n }\n walk(dir);\n return files;\n}\nfunction findCodexSessionFile(sessionsDir, sessionId) {\n const files = findJsonlFiles(sessionsDir);\n logInfo(`Searching for Codex session ${sessionId} in ${files.length} files`);\n for (const filepath of files) {\n const filename = path2.basename(filepath);\n if (filename.includes(sessionId) || filename.replace(/-/g, "").includes(sessionId.replace(/-/g, ""))) {\n logInfo(`Found Codex session file: ${filepath}`);\n return filepath;\n }\n }\n if (files.length > 0) {\n files.sort((a, b) => {\n const statA = fs4.statSync(a);\n const statB = fs4.statSync(b);\n return statB.mtimeMs - statA.mtimeMs;\n });\n const mostRecent = files[0] ?? null;\n if (mostRecent) {\n logInfo(\n `Session ID not found in filenames, using most recent: ${mostRecent}`\n );\n }\n return mostRecent;\n }\n return null;\n}\nasync function createCheckpoint() {\n const checkpointStart = Date.now();\n logInfo("Creating checkpoint...");\n const sessionIdStart = Date.now();\n if (!fs4.existsSync(SESSION_ID_FILE)) {\n logError("No session ID found, checkpoint creation failed");\n recordSandboxOp(\n "session_id_read",\n Date.now() - sessionIdStart,\n false,\n "Session ID file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const cliAgentSessionId = fs4.readFileSync(SESSION_ID_FILE, "utf-8").trim();\n recordSandboxOp("session_id_read", Date.now() - sessionIdStart, true);\n const sessionHistoryStart = Date.now();\n if (!fs4.existsSync(SESSION_HISTORY_PATH_FILE)) {\n logError("No session history path found, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history path file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionHistoryPathRaw = fs4.readFileSync(SESSION_HISTORY_PATH_FILE, "utf-8").trim();\n let sessionHistoryPath;\n if (sessionHistoryPathRaw.startsWith("CODEX_SEARCH:")) {\n const parts = sessionHistoryPathRaw.split(":");\n if (parts.length !== 3) {\n logError(`Invalid Codex search marker format: ${sessionHistoryPathRaw}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Invalid Codex search marker"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const sessionsDir = parts[1] ?? "";\n const codexSessionId = parts[2] ?? 
"";\n logInfo(`Searching for Codex session in ${sessionsDir}`);\n const foundPath = findCodexSessionFile(sessionsDir, codexSessionId);\n if (!foundPath) {\n logError(\n `Could not find Codex session file for ${codexSessionId} in ${sessionsDir}`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Codex session file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n sessionHistoryPath = foundPath;\n } else {\n sessionHistoryPath = sessionHistoryPathRaw;\n }\n if (!fs4.existsSync(sessionHistoryPath)) {\n logError(\n `Session history file not found at ${sessionHistoryPath}, checkpoint creation failed`\n );\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history file not found"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n let cliAgentSessionHistory;\n try {\n cliAgentSessionHistory = fs4.readFileSync(sessionHistoryPath, "utf-8");\n } catch (error) {\n logError(`Failed to read session history: ${error}`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n String(error)\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n if (!cliAgentSessionHistory.trim()) {\n logError("Session history is empty, checkpoint creation failed");\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n false,\n "Session history empty"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const lineCount = cliAgentSessionHistory.trim().split("\\n").length;\n logInfo(`Session history loaded (${lineCount} lines)`);\n recordSandboxOp(\n "session_history_read",\n Date.now() - sessionHistoryStart,\n true\n );\n let artifactSnapshot = null;\n if (ARTIFACT_DRIVER && ARTIFACT_VOLUME_NAME) {\n logInfo(`Processing artifact with driver: ${ARTIFACT_DRIVER}`);\n if (ARTIFACT_DRIVER !== "vas") {\n logError(\n `Unknown artifact driver: ${ARTIFACT_DRIVER} (only \'vas\' is supported)`\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n logInfo(\n `Creating VAS snapshot for artifact \'${ARTIFACT_VOLUME_NAME}\' at ${ARTIFACT_MOUNT_PATH}`\n );\n logInfo("Using direct S3 upload...");\n const snapshot = await createDirectUploadSnapshot(\n ARTIFACT_MOUNT_PATH,\n ARTIFACT_VOLUME_NAME,\n "artifact",\n RUN_ID,\n `Checkpoint from run ${RUN_ID}`\n );\n if (!snapshot) {\n logError("Failed to create VAS snapshot for artifact");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n const artifactVersion = snapshot.versionId;\n if (!artifactVersion) {\n logError("Failed to extract versionId from snapshot");\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n artifactSnapshot = {\n artifactName: ARTIFACT_VOLUME_NAME,\n artifactVersion\n };\n logInfo(\n `VAS artifact snapshot created: ${ARTIFACT_VOLUME_NAME}@${artifactVersion}`\n );\n } else {\n logInfo(\n "No artifact configured, creating checkpoint without artifact snapshot"\n );\n }\n logInfo("Calling checkpoint API...");\n const checkpointPayload = {\n runId: RUN_ID,\n cliAgentType: CLI_AGENT_TYPE,\n cliAgentSessionId,\n cliAgentSessionHistory\n };\n if (artifactSnapshot) {\n checkpointPayload.artifactSnapshot = artifactSnapshot;\n }\n const apiCallStart = Date.now();\n const 
result = await httpPostJson(\n CHECKPOINT_URL,\n checkpointPayload\n );\n if (result && result.checkpointId) {\n const checkpointId = result.checkpointId;\n logInfo(`Checkpoint created successfully: ${checkpointId}`);\n recordSandboxOp("checkpoint_api_call", Date.now() - apiCallStart, true);\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, true);\n return true;\n } else {\n logError(\n `Checkpoint API returned invalid response: ${JSON.stringify(result)}`\n );\n recordSandboxOp(\n "checkpoint_api_call",\n Date.now() - apiCallStart,\n false,\n "Invalid API response"\n );\n recordSandboxOp("checkpoint_total", Date.now() - checkpointStart, false);\n return false;\n }\n}\n\n// src/sandbox/scripts/src/lib/metrics.ts\nimport * as fs5 from "fs";\nimport { execSync as execSync3 } from "child_process";\nvar shutdownRequested = false;\nfunction getCpuPercent() {\n try {\n const content = fs5.readFileSync("/proc/stat", "utf-8");\n const line = content.split("\\n")[0];\n if (!line) {\n return 0;\n }\n const parts = line.split(/\\s+/);\n if (parts[0] !== "cpu") {\n return 0;\n }\n const values = parts.slice(1).map((x) => parseInt(x, 10));\n const idleVal = values[3];\n const iowaitVal = values[4];\n if (idleVal === void 0 || iowaitVal === void 0) {\n return 0;\n }\n const idle = idleVal + iowaitVal;\n const total = values.reduce((a, b) => a + b, 0);\n if (total === 0) {\n return 0;\n }\n const cpuPercent = 100 * (1 - idle / total);\n return Math.round(cpuPercent * 100) / 100;\n } catch (error) {\n logDebug(`Failed to get CPU percent: ${error}`);\n return 0;\n }\n}\nfunction getMemoryInfo() {\n try {\n const result = execSync3("free -b", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n for (const line of lines) {\n if (line.startsWith("Mem:")) {\n const parts = line.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n }\n }\n return [0, 0];\n } catch (error) {\n logDebug(`Failed to get memory info: ${error}`);\n return [0, 0];\n }\n}\nfunction getDiskInfo() {\n try {\n const result = execSync3("df -B1 /", {\n encoding: "utf-8",\n timeout: 5e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n const lines = result.trim().split("\\n");\n if (lines.length < 2) {\n return [0, 0];\n }\n const dataLine = lines[1];\n if (!dataLine) {\n return [0, 0];\n }\n const parts = dataLine.split(/\\s+/);\n const totalStr = parts[1];\n const usedStr = parts[2];\n if (!totalStr || !usedStr) {\n return [0, 0];\n }\n const total = parseInt(totalStr, 10);\n const used = parseInt(usedStr, 10);\n return [used, total];\n } catch (error) {\n logDebug(`Failed to get disk info: ${error}`);\n return [0, 0];\n }\n}\nfunction collectMetrics() {\n const cpu = getCpuPercent();\n const [memUsed, memTotal] = getMemoryInfo();\n const [diskUsed, diskTotal] = getDiskInfo();\n return {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n cpu,\n mem_used: memUsed,\n mem_total: memTotal,\n disk_used: diskUsed,\n disk_total: diskTotal\n };\n}\nfunction metricsCollectorLoop() {\n logInfo(`Metrics collector started, writing to ${METRICS_LOG_FILE}`);\n const writeMetrics = () => {\n if (shutdownRequested) {\n logInfo("Metrics collector stopped");\n return;\n }\n try {\n const metrics = collectMetrics();\n fs5.appendFileSync(METRICS_LOG_FILE, JSON.stringify(metrics) + "\\n");\n logDebug(\n 
`Metrics collected: cpu=${metrics.cpu}%, mem=${metrics.mem_used}/${metrics.mem_total}`\n );\n } catch (error) {\n logError(`Failed to collect/write metrics: ${error}`);\n }\n setTimeout(writeMetrics, METRICS_INTERVAL * 1e3);\n };\n writeMetrics();\n}\nfunction startMetricsCollector() {\n shutdownRequested = false;\n setTimeout(metricsCollectorLoop, 0);\n}\nfunction stopMetricsCollector() {\n shutdownRequested = true;\n}\n\n// src/sandbox/scripts/src/lib/upload-telemetry.ts\nimport * as fs6 from "fs";\nvar shutdownRequested2 = false;\nfunction readFileFromPosition(filePath, posFile) {\n let lastPos = 0;\n if (fs6.existsSync(posFile)) {\n try {\n const content = fs6.readFileSync(posFile, "utf-8").trim();\n lastPos = parseInt(content, 10) || 0;\n } catch {\n lastPos = 0;\n }\n }\n let newContent = "";\n let newPos = lastPos;\n if (fs6.existsSync(filePath)) {\n try {\n const fd = fs6.openSync(filePath, "r");\n const stats = fs6.fstatSync(fd);\n const bufferSize = stats.size - lastPos;\n if (bufferSize > 0) {\n const buffer = Buffer.alloc(bufferSize);\n fs6.readSync(fd, buffer, 0, bufferSize, lastPos);\n newContent = buffer.toString("utf-8");\n newPos = stats.size;\n }\n fs6.closeSync(fd);\n } catch (error) {\n logDebug(`Failed to read ${filePath}: ${error}`);\n }\n }\n return [newContent, newPos];\n}\nfunction savePosition(posFile, position) {\n try {\n fs6.writeFileSync(posFile, String(position));\n } catch (error) {\n logDebug(`Failed to save position to ${posFile}: ${error}`);\n }\n}\nfunction readJsonlFromPosition(filePath, posFile) {\n const [content, newPos] = readFileFromPosition(filePath, posFile);\n const entries = [];\n if (content) {\n for (const line of content.trim().split("\\n")) {\n if (line) {\n try {\n entries.push(JSON.parse(line));\n } catch {\n }\n }\n }\n }\n return [entries, newPos];\n}\nfunction readMetricsFromPosition(posFile) {\n return readJsonlFromPosition(METRICS_LOG_FILE, posFile);\n}\nfunction readNetworkLogsFromPosition(posFile) {\n return readJsonlFromPosition(NETWORK_LOG_FILE, posFile);\n}\nfunction readSandboxOpsFromPosition(posFile) {\n return readJsonlFromPosition(SANDBOX_OPS_LOG_FILE, posFile);\n}\nasync function uploadTelemetry() {\n const [systemLog, logPos] = readFileFromPosition(\n SYSTEM_LOG_FILE,\n TELEMETRY_LOG_POS_FILE\n );\n const [metrics, metricsPos] = readMetricsFromPosition(\n TELEMETRY_METRICS_POS_FILE\n );\n const [networkLogs, networkPos] = readNetworkLogsFromPosition(\n TELEMETRY_NETWORK_POS_FILE\n );\n const [sandboxOps, sandboxOpsPos] = readSandboxOpsFromPosition(\n TELEMETRY_SANDBOX_OPS_POS_FILE\n );\n if (!systemLog && metrics.length === 0 && networkLogs.length === 0 && sandboxOps.length === 0) {\n logDebug("No new telemetry data to upload");\n return true;\n }\n const maskedSystemLog = systemLog ? maskData(systemLog) : "";\n const maskedNetworkLogs = networkLogs.length > 0 ? 
maskData(networkLogs) : [];\n const payload = {\n runId: RUN_ID,\n systemLog: maskedSystemLog,\n metrics,\n // Metrics don\'t contain secrets (just numbers)\n networkLogs: maskedNetworkLogs,\n sandboxOperations: sandboxOps\n // Sandbox ops don\'t contain secrets (just timing data)\n };\n logDebug(\n `Uploading telemetry: ${systemLog.length} bytes log, ${metrics.length} metrics, ${networkLogs.length} network logs, ${sandboxOps.length} sandbox ops`\n );\n const result = await httpPostJson(TELEMETRY_URL, payload, 1);\n if (result) {\n savePosition(TELEMETRY_LOG_POS_FILE, logPos);\n savePosition(TELEMETRY_METRICS_POS_FILE, metricsPos);\n savePosition(TELEMETRY_NETWORK_POS_FILE, networkPos);\n savePosition(TELEMETRY_SANDBOX_OPS_POS_FILE, sandboxOpsPos);\n logDebug(\n `Telemetry uploaded successfully: ${result.id ?? "unknown"}`\n );\n return true;\n } else {\n logWarn("Failed to upload telemetry (will retry next interval)");\n return false;\n }\n}\nasync function telemetryUploadLoop() {\n logInfo(`Telemetry upload started (interval: ${TELEMETRY_INTERVAL}s)`);\n const runUpload = async () => {\n if (shutdownRequested2) {\n logInfo("Telemetry upload stopped");\n return;\n }\n try {\n await uploadTelemetry();\n } catch (error) {\n logError(`Telemetry upload error: ${error}`);\n }\n setTimeout(() => void runUpload(), TELEMETRY_INTERVAL * 1e3);\n };\n await runUpload();\n}\nfunction startTelemetryUpload() {\n shutdownRequested2 = false;\n setTimeout(() => void telemetryUploadLoop(), 0);\n}\nfunction stopTelemetryUpload() {\n shutdownRequested2 = true;\n}\nasync function finalTelemetryUpload() {\n logInfo("Performing final telemetry upload...");\n return uploadTelemetry();\n}\n\n// src/sandbox/scripts/src/run-agent.ts\nvar shutdownRequested3 = false;\nfunction heartbeatLoop() {\n const sendHeartbeat = async () => {\n if (shutdownRequested3) {\n return;\n }\n try {\n if (await httpPostJson(HEARTBEAT_URL, { runId: RUN_ID })) {\n logInfo("Heartbeat sent");\n } else {\n logWarn("Heartbeat failed");\n }\n } catch (error) {\n logWarn(`Heartbeat error: ${error}`);\n }\n setTimeout(() => {\n sendHeartbeat().catch(() => {\n });\n }, HEARTBEAT_INTERVAL * 1e3);\n };\n sendHeartbeat().catch(() => {\n });\n}\nasync function cleanup(exitCode, errorMessage) {\n logInfo("\\u25B7 Cleanup");\n const telemetryStart = Date.now();\n let telemetrySuccess = true;\n try {\n await finalTelemetryUpload();\n } catch (error) {\n telemetrySuccess = false;\n logError(`Final telemetry upload failed: ${error}`);\n }\n recordSandboxOp(\n "final_telemetry_upload",\n Date.now() - telemetryStart,\n telemetrySuccess\n );\n logInfo(`Calling complete API with exitCode=${exitCode}`);\n const completePayload = {\n runId: RUN_ID,\n exitCode\n };\n if (errorMessage) {\n completePayload.error = errorMessage;\n }\n const completeStart = Date.now();\n let completeSuccess = false;\n try {\n if (await httpPostJson(COMPLETE_URL, completePayload)) {\n logInfo("Complete API called successfully");\n completeSuccess = true;\n } else {\n logError("Failed to call complete API (sandbox may not be cleaned up)");\n }\n } catch (error) {\n logError(`Complete API call failed: ${error}`);\n }\n recordSandboxOp(\n "complete_api_call",\n Date.now() - completeStart,\n completeSuccess\n );\n shutdownRequested3 = true;\n stopMetricsCollector();\n stopTelemetryUpload();\n logInfo("Background processes stopped");\n if (exitCode === 0) {\n logInfo("\\u2713 Sandbox finished successfully");\n } else {\n logInfo(`\\u2717 Sandbox failed (exit code ${exitCode})`);\n 
}\n}\nasync function run() {\n validateConfig();\n logInfo(`\\u25B6 VM0 Sandbox ${RUN_ID}`);\n logInfo("\\u25B7 Initialization");\n const initStartTime = Date.now();\n logInfo(`Working directory: ${WORKING_DIR}`);\n const heartbeatStart = Date.now();\n heartbeatLoop();\n logInfo("Heartbeat started");\n recordSandboxOp("heartbeat_start", Date.now() - heartbeatStart, true);\n const metricsStart = Date.now();\n startMetricsCollector();\n logInfo("Metrics collector started");\n recordSandboxOp("metrics_collector_start", Date.now() - metricsStart, true);\n const telemetryStart = Date.now();\n startTelemetryUpload();\n logInfo("Telemetry upload started");\n recordSandboxOp("telemetry_upload_start", Date.now() - telemetryStart, true);\n const workingDirStart = Date.now();\n try {\n fs7.mkdirSync(WORKING_DIR, { recursive: true });\n process.chdir(WORKING_DIR);\n } catch (error) {\n recordSandboxOp(\n "working_dir_setup",\n Date.now() - workingDirStart,\n false,\n String(error)\n );\n throw new Error(\n `Failed to create/change to working directory: ${WORKING_DIR} - ${error}`\n );\n }\n recordSandboxOp("working_dir_setup", Date.now() - workingDirStart, true);\n if (CLI_AGENT_TYPE === "codex") {\n const homeDir = process.env.HOME ?? "/home/user";\n const codexHome = `${homeDir}/.codex`;\n fs7.mkdirSync(codexHome, { recursive: true });\n process.env.CODEX_HOME = codexHome;\n logInfo(`Codex home directory: ${codexHome}`);\n const codexLoginStart = Date.now();\n let codexLoginSuccess = false;\n const apiKey = process.env.OPENAI_API_KEY ?? "";\n if (apiKey) {\n try {\n execSync4("codex login --with-api-key", {\n input: apiKey,\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n logInfo("Codex authenticated with API key");\n codexLoginSuccess = true;\n } catch (error) {\n logError(`Codex login failed: ${error}`);\n }\n } else {\n logError("OPENAI_API_KEY not set");\n }\n recordSandboxOp(\n "codex_login",\n Date.now() - codexLoginStart,\n codexLoginSuccess\n );\n }\n const initDurationMs = Date.now() - initStartTime;\n recordSandboxOp("init_total", initDurationMs, true);\n logInfo(`\\u2713 Initialization complete (${Math.floor(initDurationMs / 1e3)}s)`);\n logInfo("\\u25B7 Execution");\n const execStartTime = Date.now();\n logInfo(`Starting ${CLI_AGENT_TYPE} execution...`);\n logInfo(`Prompt: ${PROMPT}`);\n const useMock = process.env.USE_MOCK_CLAUDE === "true";\n let cmd;\n if (CLI_AGENT_TYPE === "codex") {\n if (useMock) {\n throw new Error("Mock mode not supported for Codex");\n }\n const codexArgs = [\n "exec",\n "--json",\n "--dangerously-bypass-approvals-and-sandbox",\n "--skip-git-repo-check",\n "-C",\n WORKING_DIR\n ];\n if (OPENAI_MODEL) {\n codexArgs.push("-m", OPENAI_MODEL);\n }\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n codexArgs.push("resume", RESUME_SESSION_ID, PROMPT);\n } else {\n logInfo("Starting new session");\n codexArgs.push(PROMPT);\n }\n cmd = ["codex", ...codexArgs];\n } else {\n const claudeArgs = [\n "--print",\n "--verbose",\n "--output-format",\n "stream-json",\n "--dangerously-skip-permissions"\n ];\n if (RESUME_SESSION_ID) {\n logInfo(`Resuming session: ${RESUME_SESSION_ID}`);\n claudeArgs.push("--resume", RESUME_SESSION_ID);\n } else {\n logInfo("Starting new session");\n }\n const claudeBin = useMock ? 
"/usr/local/bin/vm0-agent/mock-claude.mjs" : "claude";\n if (useMock) {\n logInfo("Using mock-claude for testing");\n }\n cmd = [claudeBin, ...claudeArgs, PROMPT];\n }\n let agentExitCode = 0;\n const stderrLines = [];\n let logFile = null;\n try {\n logFile = fs7.createWriteStream(AGENT_LOG_FILE);\n const cmdExe = cmd[0];\n if (!cmdExe) {\n throw new Error("Empty command");\n }\n const proc = spawn(cmdExe, cmd.slice(1), {\n stdio: ["ignore", "pipe", "pipe"]\n });\n const exitPromise = new Promise((resolve) => {\n let resolved = false;\n proc.on("error", (err) => {\n if (!resolved) {\n resolved = true;\n logError(`Failed to spawn ${CLI_AGENT_TYPE}: ${err.message}`);\n stderrLines.push(`Spawn error: ${err.message}`);\n resolve(1);\n }\n });\n proc.on("close", (code) => {\n if (!resolved) {\n resolved = true;\n resolve(code ?? 1);\n }\n });\n });\n if (proc.stderr) {\n const stderrRl = readline.createInterface({ input: proc.stderr });\n stderrRl.on("line", (line) => {\n stderrLines.push(line);\n if (logFile && !logFile.destroyed) {\n logFile.write(`[STDERR] ${line}\n`);\n }\n });\n }\n if (proc.stdout) {\n const stdoutRl = readline.createInterface({ input: proc.stdout });\n let eventSequence = 0;\n for await (const line of stdoutRl) {\n if (logFile && !logFile.destroyed) {\n logFile.write(line + "\\n");\n }\n const stripped = line.trim();\n if (!stripped) {\n continue;\n }\n try {\n const event = JSON.parse(stripped);\n await sendEvent(event, eventSequence);\n eventSequence++;\n if (event.type === "result") {\n const resultContent = event.result;\n if (resultContent) {\n console.log(resultContent);\n }\n }\n } catch {\n logDebug(`Non-JSON line from agent: ${stripped.slice(0, 100)}`);\n }\n }\n }\n agentExitCode = await exitPromise;\n } catch (error) {\n logError(`Failed to execute ${CLI_AGENT_TYPE}: ${error}`);\n agentExitCode = 1;\n } finally {\n if (logFile && !logFile.destroyed) {\n logFile.end();\n }\n }\n console.log();\n let finalExitCode = agentExitCode;\n let errorMessage = "";\n if (fs7.existsSync(EVENT_ERROR_FLAG)) {\n logError("Some events failed to send, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Some events failed to send";\n }\n const execDurationMs = Date.now() - execStartTime;\n recordSandboxOp("cli_execution", execDurationMs, agentExitCode === 0);\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`\\u2713 Execution complete (${Math.floor(execDurationMs / 1e3)}s)`);\n } else {\n logInfo(`\\u2717 Execution failed (${Math.floor(execDurationMs / 1e3)}s)`);\n }\n if (agentExitCode === 0 && finalExitCode === 0) {\n logInfo(`${CLI_AGENT_TYPE} completed successfully`);\n logInfo("\\u25B7 Checkpoint");\n const checkpointStartTime = Date.now();\n const checkpointSuccess = await createCheckpoint();\n const checkpointDuration = Math.floor(\n (Date.now() - checkpointStartTime) / 1e3\n );\n if (checkpointSuccess) {\n logInfo(`\\u2713 Checkpoint complete (${checkpointDuration}s)`);\n } else {\n logInfo(`\\u2717 Checkpoint failed (${checkpointDuration}s)`);\n }\n if (!checkpointSuccess) {\n logError("Checkpoint creation failed, marking run as failed");\n finalExitCode = 1;\n errorMessage = "Checkpoint creation failed";\n }\n } else {\n if (agentExitCode !== 0) {\n logInfo(`${CLI_AGENT_TYPE} failed with exit code ${agentExitCode}`);\n if (stderrLines.length > 0) {\n errorMessage = stderrLines.map((line) => line.trim()).join(" ");\n logInfo(`Captured stderr: ${errorMessage}`);\n } else {\n errorMessage = `Agent exited with code ${agentExitCode}`;\n }\n }\n }\n 
return [finalExitCode, errorMessage];\n}\nasync function main() {\n let exitCode = 1;\n let errorMessage = "Unexpected termination";\n try {\n [exitCode, errorMessage] = await run();\n } catch (error) {\n if (error instanceof Error) {\n exitCode = 1;\n errorMessage = error.message;\n logError(`Error: ${errorMessage}`);\n } else {\n exitCode = 1;\n errorMessage = `Unexpected error: ${error}`;\n logError(errorMessage);\n }\n } finally {\n await cleanup(exitCode, errorMessage);\n }\n return exitCode;\n}\nmain().then((code) => process.exit(code)).catch((error) => {\n console.error("Fatal error:", error);\n process.exit(1);\n});\n';
|
|
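Before any event or telemetry payload leaves the sandbox, the run-agent bundle above masks every configured secret in three forms: the raw value, its base64 encoding, and its URL encoding. A minimal standalone sketch of that behavior, assuming only Node built-ins (the secret value and sample strings are invented for illustration; this is not the package's exported API):

// Mask a secret and its common encodings, mirroring the bundled SecretMasker.
const MASK = "***";
const secret = "p@ss/word+1"; // hypothetical secret value
const patterns = new Set([
  secret,
  Buffer.from(secret).toString("base64"), // "cEBzcy93b3JkKzE="
  encodeURIComponent(secret), // "p%40ss%2Fword%2B1"
]);

function maskString(s) {
  let out = s;
  for (const p of patterns) out = out.split(p).join(MASK);
  return out;
}

console.log(maskString("Authorization: Bearer p@ss/word+1"));
// -> Authorization: Bearer ***
console.log(maskString("q=p%40ss%2Fword%2B1&b64=cEBzcy93b3JkKzE="));
// -> q=***&b64=***

As in the bundle, values shorter than five characters are never added as patterns, which keeps trivially short strings from masking unrelated text.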
7766
8097
|
var DOWNLOAD_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/download.ts\nimport * as fs2 from "fs";\nimport * as path from "path";\nimport * as os from "os";\nimport { execSync as execSync2 } from "child_process";\n\n// src/sandbox/scripts/src/lib/common.ts\nimport * as fs from "fs";\nvar RUN_ID = process.env.VM0_RUN_ID ?? "";\nvar API_URL = process.env.VM0_API_URL ?? "";\nvar API_TOKEN = process.env.VM0_API_TOKEN ?? "";\nvar PROMPT = process.env.VM0_PROMPT ?? "";\nvar VERCEL_BYPASS = process.env.VERCEL_PROTECTION_BYPASS ?? "";\nvar RESUME_SESSION_ID = process.env.VM0_RESUME_SESSION_ID ?? "";\nvar CLI_AGENT_TYPE = process.env.CLI_AGENT_TYPE ?? "claude-code";\nvar OPENAI_MODEL = process.env.OPENAI_MODEL ?? "";\nvar WORKING_DIR = process.env.VM0_WORKING_DIR ?? "";\nvar ARTIFACT_DRIVER = process.env.VM0_ARTIFACT_DRIVER ?? "";\nvar ARTIFACT_MOUNT_PATH = process.env.VM0_ARTIFACT_MOUNT_PATH ?? "";\nvar ARTIFACT_VOLUME_NAME = process.env.VM0_ARTIFACT_VOLUME_NAME ?? "";\nvar ARTIFACT_VERSION_ID = process.env.VM0_ARTIFACT_VERSION_ID ?? "";\nvar WEBHOOK_URL = `${API_URL}/api/webhooks/agent/events`;\nvar CHECKPOINT_URL = `${API_URL}/api/webhooks/agent/checkpoints`;\nvar COMPLETE_URL = `${API_URL}/api/webhooks/agent/complete`;\nvar HEARTBEAT_URL = `${API_URL}/api/webhooks/agent/heartbeat`;\nvar TELEMETRY_URL = `${API_URL}/api/webhooks/agent/telemetry`;\nvar PROXY_URL = `${API_URL}/api/webhooks/agent/proxy`;\nvar STORAGE_PREPARE_URL = `${API_URL}/api/webhooks/agent/storages/prepare`;\nvar STORAGE_COMMIT_URL = `${API_URL}/api/webhooks/agent/storages/commit`;\nvar HTTP_MAX_TIME_UPLOAD = 60;\nvar HTTP_MAX_RETRIES = 3;\nvar SESSION_ID_FILE = `/tmp/vm0-session-${RUN_ID}.txt`;\nvar SESSION_HISTORY_PATH_FILE = `/tmp/vm0-session-history-${RUN_ID}.txt`;\nvar EVENT_ERROR_FLAG = `/tmp/vm0-event-error-${RUN_ID}`;\nvar SYSTEM_LOG_FILE = `/tmp/vm0-main-${RUN_ID}.log`;\nvar AGENT_LOG_FILE = `/tmp/vm0-agent-${RUN_ID}.log`;\nvar METRICS_LOG_FILE = `/tmp/vm0-metrics-${RUN_ID}.jsonl`;\nvar NETWORK_LOG_FILE = `/tmp/vm0-network-${RUN_ID}.jsonl`;\nvar TELEMETRY_LOG_POS_FILE = `/tmp/vm0-telemetry-log-pos-${RUN_ID}.txt`;\nvar TELEMETRY_METRICS_POS_FILE = `/tmp/vm0-telemetry-metrics-pos-${RUN_ID}.txt`;\nvar TELEMETRY_NETWORK_POS_FILE = `/tmp/vm0-telemetry-network-pos-${RUN_ID}.txt`;\nvar TELEMETRY_SANDBOX_OPS_POS_FILE = `/tmp/vm0-telemetry-sandbox-ops-pos-${RUN_ID}.txt`;\nvar SANDBOX_OPS_LOG_FILE = `/tmp/vm0-sandbox-ops-${RUN_ID}.jsonl`;\nfunction recordSandboxOp(actionType, durationMs, success, error) {\n const entry = {\n ts: (/* @__PURE__ */ new Date()).toISOString(),\n action_type: actionType,\n duration_ms: durationMs,\n success\n };\n if (error) {\n entry.error = error;\n }\n fs.appendFileSync(SANDBOX_OPS_LOG_FILE, JSON.stringify(entry) + "\\n");\n}\n\n// src/sandbox/scripts/src/lib/log.ts\nvar SCRIPT_NAME = process.env.LOG_SCRIPT_NAME ?? 
"run-agent";\nvar DEBUG_MODE = process.env.VM0_DEBUG === "1";\nfunction timestamp() {\n return (/* @__PURE__ */ new Date()).toISOString().replace(/\\.\\d{3}Z$/, "Z");\n}\nfunction logInfo(msg) {\n console.error(`[${timestamp()}] [INFO] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logWarn(msg) {\n console.error(`[${timestamp()}] [WARN] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logError(msg) {\n console.error(`[${timestamp()}] [ERROR] [sandbox:${SCRIPT_NAME}] ${msg}`);\n}\nfunction logDebug(msg) {\n if (DEBUG_MODE) {\n console.error(`[${timestamp()}] [DEBUG] [sandbox:${SCRIPT_NAME}] ${msg}`);\n }\n}\n\n// src/sandbox/scripts/src/lib/http-client.ts\nimport { execSync } from "child_process";\nfunction sleep(ms) {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\nasync function httpDownload(url, destPath, maxRetries = HTTP_MAX_RETRIES) {\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n logDebug(`HTTP download attempt ${attempt}/${maxRetries} from ${url}`);\n try {\n const curlCmd = ["curl", "-fsSL", "-o", destPath, `"${url}"`].join(" ");\n execSync(curlCmd, {\n timeout: HTTP_MAX_TIME_UPLOAD * 1e3,\n stdio: ["pipe", "pipe", "pipe"]\n });\n return true;\n } catch (error) {\n const errorMsg = error instanceof Error ? error.message : String(error);\n if (errorMsg.includes("ETIMEDOUT") || errorMsg.includes("timeout")) {\n logWarn(\n `HTTP download failed (attempt ${attempt}/${maxRetries}): Timeout`\n );\n } else {\n logWarn(\n `HTTP download failed (attempt ${attempt}/${maxRetries}): ${errorMsg}`\n );\n }\n if (attempt < maxRetries) {\n await sleep(1e3);\n }\n }\n }\n logError(`HTTP download failed after ${maxRetries} attempts from ${url}`);\n return false;\n}\n\n// src/sandbox/scripts/src/download.ts\nasync function downloadStorage(mountPath, archiveUrl) {\n logInfo(`Downloading storage to ${mountPath}`);\n const tempTar = path.join(\n os.tmpdir(),\n `storage-${Date.now()}-${Math.random().toString(36).slice(2)}.tar.gz`\n );\n try {\n if (!await httpDownload(archiveUrl, tempTar)) {\n logError(`Failed to download archive for ${mountPath}`);\n return false;\n }\n fs2.mkdirSync(mountPath, { recursive: true });\n try {\n execSync2(`tar -xzf "${tempTar}" -C "${mountPath}"`, {\n stdio: ["pipe", "pipe", "pipe"]\n });\n } catch {\n logInfo(`Archive appears empty for ${mountPath}`);\n }\n logInfo(`Successfully extracted to ${mountPath}`);\n return true;\n } finally {\n try {\n fs2.unlinkSync(tempTar);\n } catch {\n }\n }\n}\nasync function main() {\n const args = process.argv.slice(2);\n if (args.length < 1) {\n logError("Usage: node download.mjs <manifest_path>");\n process.exit(1);\n }\n const manifestPath = args[0] ?? "";\n if (!manifestPath || !fs2.existsSync(manifestPath)) {\n logError(`Manifest file not found: ${manifestPath}`);\n process.exit(1);\n }\n logInfo(`Starting storage download from manifest: ${manifestPath}`);\n let manifest;\n try {\n const content = fs2.readFileSync(manifestPath, "utf-8");\n manifest = JSON.parse(content);\n } catch (error) {\n logError(`Failed to load manifest: ${error}`);\n process.exit(1);\n }\n const storages = manifest.storages ?? 
[];\n const artifact = manifest.artifact;\n const storageCount = storages.length;\n const hasArtifact = artifact !== void 0;\n logInfo(`Found ${storageCount} storages, artifact: ${hasArtifact}`);\n const downloadTotalStart = Date.now();\n let downloadSuccess = true;\n for (const storage of storages) {\n const mountPath = storage.mountPath;\n const archiveUrl = storage.archiveUrl;\n if (archiveUrl && archiveUrl !== "null") {\n const storageStart = Date.now();\n const success = await downloadStorage(mountPath, archiveUrl);\n recordSandboxOp("storage_download", Date.now() - storageStart, success);\n if (!success) {\n downloadSuccess = false;\n }\n }\n }\n if (artifact) {\n const artifactMount = artifact.mountPath;\n const artifactUrl = artifact.archiveUrl;\n if (artifactUrl && artifactUrl !== "null") {\n const artifactStart = Date.now();\n const success = await downloadStorage(artifactMount, artifactUrl);\n recordSandboxOp("artifact_download", Date.now() - artifactStart, success);\n if (!success) {\n downloadSuccess = false;\n }\n }\n }\n recordSandboxOp(\n "download_total",\n Date.now() - downloadTotalStart,\n downloadSuccess\n );\n logInfo("All storages downloaded successfully");\n}\nmain().catch((error) => {\n logError(`Fatal error: ${error}`);\n process.exit(1);\n});\n';
|
|
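download.mjs above is invoked as node download.mjs <manifest_path> and expects a JSON manifest with a storages array plus an optional artifact entry; an archiveUrl that is missing or the literal string "null" is skipped. An illustrative manifest (mount paths and URLs are placeholders):

{
  "storages": [
    { "mountPath": "/mnt/storages/data", "archiveUrl": "https://example.com/presigned/data.tar.gz" },
    { "mountPath": "/mnt/storages/cache", "archiveUrl": "null" }
  ],
  "artifact": {
    "mountPath": "/mnt/artifact",
    "archiveUrl": "https://example.com/presigned/artifact.tar.gz"
  }
}

Each archive is fetched to a temp file with curl -fsSL and extracted with tar -xzf into its mountPath (created recursively first), so an empty archive leaves the directory freshly created but empty.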
7767
8098
|
var MOCK_CLAUDE_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/mock-claude.ts\nimport * as fs from "fs";\nimport * as path from "path";\nimport { execSync } from "child_process";\nfunction parseArgs(args) {\n const result = {\n outputFormat: "text",\n print: false,\n verbose: false,\n dangerouslySkipPermissions: false,\n resume: null,\n prompt: ""\n };\n const remaining = [];\n let i = 0;\n while (i < args.length) {\n const arg = args[i];\n if (arg === "--output-format" && i + 1 < args.length) {\n result.outputFormat = args[i + 1] ?? "text";\n i += 2;\n } else if (arg === "--print") {\n result.print = true;\n i++;\n } else if (arg === "--verbose") {\n result.verbose = true;\n i++;\n } else if (arg === "--dangerously-skip-permissions") {\n result.dangerouslySkipPermissions = true;\n i++;\n } else if (arg === "--resume" && i + 1 < args.length) {\n result.resume = args[i + 1] ?? null;\n i += 2;\n } else if (arg) {\n remaining.push(arg);\n i++;\n } else {\n i++;\n }\n }\n if (remaining.length > 0) {\n result.prompt = remaining[0] ?? "";\n }\n return result;\n}\nfunction createSessionHistory(sessionId, cwd) {\n const projectName = cwd.replace(/^\\//, "").replace(/\\//g, "-");\n const homeDir = process.env.HOME ?? "/home/user";\n const sessionDir = `${homeDir}/.claude/projects/-${projectName}`;\n fs.mkdirSync(sessionDir, { recursive: true });\n return path.join(sessionDir, `${sessionId}.jsonl`);\n}\nfunction main() {\n const sessionId = `mock-${Date.now() * 1e3 + Math.floor(Math.random() * 1e3)}`;\n const args = parseArgs(process.argv.slice(2));\n const prompt = args.prompt;\n const outputFormat = args.outputFormat;\n if (prompt.startsWith("@fail:")) {\n const errorMsg = prompt.slice(6);\n console.error(errorMsg);\n process.exit(1);\n }\n const cwd = process.cwd();\n if (outputFormat === "stream-json") {\n const sessionHistoryFile = createSessionHistory(sessionId, cwd);\n const events = [];\n const initEvent = {\n type: "system",\n subtype: "init",\n cwd,\n session_id: sessionId,\n tools: ["Bash"],\n model: "mock-claude"\n };\n console.log(JSON.stringify(initEvent));\n events.push(initEvent);\n const textEvent = {\n type: "assistant",\n message: {\n role: "assistant",\n content: [{ type: "text", text: "Executing command..." }]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(textEvent));\n events.push(textEvent);\n const toolUseEvent = {\n type: "assistant",\n message: {\n role: "assistant",\n content: [\n {\n type: "tool_use",\n id: "toolu_mock_001",\n name: "Bash",\n input: { command: prompt }\n }\n ]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(toolUseEvent));\n events.push(toolUseEvent);\n let output;\n let exitCode;\n try {\n output = execSync(`bash -c ${JSON.stringify(prompt)}`, {\n encoding: "utf-8",\n stdio: ["pipe", "pipe", "pipe"]\n });\n exitCode = 0;\n } catch (error) {\n const execError = error;\n output = (execError.stdout ?? "") + (execError.stderr ?? "");\n exitCode = execError.status ?? 1;\n }\n const isError = exitCode !== 0;\n const toolResultEvent = {\n type: "user",\n message: {\n role: "user",\n content: [\n {\n type: "tool_result",\n tool_use_id: "toolu_mock_001",\n content: output,\n is_error: isError\n }\n ]\n },\n session_id: sessionId\n };\n console.log(JSON.stringify(toolResultEvent));\n events.push(toolResultEvent);\n const resultEvent = {\n type: "result",\n subtype: exitCode === 0 ? 
"success" : "error",\n is_error: exitCode !== 0,\n duration_ms: 100,\n num_turns: 1,\n result: output,\n session_id: sessionId,\n total_cost_usd: 0,\n usage: { input_tokens: 0, output_tokens: 0 }\n };\n console.log(JSON.stringify(resultEvent));\n events.push(resultEvent);\n const historyContent = events.map((e) => JSON.stringify(e)).join("\\n") + "\\n";\n fs.writeFileSync(sessionHistoryFile, historyContent);\n process.exit(exitCode);\n } else {\n try {\n execSync(`bash -c ${JSON.stringify(prompt)}`, {\n stdio: "inherit"\n });\n process.exit(0);\n } catch (error) {\n const execError = error;\n process.exit(execError.status ?? 1);\n }\n }\n}\nvar isMainModule = process.argv[1]?.endsWith("mock-claude.mjs") || process.argv[1]?.endsWith("mock-claude.ts");\nif (isMainModule) {\n main();\n}\nexport {\n createSessionHistory,\n parseArgs\n};\n';
|
|
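mock-claude above stands in for the real CLI when run-agent is started with USE_MOCK_CLAUDE=true: it treats the prompt as a bash command, executes it, and prints the same stream-json event sequence the runner parses. For a prompt of echo hello, the five emitted lines look like this (session id and timing are illustrative):

{"type":"system","subtype":"init","cwd":"/workspace","session_id":"mock-1700000000000123","tools":["Bash"],"model":"mock-claude"}
{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"Executing command..."}]},"session_id":"mock-1700000000000123"}
{"type":"assistant","message":{"role":"assistant","content":[{"type":"tool_use","id":"toolu_mock_001","name":"Bash","input":{"command":"echo hello"}}]},"session_id":"mock-1700000000000123"}
{"type":"user","message":{"role":"user","content":[{"type":"tool_result","tool_use_id":"toolu_mock_001","content":"hello\n","is_error":false}]},"session_id":"mock-1700000000000123"}
{"type":"result","subtype":"success","is_error":false,"duration_ms":100,"num_turns":1,"result":"hello\n","session_id":"mock-1700000000000123","total_cost_usd":0,"usage":{"input_tokens":0,"output_tokens":0}}

Prompts beginning with @fail: short-circuit instead: the remainder is printed to stderr and the process exits 1, which exercises the runner's failure path.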
7768
8099
|
var ENV_LOADER_SCRIPT = '#!/usr/bin/env node\n\n// src/sandbox/scripts/src/env-loader.ts\nimport * as fs from "fs";\nimport { spawn } from "child_process";\nvar ENV_JSON_PATH = "/tmp/vm0-env.json";\nconsole.log("[env-loader] Starting...");\nif (fs.existsSync(ENV_JSON_PATH)) {\n console.log(`[env-loader] Loading environment from ${ENV_JSON_PATH}`);\n try {\n const content = fs.readFileSync(ENV_JSON_PATH, "utf-8");\n const envData = JSON.parse(content);\n for (const [key, value] of Object.entries(envData)) {\n process.env[key] = value;\n }\n console.log(\n `[env-loader] Loaded ${Object.keys(envData).length} environment variables`\n );\n } catch (error) {\n console.error(`[env-loader] ERROR loading JSON: ${error}`);\n process.exit(1);\n }\n} else {\n console.error(\n `[env-loader] ERROR: Environment file not found: ${ENV_JSON_PATH}`\n );\n process.exit(1);\n}\nvar criticalVars = [\n "VM0_RUN_ID",\n "VM0_API_URL",\n "VM0_WORKING_DIR",\n "VM0_PROMPT"\n];\nfor (const varName of criticalVars) {\n const val = process.env[varName] ?? "";\n if (val) {\n const display = val.length > 50 ? val.substring(0, 50) + "..." : val;\n console.log(`[env-loader] ${varName}=${display}`);\n } else {\n console.log(`[env-loader] WARNING: ${varName} is empty`);\n }\n}\nvar runAgentPath = "/usr/local/bin/vm0-agent/run-agent.mjs";\nconsole.log(`[env-loader] Executing ${runAgentPath}`);\nvar child = spawn("node", [runAgentPath], {\n stdio: "inherit",\n env: process.env\n});\nchild.on("close", (code) => {\n process.exit(code ?? 1);\n});\n';
|
|
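env-loader above refuses to start without /tmp/vm0-env.json, loads it as a flat string-to-string map into process.env, warns when any of the four critical variables is empty, and then spawns run-agent.mjs with the inherited environment. An illustrative file (values are placeholders):

{
  "VM0_RUN_ID": "run_0123456789",
  "VM0_API_URL": "https://api.example.com",
  "VM0_API_TOKEN": "<redacted>",
  "VM0_WORKING_DIR": "/workspace",
  "VM0_PROMPT": "echo hello",
  "CLI_AGENT_TYPE": "claude-code"
}

Values longer than 50 characters are truncated to 50 plus "..." in the startup log, so long prompts and tokens are not echoed in full.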
@@ -7795,7 +8126,7 @@ var FEATURE_SWITCHES = {
|
|
|
7795
8126
|
var ENV_LOADER_PATH = "/usr/local/bin/vm0-agent/env-loader.mjs";
|
|
7796
8127
|
|
|
7797
8128
|
// src/lib/proxy/vm-registry.ts
|
|
7798
|
-
import
|
|
8129
|
+
import fs6 from "fs";
|
|
7799
8130
|
var DEFAULT_REGISTRY_PATH = "/tmp/vm0-vm-registry.json";
|
|
7800
8131
|
var VMRegistry = class {
|
|
7801
8132
|
registryPath;
|
|
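Only fragments of VMRegistry appear in this hunk: load() parses the whole registry file as JSON, save() stamps data.updatedAt with Date.now(), and the class registers VM ids against IP addresses for the proxy. A purely hypothetical /tmp/vm0-vm-registry.json, with the vms key and per-VM fields invented for illustration (the real shape is not shown in this diff):

{
  "vms": {
    "vm-0a1b2c": { "ip": "172.16.0.2" }
  },
  "updatedAt": 1700000000000
}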
@@ -7809,8 +8140,8 @@ var VMRegistry = class {
|
|
|
7809
8140
|
*/
|
|
7810
8141
|
load() {
|
|
7811
8142
|
try {
|
|
7812
|
-
if (
|
|
7813
|
-
const content =
|
|
8143
|
+
if (fs6.existsSync(this.registryPath)) {
|
|
8144
|
+
const content = fs6.readFileSync(this.registryPath, "utf-8");
|
|
7814
8145
|
return JSON.parse(content);
|
|
7815
8146
|
}
|
|
7816
8147
|
} catch {
|
|
@@ -7824,8 +8155,8 @@ var VMRegistry = class {
|
|
|
7824
8155
|
this.data.updatedAt = Date.now();
|
|
7825
8156
|
const content = JSON.stringify(this.data, null, 2);
|
|
7826
8157
|
const tempPath = `${this.registryPath}.tmp`;
|
|
7827
|
-
|
|
7828
|
-
|
|
8158
|
+
fs6.writeFileSync(tempPath, content, { mode: 420 });
|
|
8159
|
+
fs6.renameSync(tempPath, this.registryPath);
|
|
7829
8160
|
}
|
|
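save() above uses the usual write-temp-then-rename pattern, and the bare mode: 420 is just the decimal spelling of octal 0o644 (likewise mode: 493 further down is 0o755). A minimal standalone sketch, assuming nothing beyond Node's fs:

import fs from "fs";

// Write the full payload to a sibling temp file, then rename over the
// target. On the same filesystem, rename() replaces the destination in a
// single step, so concurrent readers never observe a half-written file.
function saveAtomically(registryPath, data) {
  const tempPath = `${registryPath}.tmp`;
  fs.writeFileSync(tempPath, JSON.stringify(data, null, 2), { mode: 0o644 }); // 0o644 === 420
  fs.renameSync(tempPath, registryPath);
}

saveAtomically("/tmp/vm0-vm-registry.json", { updatedAt: Date.now() });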
7830
8161
|
/**
|
|
7831
8162
|
* Register a VM with its IP address
|
|
@@ -7900,7 +8231,7 @@ function initVMRegistry(registryPath) {
|
|
|
7900
8231
|
|
|
7901
8232
|
// src/lib/proxy/proxy-manager.ts
|
|
7902
8233
|
import { spawn as spawn2 } from "child_process";
|
|
7903
|
-
import
|
|
8234
|
+
import fs7 from "fs";
|
|
7904
8235
|
import path4 from "path";
|
|
7905
8236
|
|
|
7906
8237
|
// src/lib/proxy/mitm-addon-script.ts
|
|
@@ -8422,10 +8753,10 @@ var ProxyManager = class {
|
|
|
8422
8753
|
*/
|
|
8423
8754
|
ensureAddonScript() {
|
|
8424
8755
|
const addonDir = path4.dirname(this.config.addonPath);
|
|
8425
|
-
if (!
|
|
8426
|
-
|
|
8756
|
+
if (!fs7.existsSync(addonDir)) {
|
|
8757
|
+
fs7.mkdirSync(addonDir, { recursive: true });
|
|
8427
8758
|
}
|
|
8428
|
-
|
|
8759
|
+
fs7.writeFileSync(this.config.addonPath, RUNNER_MITM_ADDON_SCRIPT, {
|
|
8429
8760
|
mode: 493
|
|
8430
8761
|
});
|
|
8431
8762
|
console.log(
|
|
@@ -8436,11 +8767,11 @@ var ProxyManager = class {
    * Validate proxy configuration
    */
   validateConfig() {
-    if (!
+    if (!fs7.existsSync(this.config.caDir)) {
       throw new Error(`Proxy CA directory not found: ${this.config.caDir}`);
     }
     const caCertPath = path4.join(this.config.caDir, "mitmproxy-ca.pem");
-    if (!
+    if (!fs7.existsSync(caCertPath)) {
       throw new Error(`Proxy CA certificate not found: ${caCertPath}`);
     }
     this.ensureAddonScript();
@@ -8830,17 +9161,17 @@ function buildEnvironmentVariables(context, apiUrl) {
 }
 
 // src/lib/network-logs/network-logs.ts
-import
+import fs8 from "fs";
 function getNetworkLogPath(runId) {
   return `/tmp/vm0-network-${runId}.jsonl`;
 }
 function readNetworkLogs(runId) {
   const logPath = getNetworkLogPath(runId);
-  if (!
+  if (!fs8.existsSync(logPath)) {
     return [];
   }
   try {
-    const content =
+    const content = fs8.readFileSync(logPath, "utf-8");
     const lines = content.split("\n").filter((line) => line.trim());
     return lines.map((line) => JSON.parse(line));
   } catch (err) {
@@ -8853,8 +9184,8 @@ function readNetworkLogs(runId) {
 function cleanupNetworkLogs(runId) {
   const logPath = getNetworkLogPath(runId);
   try {
-    if (
-
+    if (fs8.existsSync(logPath)) {
+      fs8.unlinkSync(logPath);
     }
   } catch (err) {
     console.error(
@@ -8897,7 +9228,7 @@ async function uploadNetworkLogs(apiUrl, sandboxToken, runId) {
 }
 
 // src/lib/vm-setup/vm-setup.ts
-import
+import fs9 from "fs";
 
 // src/lib/scripts/utils.ts
 function getAllScripts() {
@@ -8911,17 +9242,17 @@ function getAllScripts() {
 }
 
 // src/lib/vm-setup/vm-setup.ts
-async function uploadScripts(
+async function uploadScripts(guest) {
   const scripts = getAllScripts();
-  await
+  await guest.execOrThrow(`sudo mkdir -p ${SCRIPT_PATHS.baseDir}`);
   for (const script of scripts) {
-    await
+    await guest.writeFileWithSudo(script.path, script.content);
   }
-  await
+  await guest.execOrThrow(
     `sudo chmod +x ${SCRIPT_PATHS.baseDir}/*.mjs 2>/dev/null || true`
   );
 }
-async function downloadStorages(
+async function downloadStorages(guest, manifest) {
   const totalArchives = manifest.storages.filter((s) => s.archiveUrl).length + (manifest.artifact?.archiveUrl ? 1 : 0);
   if (totalArchives === 0) {
     console.log(`[Executor] No archives to download`);
@@ -8929,8 +9260,8 @@ async function downloadStorages(ssh, manifest) {
   }
   console.log(`[Executor] Downloading ${totalArchives} archive(s)...`);
   const manifestJson = JSON.stringify(manifest);
-  await
-  const result = await
+  await guest.writeFile("/tmp/storage-manifest.json", manifestJson);
+  const result = await guest.exec(
     `node ${SCRIPT_PATHS.download} /tmp/storage-manifest.json`
   );
   if (result.exitCode !== 0) {
@@ -8938,7 +9269,7 @@ async function downloadStorages(ssh, manifest) {
   }
   console.log(`[Executor] Storage download completed`);
 }
-async function restoreSessionHistory(
+async function restoreSessionHistory(guest, resumeSession, workingDir, cliAgentType) {
   const { sessionId, sessionHistory } = resumeSession;
   let sessionPath;
   if (cliAgentType === "codex") {
@@ -8952,34 +9283,34 @@ async function restoreSessionHistory(ssh, resumeSession, workingDir, cliAgentTyp
   }
   console.log(`[Executor] Restoring session history to ${sessionPath}`);
   const dirPath = sessionPath.substring(0, sessionPath.lastIndexOf("/"));
-  await
-  await
+  await guest.execOrThrow(`mkdir -p "${dirPath}"`);
+  await guest.writeFile(sessionPath, sessionHistory);
   console.log(
     `[Executor] Session history restored (${sessionHistory.split("\n").length} lines)`
   );
 }
-async function installProxyCA(
-  if (!
+async function installProxyCA(guest, caCertPath) {
+  if (!fs9.existsSync(caCertPath)) {
     throw new Error(
       `Proxy CA certificate not found at ${caCertPath}. Run generate-proxy-ca.sh first.`
     );
   }
-  const caCert =
+  const caCert = fs9.readFileSync(caCertPath, "utf-8");
   console.log(
     `[Executor] Installing proxy CA certificate (${caCert.length} bytes)`
   );
-  await
+  await guest.writeFileWithSudo(
     "/usr/local/share/ca-certificates/vm0-proxy-ca.crt",
     caCert
   );
-  await
+  await guest.execOrThrow("sudo update-ca-certificates");
   console.log(`[Executor] Proxy CA certificate installed successfully`);
 }
-async function configureDNS(
+async function configureDNS(guest) {
   const dnsConfig = `nameserver 8.8.8.8
 nameserver 8.8.4.4
 nameserver 1.1.1.1`;
-  await
+  await guest.execOrThrow(
     `sudo sh -c 'rm -f /etc/resolv.conf && echo "${dnsConfig}" > /etc/resolv.conf'`
   );
 }
@@ -8995,11 +9326,11 @@ var CURL_ERROR_MESSAGES = {
   60: "TLS certificate error (proxy CA not trusted)",
   22: "HTTP error from server"
 };
-async function runPreflightCheck(
+async function runPreflightCheck(guest, apiUrl, runId, sandboxToken, bypassSecret) {
   const heartbeatUrl = `${apiUrl}/api/webhooks/agent/heartbeat`;
   const bypassHeader = bypassSecret ? ` -H "x-vercel-protection-bypass: ${bypassSecret}"` : "";
   const curlCmd = `curl -sf --connect-timeout 5 --max-time 10 "${heartbeatUrl}" -X POST -H "Content-Type: application/json" -H "Authorization: Bearer ${sandboxToken}"${bypassHeader} -d '{"runId":"${runId}"}'`;
-  const result = await
+  const result = await guest.exec(curlCmd, 2e4);
   if (result.exitCode === 0) {
     return { success: true };
   }
@@ -9063,14 +9394,27 @@ async function executeJob(context, config, options = {}) {
     throw new Error("VM started but no IP address available");
   }
   log(`[Executor] VM ${vmId} started, guest IP: ${guestIp}`);
-  const
-
-
+  const guestProtocol = config.sandbox.guest_protocol;
+  let guest;
+  if (guestProtocol === "ssh") {
+    const sshKeyPath = getRunnerSSHKeyPath();
+    guest = new SSHClient({
+      host: guestIp,
+      user: "user",
+      privateKeyPath: sshKeyPath || void 0
+    });
+    log(`[Executor] Using SSH for guest communication: ${guestIp}`);
+  } else {
+    const vsockPath = vm.getVsockPath();
+    guest = new VsockClient(vsockPath);
+    log(`[Executor] Using vsock for guest communication: ${vsockPath}`);
+  }
+  log(`[Executor] Verifying ${guestProtocol} connectivity...`);
   await withSandboxTiming(
-    "
-    () =>
+    "guest_wait",
+    () => guest.waitUntilReachable(3e4, 1e3)
   );
-  log(`[Executor]
+  log(`[Executor] Guest client ready (${guestProtocol})`);
   const firewallConfig = context.experimentalFirewall;
   if (firewallConfig?.enabled) {
     const mitmEnabled = firewallConfig.experimental_mitm ?? false;
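The executor now drives the guest through either SSHClient or VsockClient behind one shared surface, selected by the new sandbox.guest_protocol setting. The class bodies are not part of this diff, so the TypeScript interface below is only inferred from the call sites visible here (exec, execOrThrow, writeFile, writeFileWithSudo, waitUntilReachable); names and return shapes beyond what the calls show are assumptions.

// Inferred guest-client surface; not the package's actual declarations.
interface ExecResult {
  exitCode: number; // compared against 0 at the call sites
  stdout: string;   // parsed for exit codes and log tails
}

interface GuestClient {
  // Run a shell command in the guest; the second argument appears to be a
  // timeout in milliseconds (runPreflightCheck passes 2e4).
  exec(command: string, timeoutMs?: number): Promise<ExecResult>;
  // Like exec, but assumed to reject when the command exits non-zero.
  execOrThrow(command: string): Promise<ExecResult>;
  writeFile(path: string, content: string): Promise<void>;
  writeFileWithSudo(path: string, content: string): Promise<void>;
  // Polled readiness check; executeJob calls waitUntilReachable(3e4, 1e3),
  // i.e. a 30 s timeout with a 1 s interval.
  waitUntilReachable(timeoutMs: number, intervalMs: number): Promise<void>;
}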
@@ -9089,25 +9433,25 @@ async function executeJob(context, config, options = {}) {
         config.proxy.ca_dir,
         "mitmproxy-ca-cert.pem"
       );
-      await installProxyCA(
+      await installProxyCA(guest, caCertPath);
     }
   }
   log(`[Executor] Configuring DNS...`);
-  await configureDNS(
+  await configureDNS(guest);
   log(`[Executor] Uploading scripts...`);
-  await withSandboxTiming("script_upload", () => uploadScripts(
+  await withSandboxTiming("script_upload", () => uploadScripts(guest));
   log(`[Executor] Scripts uploaded to ${SCRIPT_PATHS.baseDir}`);
   if (context.storageManifest) {
     await withSandboxTiming(
       "storage_download",
-      () => downloadStorages(
+      () => downloadStorages(guest, context.storageManifest)
     );
   }
   if (context.resumeSession) {
     await withSandboxTiming(
       "session_restore",
       () => restoreSessionHistory(
-
+        guest,
         context.resumeSession,
         context.workingDir,
         context.cliAgentType || "claude-code"
@@ -9119,12 +9463,12 @@ async function executeJob(context, config, options = {}) {
   log(
     `[Executor] Writing env JSON (${envJson.length} bytes) to ${ENV_JSON_PATH}`
   );
-  await
+  await guest.writeFile(ENV_JSON_PATH, envJson);
   if (!options.benchmarkMode) {
     log(`[Executor] Running preflight connectivity check...`);
     const bypassSecret = process.env.VERCEL_AUTOMATION_BYPASS_SECRET;
     const preflight = await runPreflightCheck(
-
+      guest,
       config.server.url,
       context.runId,
       context.sandboxToken,
@@ -9151,24 +9495,24 @@ async function executeJob(context, config, options = {}) {
   const startTime = Date.now();
   if (options.benchmarkMode) {
     log(`[Executor] Running command directly (benchmark mode)...`);
-    await
+    await guest.exec(
       `nohup sh -c '${context.prompt}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
     );
     log(`[Executor] Command started in background`);
   } else {
     log(`[Executor] Running agent via env-loader (background)...`);
-    await
+    await guest.exec(
       `nohup sh -c 'node ${ENV_LOADER_PATH}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
     );
     log(`[Executor] Agent started in background`);
   }
   const pollIntervalMs = 2e3;
-  const maxWaitMs =
+  const maxWaitMs = 2 * 60 * 60 * 1e3;
   let exitCode = 1;
   let completed = false;
   while (Date.now() - startTime < maxWaitMs) {
     await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
-    const checkResult = await
+    const checkResult = await guest.exec(`cat ${exitCodeFile} 2>/dev/null`);
     if (checkResult.exitCode === 0 && checkResult.stdout.trim()) {
       const parsed = parseInt(checkResult.stdout.trim(), 10);
       exitCode = Number.isNaN(parsed) ? 1 : parsed;
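Both branches above launch the workload detached (nohup ... &) and recover its exit status through a file, since neither vsock nor SSH keeps a channel open for the whole run; maxWaitMs = 2 * 60 * 60 * 1e3 caps the wait at two hours. A condensed sketch of that detach-and-poll pattern, using the GuestClient shape sketched earlier (the helper name runDetached is illustrative, not from the package):

// Detach-and-poll sketch: start a guest command in the background, have the
// subshell record its exit status in a file, then poll that file from the host.
async function runDetached(
  guest: GuestClient,
  cmd: string,
  exitCodeFile: string,
  logFile: string
): Promise<number> {
  await guest.exec(
    `nohup sh -c '${cmd}; echo $? > ${exitCodeFile}' > ${logFile} 2>&1 &`
  );
  const deadline = Date.now() + 2 * 60 * 60 * 1e3; // two-hour ceiling, as above
  while (Date.now() < deadline) {
    await new Promise((r) => setTimeout(r, 2e3)); // 2 s poll interval, as above
    const check = await guest.exec(`cat ${exitCodeFile} 2>/dev/null`);
    if (check.exitCode === 0 && check.stdout.trim()) {
      const parsed = parseInt(check.stdout.trim(), 10);
      return Number.isNaN(parsed) ? 1 : parsed;
    }
  }
  return 1; // timed out without an exit code
}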
@@ -9176,17 +9520,17 @@ async function executeJob(context, config, options = {}) {
       break;
     }
     if (!options.benchmarkMode) {
-      const processCheck = await
+      const processCheck = await guest.exec(
         `pgrep -f "env-loader.mjs" > /dev/null 2>&1 && echo "RUNNING" || echo "DEAD"`
       );
       if (processCheck.stdout.trim() === "DEAD") {
         log(
           `[Executor] Agent process died unexpectedly without writing exit code`
         );
-        const logContent = await
+        const logContent = await guest.exec(
           `tail -50 ${systemLogFile} 2>/dev/null`
         );
-        const dmesgCheck = await
+        const dmesgCheck = await guest.exec(
           `dmesg | tail -20 | grep -iE "killed|oom" 2>/dev/null`
         );
         let errorMsg = "Agent process terminated unexpectedly";
@@ -9232,7 +9576,9 @@ async function executeJob(context, config, options = {}) {
     success: exitCode === 0
   });
   log(`[Executor] Agent finished in ${duration}s with exit code ${exitCode}`);
-  const logResult = await
+  const logResult = await guest.exec(
+    `tail -100 ${systemLogFile} 2>/dev/null`
+  );
   if (logResult.stdout) {
     log(
       `[Executor] Log output (${logResult.stdout.length} chars): ${logResult.stdout.substring(0, 500)}`
@@ -9318,196 +9664,201 @@ async function executeJob2(context, config) {
     console.log(` Job ${context.runId} reported as ${result.status}`);
   }
 }
-var startCommand = new Command("start").description("Start the runner").option("--config <path>", "Config file path", "./runner.yaml").action(
-
-
-      validateFirecrackerPaths(config.firecracker);
-      console.log("Config valid");
-      const datasetSuffix = process.env.AXIOM_DATASET_SUFFIX;
-      if (!datasetSuffix) {
-        throw new Error(
-          "AXIOM_DATASET_SUFFIX is required. Set to 'dev' or 'prod'."
-        );
-      }
-      initMetrics({
-        serviceName: "vm0-runner",
-        runnerLabel: config.name,
-        axiomToken: process.env.AXIOM_TOKEN,
-        environment: datasetSuffix
-      });
-      const networkCheck = checkNetworkPrerequisites();
-      if (!networkCheck.ok) {
-        console.error("Network prerequisites not met:");
-        for (const error of networkCheck.errors) {
-          console.error(` - ${error}`);
-        }
-        process.exit(1);
-      }
-      console.log("Setting up network bridge...");
-      await setupBridge();
-      console.log("Flushing bridge ARP cache...");
-      await flushBridgeArpCache();
-      console.log("Cleaning up orphaned proxy rules...");
-      await cleanupOrphanedProxyRules(config.name);
-      console.log("Cleaning up orphaned IP allocations...");
-      await cleanupOrphanedAllocations();
-      console.log("Initializing network proxy...");
-      initVMRegistry();
-      const proxyManager = initProxyManager({
-        apiUrl: config.server.url,
-        port: config.proxy.port,
-        caDir: config.proxy.ca_dir
-      });
-      let proxyEnabled = false;
+var startCommand = new Command("start").description("Start the runner").option("--config <path>", "Config file path", "./runner.yaml").action(
+  // eslint-disable-next-line complexity -- TODO: refactor complex function
+  async (options) => {
     try {
-
-
-      console.log("
-
-
-
-
-      console.warn(
-        "Jobs with experimentalFirewall enabled will run without network interception"
-      );
-      }
-      const statusFilePath = join2(dirname(options.config), "status.json");
-      const startedAt = /* @__PURE__ */ new Date();
-      const state = { mode: "running" };
-      const updateStatus = () => {
-        writeStatusFile(statusFilePath, state.mode, startedAt);
-      };
-      console.log(
-        `Starting runner '${config.name}' for group '${config.group}'...`
-      );
-      console.log(`Max concurrent jobs: ${config.sandbox.max_concurrent}`);
-      console.log(`Status file: ${statusFilePath}`);
-      console.log("Press Ctrl+C to stop");
-      console.log("");
-      updateStatus();
-      let running = true;
-      process.on("SIGINT", () => {
-        console.log("\nShutting down...");
-        running = false;
-        state.mode = "stopped";
-        updateStatus();
-      });
-      process.on("SIGTERM", () => {
-        console.log("\nShutting down...");
-        running = false;
-        state.mode = "stopped";
-        updateStatus();
-      });
-      process.on("SIGUSR1", () => {
-        if (state.mode === "running") {
-          console.log("\n[Maintenance] Entering drain mode...");
-          console.log(
-            `[Maintenance] Active jobs: ${activeRuns.size} (will wait for completion)`
+      const config = loadConfig(options.config);
+      validateFirecrackerPaths(config.firecracker);
+      console.log("Config valid");
+      const datasetSuffix = process.env.AXIOM_DATASET_SUFFIX;
+      if (!datasetSuffix) {
+        throw new Error(
+          "AXIOM_DATASET_SUFFIX is required. Set to 'dev' or 'prod'."
        );
-          state.mode = "draining";
-          updateStatus();
       }
-
-
-
-
-
-
-
-
-
-
-
-        updateStatus();
-      }
-      continue;
-      }
-      if (activeRuns.size >= config.sandbox.max_concurrent) {
-        if (jobPromises.size > 0) {
-          await Promise.race(jobPromises);
-          updateStatus();
+      initMetrics({
+        serviceName: "vm0-runner",
+        runnerLabel: config.name,
+        axiomToken: process.env.AXIOM_TOKEN,
+        environment: datasetSuffix
+      });
+      const networkCheck = checkNetworkPrerequisites();
+      if (!networkCheck.ok) {
+        console.error("Network prerequisites not met:");
+        for (const error of networkCheck.errors) {
+          console.error(` - ${error}`);
        }
-
+        process.exit(1);
       }
+      console.log("Setting up network bridge...");
+      await setupBridge();
+      console.log("Flushing bridge ARP cache...");
+      await flushBridgeArpCache();
+      console.log("Cleaning up orphaned proxy rules...");
+      await cleanupOrphanedProxyRules(config.name);
+      console.log("Cleaning up orphaned IP allocations...");
+      await cleanupOrphanedAllocations();
+      console.log("Initializing network proxy...");
+      initVMRegistry();
+      const proxyManager = initProxyManager({
+        apiUrl: config.server.url,
+        port: config.proxy.port,
+        caDir: config.proxy.ca_dir
+      });
+      let proxyEnabled = false;
      try {
-
-
-
+        await proxyManager.start();
+        proxyEnabled = true;
+        console.log("Network proxy initialized successfully");
+      } catch (err) {
+        console.warn(
+          `Network proxy not available: ${err instanceof Error ? err.message : "Unknown error"}`
+        );
+        console.warn(
+          "Jobs with experimentalFirewall enabled will run without network interception"
        );
-
-
-
+      }
+      const statusFilePath = join2(dirname(options.config), "status.json");
+      const startedAt = /* @__PURE__ */ new Date();
+      const state = { mode: "running" };
+      const updateStatus = () => {
+        writeStatusFile(statusFilePath, state.mode, startedAt);
+      };
+      console.log(
+        `Starting runner '${config.name}' for group '${config.group}'...`
+      );
+      console.log(`Max concurrent jobs: ${config.sandbox.max_concurrent}`);
+      console.log(`Status file: ${statusFilePath}`);
+      console.log("Press Ctrl+C to stop");
+      console.log("");
+      updateStatus();
+      let running = true;
+      process.on("SIGINT", () => {
+        console.log("\nShutting down...");
+        running = false;
+        state.mode = "stopped";
+        updateStatus();
+      });
+      process.on("SIGTERM", () => {
+        console.log("\nShutting down...");
+        running = false;
+        state.mode = "stopped";
+        updateStatus();
+      });
+      process.on("SIGUSR1", () => {
+        if (state.mode === "running") {
+          console.log("\n[Maintenance] Entering drain mode...");
+          console.log(
+            `[Maintenance] Active jobs: ${activeRuns.size} (will wait for completion)`
          );
+          state.mode = "draining";
+          updateStatus();
+        }
+      });
+      const jobPromises = /* @__PURE__ */ new Set();
+      while (running) {
+        if (state.mode === "draining") {
+          if (activeRuns.size === 0) {
+            console.log(
+              "[Maintenance] All jobs completed, exiting drain mode"
+            );
+            running = false;
+            break;
+          }
+          if (jobPromises.size > 0) {
+            await Promise.race(jobPromises);
+            updateStatus();
+          }
+          continue;
+        }
+        if (activeRuns.size >= config.sandbox.max_concurrent) {
+          if (jobPromises.size > 0) {
+            await Promise.race(jobPromises);
+            updateStatus();
+          }
          continue;
        }
-        console.log(`Found job: ${job.runId}`);
        try {
-          const
-          "
-          () =>
+          const job = await withRunnerTiming(
+            "poll",
+            () => pollForJob(config.server, config.group)
          );
-
-
-
-
-
-
-
+          if (!job) {
+            await new Promise(
+              (resolve) => setTimeout(resolve, config.sandbox.poll_interval_ms)
+            );
+            continue;
+          }
+          console.log(`Found job: ${job.runId}`);
+          try {
+            const context = await withRunnerTiming(
+              "claim",
+              () => claimJob(config.server, job.runId)
            );
-
-          activeRuns.
-          jobPromises.delete(jobPromise);
+            console.log(`Claimed job: ${context.runId}`);
+            activeRuns.add(context.runId);
            updateStatus();
-
-
+            const jobPromise = executeJob2(context, config).catch((error) => {
+              console.error(
+                `Job ${context.runId} failed:`,
+                error instanceof Error ? error.message : "Unknown error"
+              );
+            }).finally(() => {
+              activeRuns.delete(context.runId);
+              jobPromises.delete(jobPromise);
+              updateStatus();
+            });
+            jobPromises.add(jobPromise);
+          } catch (error) {
+            console.log(
+              `Could not claim job ${job.runId}:`,
+              error instanceof Error ? error.message : "Unknown error"
+            );
+          }
        } catch (error) {
-          console.
-
+          console.error(
+            "Polling error:",
            error instanceof Error ? error.message : "Unknown error"
          );
+          await new Promise((resolve) => setTimeout(resolve, 2e3));
        }
-      }
-
-
-
+      }
+      if (jobPromises.size > 0) {
+        console.log(
+          `Waiting for ${jobPromises.size} active job(s) to complete...`
        );
-        await
+        await Promise.all(jobPromises);
      }
+      if (proxyEnabled) {
+        console.log("Stopping network proxy...");
+        await getProxyManager().stop();
+      }
+      console.log("Flushing metrics...");
+      await flushMetrics();
+      await shutdownMetrics();
+      state.mode = "stopped";
+      updateStatus();
+      console.log("Runner stopped");
+      process.exit(0);
+    } catch (error) {
+      if (error instanceof Error) {
+        console.error(`Error: ${error.message}`);
+      } else {
+        console.error("An unknown error occurred");
+      }
+      process.exit(1);
    }
-      if (jobPromises.size > 0) {
-        console.log(
-          `Waiting for ${jobPromises.size} active job(s) to complete...`
-        );
-        await Promise.all(jobPromises);
-      }
-      if (proxyEnabled) {
-        console.log("Stopping network proxy...");
-        await getProxyManager().stop();
-      }
-      console.log("Flushing metrics...");
-      await flushMetrics();
-      await shutdownMetrics();
-      state.mode = "stopped";
-      updateStatus();
-      console.log("Runner stopped");
-      process.exit(0);
-    } catch (error) {
-      if (error instanceof Error) {
-        console.error(`Error: ${error.message}`);
-      } else {
-        console.error("An unknown error occurred");
-      }
-      process.exit(1);
  }
-
+);
 
 // src/commands/doctor.ts
 import { Command as Command2 } from "commander";
-import { existsSync as
+import { existsSync as existsSync4, readFileSync as readFileSync3, readdirSync as readdirSync2 } from "fs";
 import { dirname as dirname2, join as join3 } from "path";
 
 // src/lib/firecracker/process.ts
-import { readdirSync, readFileSync as readFileSync2, existsSync as
+import { readdirSync, readFileSync as readFileSync2, existsSync as existsSync3 } from "fs";
 import path6 from "path";
 function parseFirecrackerCmdline(cmdline) {
   const args = cmdline.split("\0");
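Beyond the SIGINT/SIGTERM shutdown paths, the rewritten start command keeps a SIGUSR1 handler that flips the runner into drain mode: it stops claiming new jobs, waits for activeRuns to empty, then exits on its own. A hypothetical way to trigger that from another Node process (the PID is a placeholder):

// Ask a running vm0-runner (placeholder PID 12345) to drain before
// maintenance; it finishes in-flight jobs, then stops itself.
process.kill(12345, "SIGUSR1");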
@@ -9542,7 +9893,7 @@ function findFirecrackerProcesses() {
     if (!/^\d+$/.test(entry)) continue;
     const pid = parseInt(entry, 10);
     const cmdlinePath = path6.join(procDir, entry, "cmdline");
-    if (!
+    if (!existsSync3(cmdlinePath)) continue;
     try {
       const cmdline = readFileSync2(cmdlinePath, "utf-8");
       const parsed = parseFirecrackerCmdline(cmdline);
@@ -9599,7 +9950,7 @@ function findMitmproxyProcess() {
     if (!/^\d+$/.test(entry)) continue;
     const pid = parseInt(entry, 10);
     const cmdlinePath = path6.join(procDir, entry, "cmdline");
-    if (!
+    if (!existsSync3(cmdlinePath)) continue;
     try {
       const cmdline = readFileSync2(cmdlinePath, "utf-8");
       const parsed = parseMitmproxyCmdline(cmdline);
@@ -9614,194 +9965,197 @@ function findMitmproxyProcess() {
 }
 
 // src/commands/doctor.ts
-var doctorCommand = new Command2("doctor").description("Diagnose runner health, check network, and detect issues").option("--config <path>", "Config file path", "./runner.yaml").action(
-
-
-
-
-
-
-
-
-
-
-
-
-      if (status.started_at) {
-        const started = new Date(status.started_at);
-        const uptime = formatUptime(Date.now() - started.getTime());
-        console.log(
-          `Started: ${started.toLocaleString()} (uptime: ${uptime})`
+var doctorCommand = new Command2("doctor").description("Diagnose runner health, check network, and detect issues").option("--config <path>", "Config file path", "./runner.yaml").action(
+  // eslint-disable-next-line complexity -- TODO: refactor complex function
+  async (options) => {
+    try {
+      const config = loadConfig(options.config);
+      const configDir = dirname2(options.config);
+      const statusFilePath = join3(configDir, "status.json");
+      const workspacesDir = join3(configDir, "workspaces");
+      console.log(`Runner: ${config.name}`);
+      let status = null;
+      if (existsSync4(statusFilePath)) {
+        try {
+          status = JSON.parse(
+            readFileSync3(statusFilePath, "utf-8")
          );
+          console.log(`Mode: ${status.mode}`);
+          if (status.started_at) {
+            const started = new Date(status.started_at);
+            const uptime = formatUptime(Date.now() - started.getTime());
+            console.log(
+              `Started: ${started.toLocaleString()} (uptime: ${uptime})`
+            );
+          }
+        } catch {
+          console.log("Mode: unknown (status.json unreadable)");
        }
-      }
-      console.log("Mode: unknown (status.json
-      }
-      } else {
-      console.log("Mode: unknown (no status.json)");
-      }
-      console.log("");
-      console.log("API Connectivity:");
-      try {
-        await pollForJob(config.server, config.group);
-        console.log(` \u2713 Connected to ${config.server.url}`);
-        console.log(" \u2713 Authentication: OK");
-      } catch (error) {
-        console.log(` \u2717 Cannot connect to ${config.server.url}`);
-        console.log(
-          ` Error: ${error instanceof Error ? error.message : "Unknown error"}`
-        );
-      }
-      console.log("");
-      console.log("Network:");
-      const warnings = [];
-      const bridgeStatus = await checkBridgeStatus();
-      if (bridgeStatus.exists) {
-        console.log(` \u2713 Bridge ${BRIDGE_NAME2} (${bridgeStatus.ip})`);
-      } else {
-        console.log(` \u2717 Bridge ${BRIDGE_NAME2} not found`);
-        warnings.push({
-          message: `Network bridge ${BRIDGE_NAME2} does not exist`
-        });
-      }
-      const proxyPort = config.proxy.port;
-      const mitmProc = findMitmproxyProcess();
-      const portInUse = await isPortInUse(proxyPort);
-      if (mitmProc) {
-        console.log(
-          ` \u2713 Proxy mitmproxy (PID ${mitmProc.pid}) on :${proxyPort}`
-        );
-      } else if (portInUse) {
-        console.log(
-          ` \u26A0\uFE0F Proxy port :${proxyPort} in use but mitmproxy process not found`
-        );
-        warnings.push({
-          message: `Port ${proxyPort} is in use but mitmproxy process not detected`
-        });
-      } else {
-        console.log(` \u2717 Proxy mitmproxy not running`);
-        warnings.push({ message: "Proxy mitmproxy is not running" });
-      }
-      console.log("");
-      const processes = findFirecrackerProcesses();
-      const tapDevices = await listTapDevices();
-      const workspaces = existsSync3(workspacesDir) ? readdirSync2(workspacesDir).filter((d) => d.startsWith("vm0-")) : [];
-      const jobs = [];
-      const statusVmIds = /* @__PURE__ */ new Set();
-      const allocations = getAllocations();
-      if (status?.active_run_ids) {
-        for (const runId of status.active_run_ids) {
-          const vmId = runId.split("-")[0];
-          if (!vmId) continue;
-          statusVmIds.add(vmId);
-          const proc = processes.find((p) => p.vmId === vmId);
-          const ip = getIPForVm(vmId) ?? "not allocated";
-          jobs.push({
-            runId,
-            vmId,
-            ip,
-            hasProcess: !!proc,
-            pid: proc?.pid
-          });
+      } else {
+        console.log("Mode: unknown (no status.json)");
      }
-
-
-
-
-
-
-
-
-      console.log(`Runs (${jobs.length} active, max ${maxConcurrent}):`);
-      if (jobs.length === 0) {
-        console.log(" No active runs");
-      } else {
-        console.log(
-          " Run ID VM ID IP Status"
-        );
-        for (const job of jobs) {
-          const ipConflict = (ipToVmIds.get(job.ip)?.length ?? 0) > 1;
-          let statusText;
-          if (ipConflict) {
-            statusText = "\u26A0\uFE0F IP conflict!";
-          } else if (job.hasProcess) {
-            statusText = `\u2713 Running (PID ${job.pid})`;
-          } else {
-            statusText = "\u26A0\uFE0F No process";
-          }
+      console.log("");
+      console.log("API Connectivity:");
+      try {
+        await pollForJob(config.server, config.group);
+        console.log(` \u2713 Connected to ${config.server.url}`);
+        console.log(" \u2713 Authentication: OK");
+      } catch (error) {
+        console.log(` \u2717 Cannot connect to ${config.server.url}`);
        console.log(
-          `
+          ` Error: ${error instanceof Error ? error.message : "Unknown error"}`
        );
      }
-
-
-
-
+      console.log("");
+      console.log("Network:");
+      const warnings = [];
+      const bridgeStatus = await checkBridgeStatus();
+      if (bridgeStatus.exists) {
+        console.log(` \u2713 Bridge ${BRIDGE_NAME2} (${bridgeStatus.ip})`);
+      } else {
+        console.log(` \u2717 Bridge ${BRIDGE_NAME2} not found`);
        warnings.push({
-          message: `
+          message: `Network bridge ${BRIDGE_NAME2} does not exist`
        });
      }
-
-
-
+      const proxyPort = config.proxy.port;
+      const mitmProc = findMitmproxyProcess();
+      const portInUse = await isPortInUse(proxyPort);
+      if (mitmProc) {
+        console.log(
+          ` \u2713 Proxy mitmproxy (PID ${mitmProc.pid}) on :${proxyPort}`
+        );
+      } else if (portInUse) {
+        console.log(
+          ` \u26A0\uFE0F Proxy port :${proxyPort} in use but mitmproxy process not found`
+        );
        warnings.push({
-          message: `
+          message: `Port ${proxyPort} is in use but mitmproxy process not detected`
        });
+      } else {
+        console.log(` \u2717 Proxy mitmproxy not running`);
+        warnings.push({ message: "Proxy mitmproxy is not running" });
      }
-
-
-
-
-
-
-
+      console.log("");
+      const processes = findFirecrackerProcesses();
+      const tapDevices = await listTapDevices();
+      const workspaces = existsSync4(workspacesDir) ? readdirSync2(workspacesDir).filter((d) => d.startsWith("vm0-")) : [];
+      const jobs = [];
+      const statusVmIds = /* @__PURE__ */ new Set();
+      const allocations = getAllocations();
+      if (status?.active_run_ids) {
+        for (const runId of status.active_run_ids) {
+          const vmId = runId.split("-")[0];
+          if (!vmId) continue;
+          statusVmIds.add(vmId);
+          const proc = processes.find((p) => p.vmId === vmId);
+          const ip = getIPForVm(vmId) ?? "not allocated";
+          jobs.push({
+            runId,
+            vmId,
+            ip,
+            hasProcess: !!proc,
+            pid: proc?.pid
+          });
+        }
      }
-
-
-
-
-
-          message: `Orphan TAP device: ${tap} (no matching job or process)`
-        });
+      const ipToVmIds = /* @__PURE__ */ new Map();
+      for (const [ip, allocation] of allocations) {
+        const existing = ipToVmIds.get(ip) ?? [];
+        existing.push(allocation.vmId);
+        ipToVmIds.set(ip, existing);
      }
-
-
-
-
+      const maxConcurrent = config.sandbox.max_concurrent;
+      console.log(`Runs (${jobs.length} active, max ${maxConcurrent}):`);
+      if (jobs.length === 0) {
+        console.log(" No active runs");
+      } else {
+        console.log(
+          " Run ID VM ID IP Status"
+        );
+        for (const job of jobs) {
+          const ipConflict = (ipToVmIds.get(job.ip)?.length ?? 0) > 1;
+          let statusText;
+          if (ipConflict) {
+            statusText = "\u26A0\uFE0F IP conflict!";
+          } else if (job.hasProcess) {
+            statusText = `\u2713 Running (PID ${job.pid})`;
+          } else {
+            statusText = "\u26A0\uFE0F No process";
+          }
+          console.log(
+            ` ${job.runId} ${job.vmId} ${job.ip.padEnd(15)} ${statusText}`
+          );
+        }
+      }
+      console.log("");
+      for (const job of jobs) {
+        if (!job.hasProcess) {
+          warnings.push({
+            message: `Run ${job.vmId} in status.json but no Firecracker process running`
+          });
+        }
+      }
+      for (const [ip, vmIds] of ipToVmIds) {
+        if (vmIds.length > 1) {
+          warnings.push({
+            message: `IP conflict: ${ip} assigned to ${vmIds.join(", ")}`
+          });
+        }
+      }
+      const processVmIds = new Set(processes.map((p) => p.vmId));
+      for (const proc of processes) {
+        if (!statusVmIds.has(proc.vmId)) {
+          warnings.push({
+            message: `Orphan process: PID ${proc.pid} (vmId ${proc.vmId}) not in status.json`
+          });
+        }
+      }
+      for (const tap of tapDevices) {
+        const vmId = tap.replace("tap", "");
+        if (!processVmIds.has(vmId) && !statusVmIds.has(vmId)) {
+          warnings.push({
+            message: `Orphan TAP device: ${tap} (no matching job or process)`
+          });
+        }
+      }
+      for (const ws of workspaces) {
+        const vmId = ws.replace("vm0-", "");
+        if (!processVmIds.has(vmId) && !statusVmIds.has(vmId)) {
+          warnings.push({
+            message: `Orphan workspace: ${ws} (no matching job or process)`
+          });
+        }
+      }
+      const activeVmIps = new Set(jobs.map((j) => j.ip));
+      const iptablesRules = await listIptablesNatRules();
+      const orphanedIptables = await findOrphanedIptablesRules(
+        iptablesRules,
+        activeVmIps,
+        proxyPort
+      );
+      for (const rule of orphanedIptables) {
        warnings.push({
-          message: `Orphan
+          message: `Orphan iptables rule: redirect ${rule.sourceIp}:${rule.destPort} -> :${rule.redirectPort}`
        });
      }
-
-
-
-
-
-
-
-      );
-      for (const rule of orphanedIptables) {
-        warnings.push({
-          message: `Orphan iptables rule: redirect ${rule.sourceIp}:${rule.destPort} -> :${rule.redirectPort}`
-        });
-      }
-      console.log("Warnings:");
-      if (warnings.length === 0) {
-        console.log(" None");
-      } else {
-        for (const w of warnings) {
-          console.log(` - ${w.message}`);
+      console.log("Warnings:");
+      if (warnings.length === 0) {
+        console.log(" None");
+      } else {
+        for (const w of warnings) {
+          console.log(` - ${w.message}`);
+        }
      }
+      process.exit(warnings.length > 0 ? 1 : 0);
+    } catch (error) {
+      console.error(
+        `Error: ${error instanceof Error ? error.message : "Unknown error"}`
+      );
+      process.exit(1);
      }
-      process.exit(warnings.length > 0 ? 1 : 0);
-    } catch (error) {
-      console.error(
-        `Error: ${error instanceof Error ? error.message : "Unknown error"}`
-      );
-      process.exit(1);
  }
-
+);
 function formatUptime(ms) {
   const seconds = Math.floor(ms / 1e3);
   const minutes = Math.floor(seconds / 60);
@@ -9815,10 +10169,11 @@ function formatUptime(ms) {
 
 // src/commands/kill.ts
 import { Command as Command3 } from "commander";
-import { existsSync as
+import { existsSync as existsSync5, readFileSync as readFileSync4, writeFileSync as writeFileSync3, rmSync } from "fs";
 import { dirname as dirname3, join as join4 } from "path";
 import * as readline2 from "readline";
 var killCommand = new Command3("kill").description("Force terminate a run and clean up all resources").argument("<run-id>", "Run ID (full UUID or short 8-char vmId)").option("--config <path>", "Config file path", "./runner.yaml").option("--force", "Skip confirmation prompt").action(
+  // eslint-disable-next-line complexity -- TODO: refactor complex function
   async (runIdArg, options) => {
     try {
       loadConfig(options.config);
@@ -9879,7 +10234,7 @@ var killCommand = new Command3("kill").description("Force terminate a run and cl
           message: error instanceof Error ? error.message : "Unknown error"
         });
       }
-      if (
+      if (existsSync5(workspaceDir)) {
         try {
           rmSync(workspaceDir, { recursive: true, force: true });
           results.push({
@@ -9901,7 +10256,7 @@ var killCommand = new Command3("kill").description("Force terminate a run and cl
           message: "Not found (already cleaned)"
         });
       }
-      if (runId &&
+      if (runId && existsSync5(statusFilePath)) {
         try {
           const status = JSON.parse(
             readFileSync4(statusFilePath, "utf-8")
@@ -9960,7 +10315,7 @@ function resolveRunId(input, statusFilePath) {
     const vmId = input.split("-")[0];
     return { vmId: vmId ?? input, runId: input };
   }
-  if (
+  if (existsSync5(statusFilePath)) {
     try {
       const status = JSON.parse(
         readFileSync4(statusFilePath, "utf-8")
@@ -9990,8 +10345,8 @@ async function confirm(message) {
 }
 
 // src/commands/benchmark.ts
-import { Command as Command4 } from "commander";
-import
+import { Command as Command4, Option } from "commander";
+import crypto2 from "crypto";
 
 // src/lib/timing.ts
 var Timer = class {
@@ -10026,7 +10381,7 @@ var Timer = class {
 // src/commands/benchmark.ts
 function createBenchmarkContext(prompt, options) {
   return {
-    runId:
+    runId: crypto2.randomUUID(),
     prompt,
     agentComposeVersionId: "benchmark-local",
     vars: null,
@@ -10043,11 +10398,19 @@ function createBenchmarkContext(prompt, options) {
 }
 var benchmarkCommand = new Command4("benchmark").description(
   "Run a VM performance benchmark (executes bash command directly)"
-).argument("<prompt>", "The bash command to execute in the VM").option("--config <path>", "Config file path", "./runner.yaml").option("--working-dir <path>", "Working directory in VM", "/home/user").option("--agent-type <type>", "Agent type", "claude-code").
+).argument("<prompt>", "The bash command to execute in the VM").option("--config <path>", "Config file path", "./runner.yaml").option("--working-dir <path>", "Working directory in VM", "/home/user").option("--agent-type <type>", "Agent type", "claude-code").addOption(
+  new Option(
+    "--guest-protocol <protocol>",
+    "Guest communication protocol"
+  ).choices(["vsock", "ssh"])
+).action(async (prompt, options) => {
   const timer = new Timer();
   try {
     timer.log("Loading configuration...");
     const config = loadDebugConfig(options.config);
+    if (options.guestProtocol) {
+      config.sandbox.guest_protocol = options.guestProtocol;
+    }
     validateFirecrackerPaths(config.firecracker);
     timer.log("Checking network prerequisites...");
     const networkCheck = checkNetworkPrerequisites();
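The new --guest-protocol flag is declared through commander's Option helper rather than a plain .option() call so that .choices(["vsock", "ssh"]) rejects any other value before the action runs; commander exposes the parsed value as options.guestProtocol (camel-cased), which the action copies into config.sandbox.guest_protocol. A standalone sketch of the same flag in isolation (the program name and argv here are hypothetical):

// Minimal reproduction of the flag's parse-time validation.
import { Command, Option } from "commander";

const program = new Command("demo");
program.addOption(
  new Option("--guest-protocol <protocol>", "Guest communication protocol")
    .choices(["vsock", "ssh"]) // any other value is rejected at parse time
);
program.parse(["node", "demo", "--guest-protocol", "ssh"]);
console.log(program.opts().guestProtocol); // -> "ssh"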
@@ -10081,7 +10444,7 @@ var benchmarkCommand = new Command4("benchmark").description(
 });
 
 // src/index.ts
-var version = true ? "2.
+var version = true ? "2.15.0" : "0.1.0";
 program.name("vm0-runner").version(version).description("Self-hosted runner for VM0 agents");
 program.addCommand(startCommand);
 program.addCommand(doctorCommand);